lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/directory/error.rs
pentlander/tantivy
0e8fcd57274e4276186f8fc32e46f9e9dccdc088
use std::error::Error as StdError; use std::fmt; use std::io; use std::path::PathBuf; #[derive(Debug)] pub struct IOError { path: Option<PathBuf>, err: io::Error, } impl fmt::Display for IOError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.path { Some(ref path) => write!(f, "io error occurred on path '{:?}': '{}'", path, self.err), None => write!(f, "io error occurred: '{}'", self.err), } } } impl StdError for IOError { fn description(&self) -> &str { "io error occurred" } fn cause(&self) -> Option<&StdError> { Some(&self.err) } } impl IOError { pub(crate) fn with_path(path: PathBuf, err: io::Error) -> Self { IOError { path: Some(path), err, } } } impl From<io::Error> for IOError { fn from(err: io::Error) -> IOError { IOError { path: None, err } } } #[derive(Debug)] pub enum OpenDirectoryError { DoesNotExist(PathBuf), NotADirectory(PathBuf), } impl fmt::Display for OpenDirectoryError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { OpenDirectoryError::DoesNotExist(ref path) => { write!(f, "the underlying directory '{:?}' does not exist", path) } OpenDirectoryError::NotADirectory(ref path) => { write!(f, "the path '{:?}' exists but is not a directory", path) } } } } impl StdError for OpenDirectoryError { fn description(&self) -> &str { "error occurred while opening a directory" } fn cause(&self) -> Option<&StdError> { None } } #[derive(Debug)] pub enum OpenWriteError { FileAlreadyExists(PathBuf), IOError(IOError), } impl From<IOError> for OpenWriteError { fn from(err: IOError) -> OpenWriteError { OpenWriteError::IOError(err) } } impl fmt::Display for OpenWriteError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { OpenWriteError::FileAlreadyExists(ref path) => { write!(f, "the file '{:?}' already exists", path) } OpenWriteError::IOError(ref err) => write!( f, "an io error occurred while opening a file for writing: '{}'", err ), } } } impl StdError for OpenWriteError { fn description(&self) -> &str { 
"error occurred while opening a file for writing" } fn cause(&self) -> Option<&StdError> { match *self { OpenWriteError::FileAlreadyExists(_) => None, OpenWriteError::IOError(ref err) => Some(err), } } } #[derive(Debug)] pub enum OpenReadError { FileDoesNotExist(PathBuf), IOError(IOError), } impl From<IOError> for OpenReadError { fn from(err: IOError) -> OpenReadError { OpenReadError::IOError(err) } } impl fmt::Display for OpenReadError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { OpenReadError::FileDoesNotExist(ref path) => { write!(f, "the file '{:?}' does not exist", path) } OpenReadError::IOError(ref err) => write!( f, "an io error occurred while opening a file for reading: '{}'", err ), } } } impl StdError for OpenReadError { fn description(&self) -> &str { "error occurred while opening a file for reading" } fn cause(&self) -> Option<&StdError> { match *self { OpenReadError::FileDoesNotExist(_) => None, OpenReadError::IOError(ref err) => Some(err), } } } #[derive(Debug)] pub enum DeleteError { FileDoesNotExist(PathBuf), IOError(IOError), } impl From<IOError> for DeleteError { fn from(err: IOError) -> DeleteError { DeleteError::IOError(err) } } impl fmt::Display for DeleteError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { DeleteError::FileDoesNotExist(ref path) => { write!(f, "the file '{:?}' does not exist", path) } DeleteError::IOError(ref err) => { write!(f, "an io error occurred while deleting a file: '{}'", err) } } } } impl StdError for DeleteError { fn description(&self) -> &str { "error occurred while deleting a file" } fn cause(&self) -> Option<&StdError> { match *self { DeleteError::FileDoesNotExist(_) => None, DeleteError::IOError(ref err) => Some(err), } } }
use std::error::Error as StdError; use std::fmt; use std::io; use std::path::PathBuf; #[derive(Debug)] pub struct IOError { path: Option<PathBuf>, err: io::Error, } impl fmt::Display for IOError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.path { Some(ref path) => write!(f, "io error occurred on path '{:?}': '{}'", path, self.err), None => write!(f, "io error occurred: '{}'", self.err), } } } impl StdError for IOError { fn description(&self) -> &str { "io error occurred" } fn cause(&self) -> Option<&StdError> { Some(&self.err) } } impl IOError { pub(crate) fn with_path(path: PathBuf, err: io::Error) -> Self { IOError { path: Some(path), err, } } } impl From<io::Error> for IOError { fn from(err: io::Error) -> IOError { IOError { path: None, err } } } #[derive(Debug)] pub enum OpenDirectoryError { DoesNotExist(PathBuf), NotADirectory(PathBuf), } impl fmt::Display for OpenDirectoryError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { OpenDirectoryError::DoesNotExist(ref path) => { write!(f, "the underlying directory '{:?}' does not exist", path) } OpenDirectoryError::NotADirectory(ref path) => { write!(f, "the path '{:?}' exists but is not a directory", path) } } } } impl StdError for OpenDirectoryError { fn description(&self) -> &str { "error occurred while opening a directory" } fn cause(&self) -> Option<&StdError> { None } } #[derive(Debug)] pub enum OpenWriteError { FileAlreadyExists(PathBuf), IOError(IOError), } impl From<IOError> for OpenWriteError { fn from(err: IOError) -> OpenWriteError { OpenWriteError::IOError(err) } } impl fmt::Display for OpenWriteError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { OpenWriteError::FileAlreadyExists(ref path) => { write!(f, "the file '{:?}' already exists", path) } OpenWriteError::IOError(ref err) => write!( f, "an io error occurred while opening a file for writing: '{}'", err ), } } } impl StdError for OpenWriteError { fn description(&self) -> &str { 
"error occurred while opening a file for writing" }
} #[derive(Debug)] pub enum OpenReadError { FileDoesNotExist(PathBuf), IOError(IOError), } impl From<IOError> for OpenReadError { fn from(err: IOError) -> OpenReadError { OpenReadError::IOError(err) } } impl fmt::Display for OpenReadError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { OpenReadError::FileDoesNotExist(ref path) => { write!(f, "the file '{:?}' does not exist", path) } OpenReadError::IOError(ref err) => write!( f, "an io error occurred while opening a file for reading: '{}'", err ), } } } impl StdError for OpenReadError { fn description(&self) -> &str { "error occurred while opening a file for reading" } fn cause(&self) -> Option<&StdError> { match *self { OpenReadError::FileDoesNotExist(_) => None, OpenReadError::IOError(ref err) => Some(err), } } } #[derive(Debug)] pub enum DeleteError { FileDoesNotExist(PathBuf), IOError(IOError), } impl From<IOError> for DeleteError { fn from(err: IOError) -> DeleteError { DeleteError::IOError(err) } } impl fmt::Display for DeleteError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { DeleteError::FileDoesNotExist(ref path) => { write!(f, "the file '{:?}' does not exist", path) } DeleteError::IOError(ref err) => { write!(f, "an io error occurred while deleting a file: '{}'", err) } } } } impl StdError for DeleteError { fn description(&self) -> &str { "error occurred while deleting a file" } fn cause(&self) -> Option<&StdError> { match *self { DeleteError::FileDoesNotExist(_) => None, DeleteError::IOError(ref err) => Some(err), } } }
fn cause(&self) -> Option<&StdError> { match *self { OpenWriteError::FileAlreadyExists(_) => None, OpenWriteError::IOError(ref err) => Some(err), } }
function_block-full_function
[ { "content": "/// Write a `u32` as a vint payload.\n\npub fn write_u32_vint<W: io::Write>(val: u32, writer: &mut W) -> io::Result<()> {\n\n let (val, num_bytes) = serialize_vint_u32(val);\n\n let mut buffer = [0u8; 8];\n\n LittleEndian::write_u64(&mut buffer, val);\n\n writer.write_all(&buffer[..num_bytes])\n\n}\n\n\n\nimpl VInt {\n\n pub fn val(&self) -> u64 {\n\n self.0\n\n }\n\n\n\n pub fn deserialize_u64<R: Read>(reader: &mut R) -> io::Result<u64> {\n\n VInt::deserialize(reader).map(|vint| vint.0)\n\n }\n\n\n\n pub fn serialize_into_vec(&self, output: &mut Vec<u8>) {\n\n let mut buffer = [0u8; 10];\n\n let num_bytes = self.serialize_into(&mut buffer);\n\n output.extend(&buffer[0..num_bytes]);\n", "file_path": "src/common/vint.rs", "rank": 0, "score": 298177.86123454454 }, { "content": "/// Write a delete `BitSet`\n\n///\n\n/// where `delete_bitset` is the set of deleted `DocId`.\n\npub fn write_delete_bitset(delete_bitset: &BitSet, writer: &mut WritePtr) -> io::Result<()> {\n\n let max_doc = delete_bitset.capacity();\n\n let mut byte = 0u8;\n\n let mut shift = 0u8;\n\n for doc in 0..max_doc {\n\n if delete_bitset.contains(doc) {\n\n byte |= 1 << shift;\n\n }\n\n if shift == 7 {\n\n writer.write_all(&[byte])?;\n\n shift = 0;\n\n byte = 0;\n\n } else {\n\n shift += 1;\n\n }\n\n }\n\n if max_doc % 8 > 0 {\n\n writer.write_all(&[byte])?;\n\n }\n\n writer.flush()\n", "file_path": "src/fastfield/delete.rs", "rank": 1, "score": 266647.1015626155 }, { "content": "/// Returns None iff the file exists, can be read, but is empty (and hence\n\n/// cannot be mmapped).\n\n///\n\nfn open_mmap(full_path: &Path) -> result::Result<Option<MmapReadOnly>, OpenReadError> {\n\n let file = File::open(full_path).map_err(|e| {\n\n if e.kind() == io::ErrorKind::NotFound {\n\n OpenReadError::FileDoesNotExist(full_path.to_owned())\n\n } else {\n\n OpenReadError::IOError(IOError::with_path(full_path.to_owned(), e))\n\n }\n\n })?;\n\n\n\n let meta_data = file\n\n .metadata()\n\n 
.map_err(|e| IOError::with_path(full_path.to_owned(), e))?;\n\n if meta_data.len() == 0 {\n\n // if the file size is 0, it will not be possible\n\n // to mmap the file, so we return None\n\n // instead.\n\n return Ok(None);\n\n }\n\n unsafe {\n\n MmapReadOnly::open(&file)\n", "file_path": "src/directory/mmap_directory.rs", "rank": 2, "score": 250895.97885498602 }, { "content": "pub fn compress(uncompressed: &[u8], compressed: &mut Vec<u8>) -> io::Result<()> {\n\n compressed.clear();\n\n let mut encoder = snap::Writer::new(compressed);\n\n encoder.write_all(&uncompressed)?;\n\n encoder.flush()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/store/compression_snap.rs", "rank": 3, "score": 225893.29122310627 }, { "content": "pub fn compress(uncompressed: &[u8], compressed: &mut Vec<u8>) -> io::Result<()> {\n\n compressed.clear();\n\n let mut encoder = lz4::EncoderBuilder::new().build(compressed)?;\n\n encoder.write_all(&uncompressed)?;\n\n let (_, encoder_result) = encoder.finish();\n\n encoder_result?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/store/compression_lz4.rs", "rank": 4, "score": 225893.29122310627 }, { "content": "pub fn decompress(compressed: &[u8], decompressed: &mut Vec<u8>) -> io::Result<()> {\n\n decompressed.clear();\n\n snap::Reader::new(compressed).read_to_end(decompressed)?;\n\n Ok(())\n\n}\n", "file_path": "src/store/compression_snap.rs", "rank": 5, "score": 225893.29122310627 }, { "content": "pub fn decompress(compressed: &[u8], decompressed: &mut Vec<u8>) -> io::Result<()> {\n\n decompressed.clear();\n\n let mut decoder = lz4::Decoder::new(compressed)?;\n\n decoder.read_to_end(decompressed)?;\n\n Ok(())\n\n}\n", "file_path": "src/store/compression_lz4.rs", "rank": 6, "score": 225893.29122310627 }, { "content": "/// Compose two occur values.\n\npub fn compose_occur(left: Occur, right: Occur) -> Occur {\n\n match left {\n\n Occur::Should => right,\n\n Occur::Must => {\n\n if right == Occur::MustNot {\n\n Occur::MustNot\n\n } else {\n\n 
Occur::Must\n\n }\n\n }\n\n Occur::MustNot => {\n\n if right == Occur::MustNot {\n\n Occur::Must\n\n } else {\n\n Occur::MustNot\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/query/occur.rs", "rank": 7, "score": 223445.35875574555 }, { "content": "fn convert_fst_error(e: fst::Error) -> io::Error {\n\n io::Error::new(io::ErrorKind::Other, e)\n\n}\n\n\n\n/// Builder for the new term dictionary.\n\n///\n\n/// Inserting must be done in the order of the `keys`.\n\npub struct TermDictionaryBuilder<W> {\n\n fst_builder: fst::MapBuilder<W>,\n\n term_info_store_writer: TermInfoStoreWriter,\n\n term_ord: u64,\n\n}\n\n\n\nimpl<W> TermDictionaryBuilder<W>\n\nwhere\n\n W: Write,\n\n{\n\n /// Creates a new `TermDictionaryBuilder`\n\n pub fn create(w: W, _field_type: &FieldType) -> io::Result<Self> {\n\n let fst_builder = fst::MapBuilder::new(w).map_err(convert_fst_error)?;\n", "file_path": "src/termdict/termdict.rs", "rank": 8, "score": 220681.93509054254 }, { "content": "fn occur_letter(occur: Occur) -> &'static str {\n\n match occur {\n\n Occur::Must => \"+\",\n\n Occur::MustNot => \"-\",\n\n Occur::Should => \"\",\n\n }\n\n}\n\n\n\nimpl fmt::Debug for LogicalAST {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match *self {\n\n LogicalAST::Clause(ref clause) => {\n\n if clause.is_empty() {\n\n write!(formatter, \"<emptyclause>\")?;\n\n } else {\n\n let (ref occur, ref subquery) = clause[0];\n\n write!(formatter, \"({}{:?}\", occur_letter(*occur), subquery)?;\n\n for &(ref occur, ref subquery) in &clause[1..] 
{\n\n write!(formatter, \" {}{:?}\", occur_letter(*occur), subquery)?;\n\n }\n", "file_path": "src/query/query_parser/logical_ast.rs", "rank": 9, "score": 219321.39962780342 }, { "content": "/// Expose the current version of tantivy, as well\n\n/// whether it was compiled with the simd compression.\n\npub fn version() -> &'static str {\n\n if cfg!(feature = \"simdcompression\") {\n\n concat!(env!(\"CARGO_PKG_VERSION\"), \"-simd\")\n\n } else {\n\n concat!(env!(\"CARGO_PKG_VERSION\"), \"-nosimd\")\n\n }\n\n}\n\n\n\n/// Defines tantivy's merging strategy\n\npub mod merge_policy {\n\n pub use indexer::DefaultMergePolicy;\n\n pub use indexer::LogMergePolicy;\n\n pub use indexer::MergePolicy;\n\n pub use indexer::NoMergePolicy;\n\n}\n\n\n\n/// A `u32` identifying a document within a segment.\n\n/// Documents have their `DocId` assigned incrementally,\n\n/// as they are added in the segment.\n\npub type DocId = u32;\n", "file_path": "src/lib.rs", "rank": 10, "score": 214011.40184999607 }, { "content": "/// This Write wraps a File, but has the specificity of\n\n/// call `sync_all` on flush.\n\nstruct SafeFileWriter(File);\n\n\n\nimpl SafeFileWriter {\n\n fn new(file: File) -> SafeFileWriter {\n\n SafeFileWriter(file)\n\n }\n\n}\n\n\n\nimpl Write for SafeFileWriter {\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n self.0.write(buf)\n\n }\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n self.0.flush()?;\n\n self.0.sync_all()\n\n }\n\n}\n\n\n\nimpl Seek for SafeFileWriter {\n", "file_path": "src/directory/mmap_directory.rs", "rank": 11, "score": 213394.78984308048 }, { "content": "/// Returns true iff the file is \"managed\".\n\n/// Non-managed file are not subject to garbage collection.\n\n///\n\n/// Filenames that starts by a \".\" -typically locks-\n\n/// are not managed.\n\nfn is_managed(path: &Path) -> bool {\n\n path.to_str()\n\n .map(|p_str| !p_str.starts_with('.'))\n\n .unwrap_or(true)\n\n}\n\n\n\n/// Wrapper of directories that keeps track of files 
created by Tantivy.\n\n///\n\n/// A managed directory is just a wrapper of a directory\n\n/// that keeps a (persisted) list of the files that\n\n/// have been created (and not deleted) by tantivy so far.\n\n///\n\n/// Thanks to this list, it implements a `garbage_collect` method\n\n/// that removes the files that were created by tantivy and are not\n\n/// useful anymore.\n\n#[derive(Debug)]\n\npub struct ManagedDirectory {\n\n directory: Box<Directory>,\n\n meta_informations: Arc<RwLock<MetaInformation>>,\n\n}\n\n\n", "file_path": "src/directory/managed_directory.rs", "rank": 12, "score": 211736.46786384872 }, { "content": "/// Save the index meta file.\n\n/// This operation is atomic :\n\n/// Either\n\n// - it fails, in which case an error is returned,\n\n/// and the `meta.json` remains untouched,\n\n/// - it success, and `meta.json` is written\n\n/// and flushed.\n\n///\n\n/// This method is not part of tantivy's public API\n\npub fn save_new_metas(schema: Schema, opstamp: u64, directory: &mut Directory) -> Result<()> {\n\n save_metas(vec![], schema, opstamp, None, directory)\n\n}\n\n\n", "file_path": "src/indexer/segment_updater.rs", "rank": 13, "score": 210474.4379068635 }, { "content": "/// Synonym of Seek + Write\n\npub trait SeekableWrite: Seek + Write {}\n\nimpl<T: Seek + Write> SeekableWrite for T {}\n\n\n\n/// Write object for Directory.\n\n///\n\n/// `WritePtr` are required to implement both Write\n\n/// and Seek.\n\npub type WritePtr = BufWriter<Box<SeekableWrite>>;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use std::io::{Seek, SeekFrom, Write};\n\n use std::path::Path;\n\n\n\n lazy_static! 
{\n\n static ref TEST_PATH: &'static Path = Path::new(\"some_path_for_test\");\n\n }\n\n\n", "file_path": "src/directory/mod.rs", "rank": 14, "score": 193307.4784583563 }, { "content": "/// Validator for a potential `field_name`.\n\n/// Returns true iff the name can be use for a field name.\n\n///\n\n/// A field name must start by a letter `[a-zA-Z]`.\n\n/// The other characters can be any alphanumic character `[a-ZA-Z0-9]` or `_`.\n\npub fn is_valid_field_name(field_name: &str) -> bool {\n\n lazy_static! {\n\n static ref FIELD_NAME_PTN: Regex = Regex::new(\"^[a-zA-Z][_a-zA-Z0-9]*$\").unwrap();\n\n }\n\n FIELD_NAME_PTN.is_match(field_name)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::is_valid_field_name;\n\n\n\n #[test]\n\n fn test_is_valid_name() {\n\n assert!(is_valid_field_name(\"text\"));\n\n assert!(is_valid_field_name(\"text0\"));\n\n assert!(!is_valid_field_name(\"0text\"));\n\n assert!(!is_valid_field_name(\"\"));\n\n assert!(!is_valid_field_name(\"シャボン玉\"));\n\n assert!(is_valid_field_name(\"my_text_field\"));\n\n }\n\n\n\n}\n", "file_path": "src/schema/mod.rs", "rank": 15, "score": 192985.64669306553 }, { "content": "/// Reads a vint `u32` from a buffer, and\n\n/// consumes its payload data.\n\n///\n\n/// # Panics\n\n///\n\n/// If the buffer does not start by a valid\n\n/// vint payload\n\npub fn read_u32_vint(data: &mut &[u8]) -> u32 {\n\n let vlen = vint_len(*data);\n\n let mut result = 0u32;\n\n let mut shift = 0u64;\n\n for &b in &data[..vlen] {\n\n result |= u32::from(b & 127u8) << shift;\n\n shift += 7;\n\n }\n\n *data = &data[vlen..];\n\n result\n\n}\n\n\n", "file_path": "src/common/vint.rs", "rank": 16, "score": 188759.96195486782 }, { "content": "/// Open a new index writer. Attempts to acquire a lockfile.\n\n///\n\n/// The lockfile should be deleted on drop, but it is possible\n\n/// that due to a panic or other error, a stale lockfile will be\n\n/// left in the index directory. 
If you are sure that no other\n\n/// `IndexWriter` on the system is accessing the index directory,\n\n/// it is safe to manually delete the lockfile.\n\n///\n\n/// `num_threads` specifies the number of indexing workers that\n\n/// should work at the same time.\n\n/// # Errors\n\n/// If the lockfile already exists, returns `Error::FileAlreadyExists`.\n\n/// # Panics\n\n/// If the heap size per thread is too small, panics.\n\npub fn open_index_writer(\n\n index: &Index,\n\n num_threads: usize,\n\n heap_size_in_bytes_per_thread: usize,\n\n directory_lock: DirectoryLock,\n\n) -> Result<IndexWriter> {\n\n if heap_size_in_bytes_per_thread < HEAP_SIZE_MIN {\n\n let err_msg = format!(\n\n \"The heap size per thread needs to be at least {}.\",\n\n HEAP_SIZE_MIN\n\n );\n\n return Err(TantivyError::InvalidArgument(err_msg));\n\n }\n\n if heap_size_in_bytes_per_thread >= HEAP_SIZE_MAX {\n\n let err_msg = format!(\"The heap size per thread cannot exceed {}\", HEAP_SIZE_MAX);\n\n return Err(TantivyError::InvalidArgument(err_msg));\n\n }\n\n let (document_sender, document_receiver): (DocumentSender, DocumentReceiver) =\n\n channel::bounded(PIPELINE_MAX_SIZE_IN_DOCS);\n\n\n", "file_path": "src/indexer/index_writer.rs", "rank": 17, "score": 184735.54588075393 }, { "content": "#[inline(always)]\n\npub fn compress_sorted<'a>(input: &[u32], output: &'a mut [u8], mut offset: u32) -> &'a [u8] {\n\n let mut byte_written = 0;\n\n for &v in input {\n\n let mut to_encode: u32 = v - offset;\n\n offset = v;\n\n loop {\n\n let next_byte: u8 = (to_encode % 128u32) as u8;\n\n to_encode /= 128u32;\n\n if to_encode == 0u32 {\n\n output[byte_written] = next_byte | 128u8;\n\n byte_written += 1;\n\n break;\n\n } else {\n\n output[byte_written] = next_byte;\n\n byte_written += 1;\n\n }\n\n }\n\n }\n\n &output[..byte_written]\n\n}\n", "file_path": "src/postings/compression/vint.rs", "rank": 18, "score": 176235.23094834434 }, { "content": "/// Returns the intersection scorer.\n\n///\n\n/// The score 
associated to the documents is the sum of the\n\n/// score of the `Scorer`s given in argument.\n\n///\n\n/// For better performance, the function uses a\n\n/// specialized implementation if the two\n\n/// shortest scorers are `TermScorer`s.\n\npub fn intersect_scorers(mut scorers: Vec<Box<Scorer>>) -> Box<Scorer> {\n\n let num_docsets = scorers.len();\n\n scorers.sort_by(|left, right| right.size_hint().cmp(&left.size_hint()));\n\n let rarest_opt = scorers.pop();\n\n let second_rarest_opt = scorers.pop();\n\n scorers.reverse();\n\n match (rarest_opt, second_rarest_opt) {\n\n (None, None) => Box::new(EmptyScorer),\n\n (Some(single_docset), None) => single_docset,\n\n (Some(left), Some(right)) => {\n\n {\n\n let all_term_scorers = [&left, &right].into_iter().all(|scorer| {\n\n let scorer_ref: &Scorer = (*scorer).borrow();\n\n Downcast::<TermScorer>::is_type(scorer_ref)\n\n });\n\n if all_term_scorers {\n\n let left = *Downcast::<TermScorer>::downcast(left).unwrap();\n\n let right = *Downcast::<TermScorer>::downcast(right).unwrap();\n\n return Box::new(Intersection {\n\n left,\n", "file_path": "src/query/intersection.rs", "rank": 19, "score": 173574.8709505801 }, { "content": "/// Saves the file containing the list of existing files\n\n/// that were created by tantivy.\n\nfn save_managed_paths(\n\n directory: &mut Directory,\n\n wlock: &RwLockWriteGuard<MetaInformation>,\n\n) -> io::Result<()> {\n\n let mut w = serde_json::to_vec(&wlock.managed_paths)?;\n\n writeln!(&mut w)?;\n\n directory.atomic_write(&MANAGED_FILEPATH, &w[..])?;\n\n Ok(())\n\n}\n\n\n\nimpl ManagedDirectory {\n\n /// Wraps a directory as managed directory.\n\n pub fn wrap<Dir: Directory>(directory: Dir) -> Result<ManagedDirectory> {\n\n match directory.atomic_read(&MANAGED_FILEPATH) {\n\n Ok(data) => {\n\n let managed_files_json = String::from_utf8_lossy(&data);\n\n let managed_files: HashSet<PathBuf> = serde_json::from_str(&managed_files_json)\n\n .map_err(|e| {\n\n DataCorruption::new(\n\n 
MANAGED_FILEPATH.clone(),\n", "file_path": "src/directory/managed_directory.rs", "rank": 20, "score": 171120.76207420207 }, { "content": "#[derive(Clone)]\n\nstruct InnerDirectory(Arc<RwLock<HashMap<PathBuf, Arc<Vec<u8>>>>>);\n\n\n\nimpl InnerDirectory {\n\n fn new() -> InnerDirectory {\n\n InnerDirectory(Arc::new(RwLock::new(HashMap::new())))\n\n }\n\n\n\n fn write(&self, path: PathBuf, data: &[u8]) -> io::Result<bool> {\n\n let mut map = self.0.write().map_err(|_| {\n\n make_io_err(format!(\n\n \"Failed to lock the directory, when trying to write {:?}\",\n\n path\n\n ))\n\n })?;\n\n let prev_value = map.insert(path, Arc::new(Vec::from(data)));\n\n Ok(prev_value.is_some())\n\n }\n\n\n\n fn open_read(&self, path: &Path) -> Result<ReadOnlySource, OpenReadError> {\n\n self.0\n", "file_path": "src/directory/ram_directory.rs", "rank": 21, "score": 168857.46285864888 }, { "content": "pub fn store<Item: Copy + 'static>(dest: &mut [u8], val: Item) {\n\n assert_eq!(dest.len(), std::mem::size_of::<Item>());\n\n unsafe {\n\n ptr::write_unaligned(dest.as_mut_ptr() as *mut Item, val);\n\n }\n\n}\n\n\n", "file_path": "src/postings/stacker/memory_arena.rs", "rank": 22, "score": 165857.73074406764 }, { "content": "#[inline(always)]\n\npub fn uncompress_sorted<'a>(compressed_data: &'a [u8], output: &mut [u32], offset: u32) -> usize {\n\n let mut read_byte = 0;\n\n let mut result = offset;\n\n for output_mut in output.iter_mut() {\n\n let mut shift = 0u32;\n\n loop {\n\n let cur_byte = compressed_data[read_byte];\n\n read_byte += 1;\n\n result += u32::from(cur_byte % 128u8) << shift;\n\n if cur_byte & 128u8 != 0u8 {\n\n break;\n\n }\n\n shift += 7;\n\n }\n\n *output_mut = result;\n\n }\n\n read_byte\n\n}\n\n\n\n#[inline(always)]\n", "file_path": "src/postings/compression/vint.rs", "rank": 23, "score": 158792.34728471417 }, { "content": "// This method is used as a trick to workaround the borrow checker\n\nfn write(\n\n multifield_postings: &MultiFieldPostingsWriter,\n\n 
fast_field_writers: &FastFieldsWriter,\n\n fieldnorms_writer: &FieldNormsWriter,\n\n mut serializer: SegmentSerializer,\n\n) -> Result<()> {\n\n let term_ord_map = multifield_postings.serialize(serializer.get_postings_serializer())?;\n\n fast_field_writers.serialize(serializer.get_fast_field_serializer(), &term_ord_map)?;\n\n fieldnorms_writer.serialize(serializer.get_fieldnorms_serializer())?;\n\n serializer.close()?;\n\n Ok(())\n\n}\n\n\n\nimpl SerializableSegment for SegmentWriter {\n\n fn write(&self, serializer: SegmentSerializer) -> Result<u32> {\n\n let max_doc = self.max_doc;\n\n write(\n\n &self.multifield_postings,\n\n &self.fast_field_writers,\n\n &self.fieldnorms_writer,\n\n serializer,\n\n )?;\n\n Ok(max_doc)\n\n }\n\n}\n", "file_path": "src/indexer/segment_writer.rs", "rank": 24, "score": 156525.9622233901 }, { "content": "fn escape_slashes(s: &str) -> Cow<str> {\n\n lazy_static! {\n\n static ref SLASH_PTN: Regex = Regex::new(r\"[\\\\/]\").unwrap();\n\n }\n\n SLASH_PTN.replace_all(s, \"\\\\/\")\n\n}\n\n\n\nimpl Serialize for Facet {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(&self.to_string())\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Facet {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n", "file_path": "src/schema/facet.rs", "rank": 25, "score": 155438.55964812564 }, { "content": "/// Save the index meta file.\n\n/// This operation is atomic:\n\n/// Either\n\n// - it fails, in which case an error is returned,\n\n/// and the `meta.json` remains untouched,\n\n/// - it success, and `meta.json` is written\n\n/// and flushed.\n\n///\n\n/// This method is not part of tantivy's public API\n\npub fn save_metas(\n\n segment_metas: Vec<SegmentMeta>,\n\n schema: Schema,\n\n opstamp: u64,\n\n payload: Option<String>,\n\n directory: &mut Directory,\n\n) -> Result<()> {\n\n let metas = IndexMeta {\n\n segments: 
segment_metas,\n\n schema,\n\n opstamp,\n\n payload,\n\n };\n\n let mut buffer = serde_json::to_vec_pretty(&metas)?;\n\n writeln!(&mut buffer)?;\n\n directory.atomic_write(&META_FILEPATH, &buffer[..])?;\n\n debug!(\"Saved metas {:?}\", serde_json::to_string_pretty(&metas));\n\n Ok(())\n\n}\n\n\n\n// The segment update runner is in charge of processing all\n\n// of the `SegmentUpdate`s.\n\n//\n\n// All this processing happens on a single thread\n\n// consuming a common queue.\n\n#[derive(Clone)]\n\npub struct SegmentUpdater(Arc<InnerSegmentUpdater>);\n\n\n", "file_path": "src/indexer/segment_updater.rs", "rank": 26, "score": 145025.96488546714 }, { "content": "/// Advance delete for the given segment up\n\n/// to the target opstamp.\n\npub fn advance_deletes(\n\n mut segment: Segment,\n\n segment_entry: &mut SegmentEntry,\n\n target_opstamp: u64,\n\n) -> Result<()> {\n\n {\n\n if segment_entry.meta().delete_opstamp() == Some(target_opstamp) {\n\n // We are already up-to-date here.\n\n return Ok(());\n\n }\n\n\n\n let segment_reader = SegmentReader::open(&segment)?;\n\n let max_doc = segment_reader.max_doc();\n\n\n\n let mut delete_bitset: BitSet = match segment_entry.delete_bitset() {\n\n Some(previous_delete_bitset) => (*previous_delete_bitset).clone(),\n\n None => BitSet::with_capacity(max_doc as usize),\n\n };\n\n\n\n let delete_cursor = segment_entry.delete_cursor();\n", "file_path": "src/indexer/index_writer.rs", "rank": 27, "score": 145016.49620140417 }, { "content": "// writes a lowercased version of text into output.\n\nfn to_lowercase_unicode(text: &mut String, output: &mut String) {\n\n output.clear();\n\n for c in text.chars() {\n\n // Contrary to the std, we do not take care of sigma special case.\n\n // This will have an normalizationo effect, which is ok for search.\n\n output.extend(c.to_lowercase());\n\n }\n\n}\n\n\n\nimpl<TailTokenStream> TokenStream for LowerCaserTokenStream<TailTokenStream>\n\nwhere\n\n TailTokenStream: TokenStream,\n\n{\n\n fn 
token(&self) -> &Token {\n\n self.tail.token()\n\n }\n\n\n\n fn token_mut(&mut self) -> &mut Token {\n\n self.tail.token_mut()\n\n }\n", "file_path": "src/tokenizer/lower_caser.rs", "rank": 28, "score": 142766.97916441876 }, { "content": "pub fn compute_deleted_bitset(\n\n delete_bitset: &mut BitSet,\n\n segment_reader: &SegmentReader,\n\n delete_cursor: &mut DeleteCursor,\n\n doc_opstamps: &DocToOpstampMapping,\n\n target_opstamp: u64,\n\n) -> Result<bool> {\n\n let mut might_have_changed = false;\n\n\n\n #[cfg_attr(feature = \"cargo-clippy\", allow(clippy::while_let_loop))]\n\n loop {\n\n if let Some(delete_op) = delete_cursor.get() {\n\n if delete_op.opstamp > target_opstamp {\n\n break;\n\n } else {\n\n // A delete operation should only affect\n\n // document that were inserted after it.\n\n //\n\n // Limit doc helps identify the first document\n\n // that may be affected by the delete operation.\n", "file_path": "src/indexer/index_writer.rs", "rank": 29, "score": 142684.5090970942 }, { "content": "pub fn get_mergeable_segments(\n\n segment_manager: &SegmentManager,\n\n) -> (Vec<SegmentMeta>, Vec<SegmentMeta>) {\n\n let registers_lock = segment_manager.read();\n\n (\n\n registers_lock.committed.get_mergeable_segments(),\n\n registers_lock.uncommitted.get_mergeable_segments(),\n\n )\n\n}\n\n\n\nimpl SegmentManager {\n\n pub fn from_segments(\n\n segment_metas: Vec<SegmentMeta>,\n\n delete_cursor: &DeleteCursor,\n\n ) -> SegmentManager {\n\n SegmentManager {\n\n registers: RwLock::new(SegmentRegisters {\n\n uncommitted: SegmentRegister::default(),\n\n committed: SegmentRegister::new(segment_metas, delete_cursor),\n\n writing: HashSet::new(),\n", "file_path": "src/indexer/segment_manager.rs", "rank": 30, "score": 142684.5090970942 }, { "content": "fn bitpack_serialize<W: Write>(\n\n write: &mut W,\n\n bit_packer: &mut BitPacker,\n\n term_info_block_meta: &TermInfoBlockMeta,\n\n term_info: &TermInfo,\n\n) -> io::Result<()> {\n\n bit_packer.write(\n\n 
u64::from(term_info.doc_freq),\n\n term_info_block_meta.doc_freq_nbits,\n\n write,\n\n )?;\n\n bit_packer.write(\n\n term_info.postings_offset,\n\n term_info_block_meta.postings_offset_nbits,\n\n write,\n\n )?;\n\n bit_packer.write(\n\n term_info.positions_idx,\n\n term_info_block_meta.positions_idx_nbits,\n\n write,\n", "file_path": "src/termdict/term_info_store.rs", "rank": 31, "score": 139014.1887062586 }, { "content": "/// DirectoryClone\n\npub trait DirectoryClone {\n\n /// Clones the directory and boxes the clone\n\n fn box_clone(&self) -> Box<Directory>;\n\n}\n\n\n\nimpl<T> DirectoryClone for T\n\nwhere\n\n T: 'static + Directory + Clone,\n\n{\n\n fn box_clone(&self) -> Box<Directory> {\n\n Box::new(self.clone())\n\n }\n\n}\n", "file_path": "src/directory/directory.rs", "rank": 32, "score": 135327.56713858407 }, { "content": "// `drain_filter` is not stable yet.\n\n// This function is similar except that it does is not unstable, and\n\n// it does not keep the original vector ordering.\n\n//\n\n// Also, it does not \"yield\" any elements.\n\nfn unordered_drain_filter<T, P>(v: &mut Vec<T>, mut predicate: P)\n\nwhere\n\n P: FnMut(&mut T) -> bool,\n\n{\n\n let mut i = 0;\n\n while i < v.len() {\n\n if predicate(&mut v[i]) {\n\n v.swap_remove(i);\n\n } else {\n\n i += 1;\n\n }\n\n }\n\n}\n\n\n\n/// Creates a `DocSet` that iterator through the intersection of two `DocSet`s.\n\npub struct Union<TScorer, TScoreCombiner = DoNothingCombiner> {\n\n docsets: Vec<TScorer>,\n\n bitsets: Box<[TinySet; HORIZON_NUM_TINYBITSETS]>,\n\n scores: Box<[TScoreCombiner; HORIZON as usize]>,\n\n cursor: usize,\n", "file_path": "src/query/union.rs", "rank": 33, "score": 133680.99872185392 }, { "content": "/// Writer associated with the `RAMDirectory`\n\n///\n\n/// The Writer just writes a buffer.\n\n///\n\n/// # Panics\n\n///\n\n/// On drop, if the writer was left in a *dirty* state.\n\n/// That is, if flush was not called after the last call\n\n/// to write.\n\n///\n\nstruct VecWriter 
{\n\n path: PathBuf,\n\n shared_directory: InnerDirectory,\n\n data: Cursor<Vec<u8>>,\n\n is_flushed: bool,\n\n}\n\n\n\nimpl VecWriter {\n\n fn new(path_buf: PathBuf, shared_directory: InnerDirectory) -> VecWriter {\n\n VecWriter {\n\n path: path_buf,\n\n data: Cursor::new(Vec::new()),\n\n shared_directory,\n\n is_flushed: true,\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for VecWriter {\n\n fn drop(&mut self) {\n", "file_path": "src/directory/ram_directory.rs", "rank": 34, "score": 133420.74518547248 }, { "content": "#[derive(Debug, Default)]\n\nstruct MetaInformation {\n\n managed_paths: HashSet<PathBuf>,\n\n}\n\n\n", "file_path": "src/directory/managed_directory.rs", "rank": 35, "score": 133409.99894056199 }, { "content": "struct MmapCache {\n\n counters: CacheCounters,\n\n cache: HashMap<PathBuf, MmapReadOnly>,\n\n}\n\n\n\nimpl Default for MmapCache {\n\n fn default() -> MmapCache {\n\n MmapCache {\n\n counters: CacheCounters::default(),\n\n cache: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl MmapCache {\n\n /// Removes a `MmapReadOnly` entry from the mmap cache.\n\n fn discard_from_cache(&mut self, full_path: &Path) -> bool {\n\n self.cache.remove(full_path).is_some()\n\n }\n\n\n", "file_path": "src/directory/mmap_directory.rs", "rank": 36, "score": 133409.99894056199 }, { "content": "#[inline(always)]\n\npub fn fieldnorm_to_id(fieldnorm: u32) -> u8 {\n\n FIELD_NORMS_TABLE\n\n .binary_search(&fieldnorm)\n\n .unwrap_or_else(|idx| idx - 1) as u8\n\n}\n\n\n\n#[cfg_attr(feature = \"cargo-clippy\", allow(clippy::unreadable_literal))]\n\npub const FIELD_NORMS_TABLE: [u32; 256] = [\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,\n\n 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 42, 44, 46, 48, 50, 52, 54, 56, 60,\n\n 64, 68, 72, 76, 80, 84, 88, 96, 104, 112, 120, 128, 136, 144, 152, 168, 184, 200, 216, 232,\n\n 248, 264, 280, 312, 344, 376, 408, 440, 472, 504, 536, 600, 664, 728, 792, 856, 920, 984,\n\n 1_048, 
1176, 1304, 1432, 1560, 1688, 1816, 1944, 2072, 2328, 2584, 2840, 3096, 3352, 3608,\n\n 3864, 4120, 4632, 5144, 5656, 6168, 6680, 7192, 7704, 8216, 9240, 10264, 11288, 12312, 13336,\n\n 14360, 15384, 16408, 18456, 20504, 22552, 24600, 26648, 28696, 30744, 32792, 36888, 40984,\n\n 45080, 49176, 53272, 57368, 61464, 65560, 73752, 81944, 90136, 98328, 106520, 114712, 122904,\n\n 131096, 147480, 163864, 180248, 196632, 213016, 229400, 245784, 262168, 294936, 327704, 360472,\n\n 393240, 426008, 458776, 491544, 524312, 589848, 655384, 720920, 786456, 851992, 917528, 983064,\n\n 1048600, 1179672, 1310744, 1441816, 1572888, 1703960, 1835032, 1966104, 2097176, 2359320,\n\n 2621464, 2883608, 3145752, 3407896, 3670040, 3932184, 4194328, 4718616, 5242904, 5767192,\n", "file_path": "src/fieldnorm/code.rs", "rank": 37, "score": 130947.7056471164 }, { "content": "#[inline(always)]\n\npub fn u64_to_i64(val: u64) -> i64 {\n\n (val ^ HIGHEST_BIT) as i64\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod test {\n\n\n\n pub use super::serialize::test::fixed_size_test;\n\n use super::{compute_num_bits, i64_to_u64, u64_to_i64};\n\n\n\n fn test_i64_converter_helper(val: i64) {\n\n assert_eq!(u64_to_i64(i64_to_u64(val)), val);\n\n }\n\n\n\n #[test]\n\n fn test_i64_converter() {\n\n assert_eq!(i64_to_u64(i64::min_value()), u64::min_value());\n\n assert_eq!(i64_to_u64(i64::max_value()), u64::max_value());\n\n test_i64_converter_helper(0i64);\n\n test_i64_converter_helper(i64::min_value());\n", "file_path": "src/common/mod.rs", "rank": 38, "score": 130947.7056471164 }, { "content": "#[inline(always)]\n\npub fn i64_to_u64(val: i64) -> u64 {\n\n (val as u64) ^ HIGHEST_BIT\n\n}\n\n\n\n/// Reverse the mapping given by [`i64_to_u64`](./fn.i64_to_u64.html).\n", "file_path": "src/common/mod.rs", "rank": 39, "score": 130947.7056471164 }, { "content": "#[inline(always)]\n\npub fn id_to_fieldnorm(id: u8) -> u32 {\n\n FIELD_NORMS_TABLE[id as usize]\n\n}\n\n\n", "file_path": "src/fieldnorm/code.rs", "rank": 40, 
"score": 130947.7056471164 }, { "content": "/// Returns the size in bytes of a compressed block, given `num_bits`.\n\npub fn compressed_block_size(num_bits: u8) -> usize {\n\n (num_bits as usize) * COMPRESSION_BLOCK_SIZE / 8\n\n}\n\n\n\npub struct BlockEncoder {\n\n bitpacker: BitPacker4x,\n\n pub output: [u8; COMPRESSED_BLOCK_MAX_SIZE],\n\n pub output_len: usize,\n\n}\n\n\n\nimpl BlockEncoder {\n\n pub fn new() -> BlockEncoder {\n\n BlockEncoder {\n\n bitpacker: BitPacker4x::new(),\n\n output: [0u8; COMPRESSED_BLOCK_MAX_SIZE],\n\n output_len: 0,\n\n }\n\n }\n\n\n\n pub fn compress_block_sorted(&mut self, block: &[u32], offset: u32) -> (u8, &[u8]) {\n", "file_path": "src/postings/compression/mod.rs", "rank": 41, "score": 124970.38707753697 }, { "content": "pub fn serialize_vint_u32(val: u32) -> (u64, usize) {\n\n const START_2: u64 = 1 << 7;\n\n const START_3: u64 = 1 << 14;\n\n const START_4: u64 = 1 << 21;\n\n const START_5: u64 = 1 << 28;\n\n\n\n const STOP_1: u64 = START_2 - 1;\n\n const STOP_2: u64 = START_3 - 1;\n\n const STOP_3: u64 = START_4 - 1;\n\n const STOP_4: u64 = START_5 - 1;\n\n\n\n const MASK_1: u64 = 127;\n\n const MASK_2: u64 = MASK_1 << 7;\n\n const MASK_3: u64 = MASK_2 << 7;\n\n const MASK_4: u64 = MASK_3 << 7;\n\n const MASK_5: u64 = MASK_4 << 7;\n\n\n\n let val = u64::from(val);\n\n const STOP_BIT: u64 = 128u64;\n\n match val {\n", "file_path": "src/common/vint.rs", "rank": 42, "score": 124334.74125203429 }, { "content": "/// Returns the actual memory size in bytes\n\n/// required to create a table of size $2^num_bits$.\n\npub fn compute_table_size(num_bits: usize) -> usize {\n\n (1 << num_bits) * mem::size_of::<KeyValue>()\n\n}\n\n\n\n/// `KeyValue` is the item stored in the hash table.\n\n/// The key is actually a `BytesRef` object stored in an external heap.\n\n/// The `value_addr` also points to an address in the heap.\n\n///\n\n/// The key and the value are actually stored contiguously.\n\n/// For this reason, the (start, stop) 
information is actually redundant\n\n/// and can be simplified in the future\n", "file_path": "src/postings/stacker/term_hashmap.rs", "rank": 43, "score": 123168.4084405116 }, { "content": "/// Returns a Snippet\n\n///\n\n/// Takes a vector of `FragmentCandidate`s and the text.\n\n/// Figures out the best fragment from it and creates a snippet.\n\nfn select_best_fragment_combination(fragments: &[FragmentCandidate], text: &str) -> Snippet {\n\n let best_fragment_opt = fragments.iter().max_by(|left, right| {\n\n let cmp_score = left\n\n .score\n\n .partial_cmp(&right.score)\n\n .unwrap_or(Ordering::Equal);\n\n if cmp_score == Ordering::Equal {\n\n (right.start_offset, right.stop_offset).cmp(&(left.start_offset, left.stop_offset))\n\n } else {\n\n cmp_score\n\n }\n\n });\n\n if let Some(fragment) = best_fragment_opt {\n\n let fragment_text = &text[fragment.start_offset..fragment.stop_offset];\n\n let highlighted = fragment\n\n .highlighted\n\n .iter()\n\n .map(|item| {\n\n HighlightSection::new(\n\n item.start - fragment.start_offset,\n", "file_path": "src/snippet/mod.rs", "rank": 44, "score": 122398.71692382592 }, { "content": "/// Retry the logic of acquiring locks is pretty simple.\n\n/// We just retry `n` times after a given `duratio`, both\n\n/// depending on the type of lock.\n\nstruct RetryPolicy {\n\n num_retries: usize,\n\n wait_in_ms: u64,\n\n}\n\n\n\nimpl RetryPolicy {\n\n fn no_retry() -> RetryPolicy {\n\n RetryPolicy {\n\n num_retries: 0,\n\n wait_in_ms: 0,\n\n }\n\n }\n\n\n\n fn wait_and_retry(&mut self) -> bool {\n\n if self.num_retries == 0 {\n\n false\n\n } else {\n\n self.num_retries -= 1;\n\n let wait_duration = Duration::from_millis(self.wait_in_ms);\n\n thread::sleep(wait_duration);\n", "file_path": "src/indexer/directory_lock.rs", "rank": 45, "score": 122121.04406913836 }, { "content": "// `ahead` represents the offset of the block currently loaded\n\n// compared to the cursor of the actual stream.\n\n//\n\n// By contract, when this function is 
called, the current block has to be\n\n// decompressed.\n\n//\n\n// If the requested number of els ends exactly at a given block, the next\n\n// block is not decompressed.\n\nfn read_impl(\n\n mut position: &[u8],\n\n buffer: &mut [u32; 128],\n\n mut inner_offset: usize,\n\n num_bits: &[u8],\n\n output: &mut [u32],\n\n) -> usize {\n\n let mut output_start = 0;\n\n let mut output_len = output.len();\n\n let mut ahead = 0;\n\n loop {\n\n let available_len = 128 - inner_offset;\n\n if output_len <= available_len {\n\n output[output_start..].copy_from_slice(&buffer[inner_offset..][..output_len]);\n\n return ahead;\n\n } else {\n\n output[output_start..][..available_len].copy_from_slice(&buffer[inner_offset..]);\n\n output_len -= available_len;\n\n output_start += available_len;\n\n inner_offset = 0;\n", "file_path": "src/positions/reader.rs", "rank": 46, "score": 121766.11509053006 }, { "content": "/// Creates a new segment given an `Index` and a `SegmentId`\n\n///\n\n/// The function is here to make it private outside `tantivy`.\n\n/// #[doc(hidden)]\n\npub fn create_segment(index: Index, meta: SegmentMeta) -> Segment {\n\n Segment { index, meta }\n\n}\n\n\n\nimpl Segment {\n\n /// Returns the index the segment belongs to.\n\n pub fn index(&self) -> &Index {\n\n &self.index\n\n }\n\n\n\n /// Returns our index's schema.\n\n pub fn schema(&self) -> Schema {\n\n self.index.schema()\n\n }\n\n\n\n /// Returns the segment meta-information\n\n pub fn meta(&self) -> &SegmentMeta {\n\n &self.meta\n\n }\n\n\n", "file_path": "src/core/segment.rs", "rank": 47, "score": 120159.6012345212 }, { "content": "/// Intersect twos sorted arrays `left` and `right` and outputs the\n\n/// resulting array in left.\n\n///\n\n/// Returns the length of the intersection\n\nfn intersection(left: &mut [u32], right: &[u32]) -> usize {\n\n let mut left_i = 0;\n\n let mut right_i = 0;\n\n let mut count = 0;\n\n let left_len = left.len();\n\n let right_len = right.len();\n\n while left_i < left_len && 
right_i < right_len {\n\n let left_val = left[left_i];\n\n let right_val = right[right_i];\n\n if left_val < right_val {\n\n left_i += 1;\n\n } else if right_val < left_val {\n\n right_i += 1;\n\n } else {\n\n left[count] = left_val;\n\n count += 1;\n\n left_i += 1;\n\n right_i += 1;\n\n }\n\n }\n", "file_path": "src/query/phrase_query/phrase_scorer.rs", "rank": 48, "score": 119750.01157831344 }, { "content": "/// Write-once read many (WORM) abstraction for where\n\n/// tantivy's data should be stored.\n\n///\n\n/// There are currently two implementations of `Directory`\n\n///\n\n/// - The [`MMapDirectory`](struct.MmapDirectory.html), this\n\n/// should be your default choice.\n\n/// - The [`RAMDirectory`](struct.RAMDirectory.html), which\n\n/// should be used mostly for tests.\n\n///\n\npub trait Directory: DirectoryClone + fmt::Debug + Send + Sync + 'static {\n\n /// Opens a virtual file for read.\n\n ///\n\n /// Once a virtual file is open, its data may not\n\n /// change.\n\n ///\n\n /// Specifically, subsequent writes or flushes should\n\n /// have no effect on the returned `ReadOnlySource` object.\n\n fn open_read(&self, path: &Path) -> result::Result<ReadOnlySource, OpenReadError>;\n\n\n\n /// Removes a file\n\n ///\n\n /// Removing a file will not affect an eventual\n\n /// existing ReadOnlySource pointing to it.\n\n ///\n\n /// Removing a nonexistent file, yields a\n\n /// `DeleteError::DoesNotExist`.\n\n fn delete(&self, path: &Path) -> result::Result<(), DeleteError>;\n\n\n\n /// Returns true iff the file exists\n", "file_path": "src/directory/directory.rs", "rank": 49, "score": 119630.84215068576 }, { "content": "pub fn load<Item: Copy + 'static>(data: &[u8]) -> Item {\n\n assert_eq!(data.len(), std::mem::size_of::<Item>());\n\n unsafe { ptr::read_unaligned(data.as_ptr() as *const Item) }\n\n}\n\n\n\n/// The `MemoryArena`\n\npub struct MemoryArena {\n\n pages: Vec<Page>,\n\n}\n\n\n\nimpl MemoryArena {\n\n /// Creates a new memory arena.\n\n pub fn new() 
-> MemoryArena {\n\n let first_page = Page::new(0);\n\n MemoryArena {\n\n pages: vec![first_page],\n\n }\n\n }\n\n\n\n fn add_page(&mut self) -> &mut Page {\n", "file_path": "src/postings/stacker/memory_arena.rs", "rank": 50, "score": 116286.20768730169 }, { "content": "fn load_metas(directory: &Directory) -> Result<IndexMeta> {\n\n let meta_data = directory.atomic_read(&META_FILEPATH)?;\n\n let meta_string = String::from_utf8_lossy(&meta_data);\n\n serde_json::from_str(&meta_string)\n\n .map_err(|e| {\n\n DataCorruption::new(\n\n META_FILEPATH.clone(),\n\n format!(\"Meta file cannot be deserialized. {:?}.\", e),\n\n )\n\n })\n\n .map_err(From::from)\n\n}\n\n\n\n/// Search Index\n\npub struct Index {\n\n directory: ManagedDirectory,\n\n schema: Schema,\n\n num_searchers: Arc<AtomicUsize>,\n\n searcher_pool: Arc<Pool<Searcher>>,\n\n executor: Arc<Executor>,\n", "file_path": "src/core/index.rs", "rank": 51, "score": 115286.88232578048 }, { "content": "fn open_fst_index(source: ReadOnlySource) -> fst::Map {\n\n let fst = match source {\n\n ReadOnlySource::Anonymous(data) => {\n\n Fst::from_shared_bytes(data.data, data.start, data.len).expect(\"FST data is corrupted\")\n\n }\n\n #[cfg(feature = \"mmap\")]\n\n ReadOnlySource::Mmap(mmap_readonly) => {\n\n Fst::from_mmap(mmap_readonly).expect(\"FST data is corrupted\")\n\n }\n\n };\n\n fst::Map::from(fst)\n\n}\n\n\n\n/// The term dictionary contains all of the terms in\n\n/// `tantivy index` in a sorted manner.\n\n///\n\n/// The `Fst` crate is used to associate terms to their\n\n/// respective `TermOrdinal`. 
The `TermInfoStore` then makes it\n\n/// possible to fetch the associated `TermInfo`.\n\npub struct TermDictionary {\n", "file_path": "src/termdict/termdict.rs", "rank": 52, "score": 101454.96660966176 }, { "content": "/// Returns true iff the two sorted array contain a common element\n\nfn intersection_exists(left: &[u32], right: &[u32]) -> bool {\n\n let mut left_i = 0;\n\n let mut right_i = 0;\n\n while left_i < left.len() && right_i < right.len() {\n\n let left_val = left[left_i];\n\n let right_val = right[right_i];\n\n if left_val < right_val {\n\n left_i += 1;\n\n } else if right_val < left_val {\n\n right_i += 1;\n\n } else {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/query/phrase_query/phrase_scorer.rs", "rank": 63, "score": 96534.85114863486 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum State {\n\n ExcludeOne(DocId),\n\n Finished,\n\n}\n\n\n\n/// Filters a given `DocSet` by removing the docs from a given `DocSet`.\n\n///\n\n/// The excluding docset has no impact on scoring.\n\npub struct Exclude<TDocSet, TDocSetExclude> {\n\n underlying_docset: TDocSet,\n\n excluding_docset: TDocSetExclude,\n\n excluding_state: State,\n\n}\n\n\n\nimpl<TDocSet, TDocSetExclude> Exclude<TDocSet, TDocSetExclude>\n\nwhere\n\n TDocSetExclude: DocSet,\n\n{\n\n /// Creates a new `ExcludeScorer`\n\n pub fn new(\n", "file_path": "src/query/exclude.rs", "rank": 64, "score": 81785.98436628352 }, { "content": "enum State {\n\n NotStarted,\n\n Started,\n\n Finished,\n\n}\n\n\n\n/// Scorer associated to the `AllQuery` query.\n\npub struct AllScorer {\n\n state: State,\n\n doc: DocId,\n\n max_doc: DocId,\n\n}\n\n\n\nimpl DocSet for AllScorer {\n\n fn advance(&mut self) -> bool {\n\n match self.state {\n\n State::NotStarted => {\n\n self.state = State::Started;\n\n self.doc = 0;\n\n }\n", "file_path": "src/query/all_query.rs", "rank": 65, "score": 81785.98436628352 }, { "content": "struct Block {\n\n doc_ids: [DocId; COMPRESSION_BLOCK_SIZE],\n\n 
term_freqs: [u32; COMPRESSION_BLOCK_SIZE],\n\n len: usize,\n\n}\n\n\n\nimpl Block {\n\n fn new() -> Self {\n\n Block {\n\n doc_ids: [0u32; COMPRESSION_BLOCK_SIZE],\n\n term_freqs: [0u32; COMPRESSION_BLOCK_SIZE],\n\n len: 0,\n\n }\n\n }\n\n\n\n fn doc_ids(&self) -> &[DocId] {\n\n &self.doc_ids[..self.len]\n\n }\n\n\n\n fn term_freqs(&self) -> &[u32] {\n", "file_path": "src/postings/serializer.rs", "rank": 66, "score": 80687.06434660549 }, { "content": "#[derive(Default)]\n\nstruct Stats {\n\n count: usize,\n\n sum: f64,\n\n squared_sum: f64,\n\n}\n\n\n\nimpl Stats {\n\n pub fn count(&self) -> usize {\n\n self.count\n\n }\n\n\n\n pub fn mean(&self) -> f64 {\n\n self.sum / (self.count as f64)\n\n }\n\n\n\n fn square_mean(&self) -> f64 {\n\n self.squared_sum / (self.count as f64)\n\n }\n\n\n\n pub fn standard_deviation(&self) -> f64 {\n", "file_path": "examples/custom_collector.rs", "rank": 67, "score": 80687.06434660549 }, { "content": "#[derive(Debug)]\n\nenum State {\n\n RootFacetNotEmitted,\n\n UpToPosition(usize), //< we already emitted facet prefix up to &text[..cursor]\n\n Terminated,\n\n}\n\n\n\npub struct FacetTokenStream<'a> {\n\n text: &'a str,\n\n state: State,\n\n token: Token,\n\n}\n\n\n\nimpl<'a> Tokenizer<'a> for FacetTokenizer {\n\n type TokenStreamImpl = FacetTokenStream<'a>;\n\n\n\n fn token_stream(&self, text: &'a str) -> Self::TokenStreamImpl {\n\n FacetTokenStream {\n\n text,\n\n state: State::RootFacetNotEmitted, //< pos is the first char that has not been processed yet.\n\n token: Token::default(),\n", "file_path": "src/tokenizer/facet_tokenizer.rs", "rank": 68, "score": 80393.57965313477 }, { "content": "fn map_bound<TFrom, TTo, Transform: Fn(&TFrom) -> TTo>(\n\n bound: &Bound<TFrom>,\n\n transform: &Transform,\n\n) -> Bound<TTo> {\n\n use self::Bound::*;\n\n match bound {\n\n Excluded(ref from_val) => Excluded(transform(from_val)),\n\n Included(ref from_val) => Included(transform(from_val)),\n\n Unbounded => Unbounded,\n\n }\n\n}\n\n\n\n/// 
`RangeQuery` match all documents that have at least one term within a defined range.\n\n///\n\n/// Matched document will all get a constant `Score` of one.\n\n///\n\n/// # Implementation\n\n///\n\n/// The current implement will iterate over the terms within the range\n\n/// and append all of the document cross into a `BitSet`.\n", "file_path": "src/query/range_query.rs", "rank": 69, "score": 79488.83945814581 }, { "content": "struct DeltaComputer {\n\n buffer: Vec<u32>,\n\n}\n\n\n\nimpl DeltaComputer {\n\n fn new() -> DeltaComputer {\n\n DeltaComputer {\n\n buffer: vec![0u32; 512],\n\n }\n\n }\n\n\n\n fn compute_delta(&mut self, positions: &[u32]) -> &[u32] {\n\n if positions.len() > self.buffer.len() {\n\n self.buffer.resize(positions.len(), 0u32);\n\n }\n\n let mut last_pos = 0u32;\n\n for (cur_pos, dest) in positions.iter().cloned().zip(self.buffer.iter_mut()) {\n\n *dest = cur_pos - last_pos;\n\n last_pos = cur_pos;\n\n }\n", "file_path": "src/indexer/merger.rs", "rank": 70, "score": 79297.36776827779 }, { "content": "struct StatsCollector {\n\n field: Field,\n\n}\n\n\n\nimpl StatsCollector {\n\n fn with_field(field: Field) -> StatsCollector {\n\n StatsCollector { field }\n\n }\n\n}\n\n\n\nimpl Collector for StatsCollector {\n\n // That's the type of our result.\n\n // Our standard deviation will be a float.\n\n type Fruit = Option<Stats>;\n\n\n\n type Child = StatsSegmentCollector;\n\n\n\n fn for_segment(\n\n &self,\n\n _segment_local_id: u32,\n", "file_path": "examples/custom_collector.rs", "rank": 71, "score": 79297.36776827779 }, { "content": "struct InnerSchema {\n\n fields: Vec<FieldEntry>,\n\n fields_map: HashMap<String, Field>, // transient\n\n}\n\n\n\nimpl PartialEq for InnerSchema {\n\n fn eq(&self, other: &InnerSchema) -> bool {\n\n self.fields == other.fields\n\n }\n\n}\n\n\n\nimpl Eq for InnerSchema {}\n\n\n\n/// Tantivy has a very strict schema.\n\n/// You need to specify in advance, whether a field is indexed or not,\n\n/// stored or not, and 
RAM-based or not.\n\n///\n\n/// This is done by creating a schema object, and\n\n/// setting up the fields one by one.\n\n/// It is for the moment impossible to remove fields.\n", "file_path": "src/schema/schema.rs", "rank": 72, "score": 79297.36776827779 }, { "content": "struct Block {\n\n operations: Arc<Vec<DeleteOperation>>,\n\n next: NextBlock,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct DeleteCursor {\n\n block: Arc<Block>,\n\n pos: usize,\n\n}\n\n\n\nimpl DeleteCursor {\n\n /// Skips operations and position it so that\n\n /// - either all of the delete operation currently in the\n\n /// queue are consume and the next get will return None.\n\n /// - the next get will return the first operation with an\n\n /// `opstamp >= target_opstamp`.\n\n pub fn skip_to(&mut self, target_opstamp: u64) {\n\n // TODO Can be optimize as we work with block.\n\n while self.is_behind_opstamp(target_opstamp) {\n", "file_path": "src/indexer/delete_queue.rs", "rank": 73, "score": 79297.36776827779 }, { "content": "enum CapacityResult {\n\n Available(u32),\n\n NeedAlloc(u32),\n\n}\n\n\n", "file_path": "src/postings/stacker/expull.rs", "rank": 74, "score": 79081.09579020785 }, { "content": "struct Page {\n\n page_id: usize,\n\n len: usize,\n\n data: Box<[u8]>,\n\n}\n\n\n\nimpl Page {\n\n fn new(page_id: usize) -> Page {\n\n Page {\n\n page_id,\n\n len: 0,\n\n data: vec![0u8; PAGE_SIZE].into_boxed_slice(),\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n fn is_available(&self, len: usize) -> bool {\n\n len + self.len <= PAGE_SIZE\n\n }\n\n\n", "file_path": "src/postings/stacker/memory_arena.rs", "rank": 75, "score": 77987.4365994187 }, { "content": "struct StatsSegmentCollector {\n\n fast_field_reader: FastFieldReader<u64>,\n\n stats: Stats,\n\n}\n\n\n\nimpl SegmentCollector for StatsSegmentCollector {\n\n type Fruit = Option<Stats>;\n\n\n\n fn collect(&mut self, doc: u32, _score: f32) {\n\n let value = self.fast_field_reader.get(doc) as f64;\n\n self.stats.count += 1;\n\n self.stats.sum += 
value;\n\n self.stats.squared_sum += value * value;\n\n }\n\n\n\n fn harvest(self) -> <Self as SegmentCollector>::Fruit {\n\n self.stats.non_zero_count()\n\n }\n\n}\n\n\n", "file_path": "examples/custom_collector.rs", "rank": 76, "score": 77987.4365994187 }, { "content": "struct TermOrdinalMapping {\n\n per_segment_new_term_ordinals: Vec<Vec<TermOrdinal>>,\n\n}\n\n\n\nimpl TermOrdinalMapping {\n\n fn new(max_term_ords: Vec<TermOrdinal>) -> TermOrdinalMapping {\n\n TermOrdinalMapping {\n\n per_segment_new_term_ordinals: max_term_ords\n\n .into_iter()\n\n .map(|max_term_ord| vec![TermOrdinal::default(); max_term_ord as usize])\n\n .collect(),\n\n }\n\n }\n\n\n\n fn register_from_to(&mut self, segment_ord: usize, from_ord: TermOrdinal, to_ord: TermOrdinal) {\n\n self.per_segment_new_term_ordinals[segment_ord][from_ord as usize] = to_ord;\n\n }\n\n\n\n fn get_segment(&self, segment_ord: usize) -> &[TermOrdinal] {\n\n &(self.per_segment_new_term_ordinals[segment_ord])[..]\n", "file_path": "src/indexer/merger.rs", "rank": 77, "score": 77987.4365994187 }, { "content": "struct PositionComputer {\n\n // store the amount of position int\n\n // before reading positions.\n\n //\n\n // if none, position are already loaded in\n\n // the positions vec.\n\n position_to_skip: usize,\n\n position_reader: PositionReader,\n\n}\n\n\n\nimpl PositionComputer {\n\n pub fn new(position_reader: PositionReader) -> PositionComputer {\n\n PositionComputer {\n\n position_to_skip: 0,\n\n position_reader,\n\n }\n\n }\n\n\n\n pub fn add_skip(&mut self, num_skip: usize) {\n\n self.position_to_skip += num_skip;\n", "file_path": "src/postings/segment_postings.rs", "rank": 78, "score": 77987.4365994187 }, { "content": "#[derive(Default)]\n\nstruct SegmentRegisters {\n\n uncommitted: SegmentRegister,\n\n committed: SegmentRegister,\n\n writing: HashSet<SegmentId>,\n\n}\n\n\n\n/// The segment manager stores the list of segments\n\n/// as well as their state.\n\n///\n\n/// It guarantees the atomicity of 
the\n\n/// changes (merges especially)\n\n#[derive(Default)]\n\npub struct SegmentManager {\n\n registers: RwLock<SegmentRegisters>,\n\n}\n\n\n\nimpl Debug for SegmentManager {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n\n let lock = self.read();\n\n write!(\n\n f,\n\n \"{{ uncommitted: {:?}, committed: {:?} }}\",\n\n lock.uncommitted, lock.committed\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/indexer/segment_manager.rs", "rank": 79, "score": 77987.4365994187 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\nstruct DeleteMeta {\n\n num_deleted_docs: u32,\n\n opstamp: u64,\n\n}\n\n\n\n/// `SegmentMeta` contains simple meta information about a segment.\n\n///\n\n/// For instance the number of docs it contains,\n\n/// how many are deleted, etc.\n\n#[derive(Clone)]\n\npub struct SegmentMeta {\n\n tracked: TrackedObject<InnerSegmentMeta>,\n\n}\n\n\n\nimpl fmt::Debug for SegmentMeta {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n self.tracked.fmt(f)\n\n }\n\n}\n\n\n", "file_path": "src/core/segment_meta.rs", "rank": 80, "score": 77987.4365994187 }, { "content": "enum InnerNextBlock {\n\n Writer(DeleteQueue),\n\n Closed(Arc<Block>),\n\n}\n\n\n", "file_path": "src/indexer/delete_queue.rs", "rank": 81, "score": 77841.84383123176 }, { "content": "enum Element {\n\n SingleEl(UserInputAST),\n\n NormalDisjunctive(Vec<Vec<UserInputAST>>),\n\n}\n\n\n\nimpl Element {\n\n pub fn into_dnf(self) -> Vec<Vec<UserInputAST>> {\n\n match self {\n\n Element::NormalDisjunctive(conjunctions) => conjunctions,\n\n Element::SingleEl(el) => vec![vec![el]],\n\n }\n\n }\n\n}\n\n\n\nparser! 
{\n\n pub fn parse_to_ast[I]()(I) -> UserInputAST\n\n where [I: Stream<Item = char>]\n\n {\n\n (\n\n try(\n", "file_path": "src/query/query_parser/query_grammar.rs", "rank": 82, "score": 77841.84383123176 }, { "content": "struct Hit<'a> {\n\n count: u64,\n\n facet: &'a Facet,\n\n}\n\n\n\nimpl<'a> Eq for Hit<'a> {}\n\n\n\nimpl<'a> PartialEq<Hit<'a>> for Hit<'a> {\n\n fn eq(&self, other: &Hit) -> bool {\n\n self.count == other.count\n\n }\n\n}\n\n\n\nimpl<'a> PartialOrd<Hit<'a>> for Hit<'a> {\n\n fn partial_cmp(&self, other: &Hit) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl<'a> Ord for Hit<'a> {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n other.count.cmp(&self.count)\n\n }\n\n}\n\n\n", "file_path": "src/collector/facet_collector.rs", "rank": 83, "score": 77141.03447745094 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\nstruct InnerSegmentMeta {\n\n segment_id: SegmentId,\n\n max_doc: u32,\n\n deletes: Option<DeleteMeta>,\n\n}\n", "file_path": "src/core/segment_meta.rs", "rank": 84, "score": 76750.5949033141 }, { "content": "struct QuadraticProbing {\n\n hash: usize,\n\n i: usize,\n\n mask: usize,\n\n}\n\n\n\nimpl QuadraticProbing {\n\n fn compute(hash: usize, mask: usize) -> QuadraticProbing {\n\n QuadraticProbing { hash, i: 0, mask }\n\n }\n\n\n\n #[inline]\n\n fn next_probe(&mut self) -> usize {\n\n self.i += 1;\n\n (self.hash + self.i) & self.mask\n\n }\n\n}\n\n\n\npub struct Iter<'a> {\n\n hashmap: &'a TermHashMap,\n", "file_path": "src/postings/stacker/term_hashmap.rs", "rank": 85, "score": 76750.5949033141 }, { "content": "#[derive(Copy, Clone)]\n\nstruct KeyValue {\n\n key_value_addr: Addr,\n\n hash: u32,\n\n}\n\n\n\nimpl Default for KeyValue {\n\n fn default() -> Self {\n\n KeyValue {\n\n key_value_addr: Addr::null_pointer(),\n\n hash: 0u32,\n\n }\n\n }\n\n}\n\n\n\nimpl KeyValue {\n\n fn is_empty(self) -> bool {\n\n self.key_value_addr.is_null()\n\n }\n\n}\n\n\n", "file_path": 
"src/postings/stacker/term_hashmap.rs", "rank": 86, "score": 76750.5949033141 }, { "content": "struct InnerSegmentUpdater {\n\n pool: CpuPool,\n\n index: Index,\n\n segment_manager: SegmentManager,\n\n merge_policy: RwLock<Box<MergePolicy>>,\n\n merging_thread_id: AtomicUsize,\n\n merging_threads: RwLock<HashMap<usize, JoinHandle<Result<()>>>>,\n\n generation: AtomicUsize,\n\n killed: AtomicBool,\n\n stamper: Stamper,\n\n}\n\n\n\nimpl SegmentUpdater {\n\n pub fn create(\n\n index: Index,\n\n stamper: Stamper,\n\n delete_cursor: &DeleteCursor,\n\n ) -> Result<SegmentUpdater> {\n\n let segments = index.searchable_segment_metas()?;\n\n let segment_manager = SegmentManager::from_segments(segments, delete_cursor);\n", "file_path": "src/indexer/segment_updater.rs", "rank": 87, "score": 76750.5949033141 }, { "content": "#[derive(Default)]\n\nstruct InnerDeleteQueue {\n\n writer: Vec<DeleteOperation>,\n\n last_block: Option<Arc<Block>>,\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct DeleteQueue {\n\n inner: Arc<RwLock<InnerDeleteQueue>>,\n\n}\n\n\n\nimpl DeleteQueue {\n\n // Creates a new delete queue.\n\n pub fn new() -> DeleteQueue {\n\n let delete_queue = DeleteQueue {\n\n inner: Arc::default(),\n\n };\n\n\n\n let next_block = NextBlock::from(delete_queue.clone());\n\n {\n\n let mut delete_queue_wlock = delete_queue.inner.write().unwrap();\n", "file_path": "src/indexer/delete_queue.rs", "rank": 88, "score": 76750.5949033141 }, { "content": "enum BinaryOperand {\n\n Or,\n\n And,\n\n}\n\n\n\nparser! 
{\n\n fn binary_operand[I]()(I) -> BinaryOperand\n\n where [I: Stream<Item = char>] {\n\n (spaces1(),\n\n (\n\n string(\"AND\").map(|_| BinaryOperand::And)\n\n .or(string(\"OR\").map(|_| BinaryOperand::Or))\n\n ),\n\n spaces1()).map(|(_, op,_)| op)\n\n }\n\n}\n\n\n", "file_path": "src/query/query_parser/query_grammar.rs", "rank": 89, "score": 76669.86100937078 }, { "content": "/// Has length trait\n\npub trait HasLen {\n\n /// Return length\n\n fn len(&self) -> usize;\n\n\n\n /// Returns true iff empty.\n\n fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n}\n\n\n\nconst HIGHEST_BIT: u64 = 1 << 63;\n\n\n\n/// Maps a `i64` to `u64`\n\n///\n\n/// For simplicity, tantivy internally handles `i64` as `u64`.\n\n/// The mapping is defined by this function.\n\n///\n\n/// Maps `i64` to `u64` so that\n\n/// `-2^63 .. 2^63-1` is mapped\n\n/// to\n", "file_path": "src/common/mod.rs", "rank": 90, "score": 76040.98652285745 }, { "content": "/// Represents an iterable set of sorted doc ids.\n\npub trait DocSet {\n\n /// Goes to the next element.\n\n /// `.advance(...)` needs to be called a first time to point to the correct\n\n /// element.\n\n fn advance(&mut self) -> bool;\n\n\n\n /// After skipping, position the iterator in such a way that `.doc()`\n\n /// will return a value greater than or equal to target.\n\n ///\n\n /// SkipResult expresses whether the `target value` was reached, overstepped,\n\n /// or if the `DocSet` was entirely consumed without finding any value\n\n /// greater or equal to the `target`.\n\n ///\n\n /// WARNING: Calling skip always advances the docset.\n\n /// More specifically, if the docset is already positionned on the target\n\n /// skipping will advance to the next position and return SkipResult::Overstep.\n\n ///\n\n /// If `.skip_next()` oversteps, then the docset must be positionned correctly\n\n /// on an existing document. 
In other words, `.doc()` should return the first document\n\n /// greater than `DocId`.\n", "file_path": "src/docset.rs", "rank": 91, "score": 76040.98652285745 }, { "content": "#[test]\n\n#[ignore]\n\n#[cfg(feature = \"mmap\")]\n\nfn test_indexing() {\n\n let mut schema_builder = Schema::builder();\n\n\n\n let id_field = schema_builder.add_u64_field(\"id\", INT_INDEXED);\n\n let multiples_field = schema_builder.add_u64_field(\"multiples\", INT_INDEXED);\n\n let schema = schema_builder.build();\n\n\n\n let index = Index::create_from_tempdir(schema).unwrap();\n\n\n\n let mut rng = thread_rng();\n\n\n\n let mut index_writer = index.writer_with_num_threads(3, 120_000_000).unwrap();\n\n\n\n let mut committed_docs: HashSet<u64> = HashSet::new();\n\n let mut uncommitted_docs: HashSet<u64> = HashSet::new();\n\n\n\n for _ in 0..200 {\n\n let random_val = rng.gen_range(0, 20);\n\n if random_val == 0 {\n\n index_writer.commit().expect(\"Commit failed\");\n", "file_path": "src/functional_test.rs", "rank": 92, "score": 76011.94627239468 }, { "content": "/// Emits all of the offsets where a codepoint starts\n\n/// or a codepoint ends.\n\n///\n\n/// By convention, we emit [0] for the empty string.\n\nstruct CodepointFrontiers<'a> {\n\n s: &'a str,\n\n next_el: Option<usize>,\n\n}\n\n\n\nimpl<'a> CodepointFrontiers<'a> {\n\n fn for_str(s: &'a str) -> Self {\n\n CodepointFrontiers {\n\n s,\n\n next_el: Some(0),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for CodepointFrontiers<'a> {\n\n type Item = usize;\n\n\n\n fn next(&mut self) -> Option<usize> {\n\n self.next_el.map(|offset| {\n\n if self.s.is_empty() {\n", "file_path": "src/tokenizer/ngram_tokenizer.rs", "rank": 93, "score": 75831.10330859186 }, { "content": "#[derive(Clone)]\n\nstruct BoxableTokenizer<A>(A)\n\nwhere\n\n A: for<'a> Tokenizer<'a> + Send + Sync;\n\n\n\nimpl<A> BoxedTokenizer for BoxableTokenizer<A>\n\nwhere\n\n A: 'static + Send + Sync + for<'a> Tokenizer<'a>,\n\n{\n\n fn token_stream<'a>(&self, text: &'a str) 
-> Box<TokenStream + 'a> {\n\n Box::new(self.0.token_stream(text))\n\n }\n\n\n\n fn token_stream_texts<'b>(&self, texts: &'b [&'b str]) -> Box<TokenStream + 'b> {\n\n assert!(!texts.is_empty());\n\n if texts.len() == 1 {\n\n Box::new(self.0.token_stream(texts[0]))\n\n } else {\n\n let mut offsets = vec![];\n\n let mut total_offset = 0;\n\n for &text in texts {\n", "file_path": "src/tokenizer/tokenizer.rs", "rank": 94, "score": 75244.3948095569 }, { "content": "/// `TokenStream` is the result of the tokenization.\n\n///\n\n/// It consists consumable stream of `Token`s.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// extern crate tantivy;\n\n/// use tantivy::tokenizer::*;\n\n///\n\n/// # fn main() {\n\n/// let tokenizer = SimpleTokenizer\n\n/// .filter(RemoveLongFilter::limit(40))\n\n/// .filter(LowerCaser);\n\n/// let mut token_stream = tokenizer.token_stream(\"Hello, happy tax payer\");\n\n/// {\n\n/// let token = token_stream.next().unwrap();\n\n/// assert_eq!(&token.text, \"hello\");\n\n/// assert_eq!(token.offset_from, 0);\n\n/// assert_eq!(token.offset_to, 5);\n\n/// assert_eq!(token.position, 0);\n\n/// }\n\n/// {\n\n/// let token = token_stream.next().unwrap();\n\n/// assert_eq!(&token.text, \"happy\");\n\n/// assert_eq!(token.offset_from, 7);\n\n/// assert_eq!(token.offset_to, 12);\n\n/// assert_eq!(token.position, 1);\n\n/// }\n\n/// # }\n\n/// ```\n\n///\n\npub trait TokenStream {\n\n /// Advance to the next token\n\n ///\n\n /// Returns false if there are no other tokens.\n\n fn advance(&mut self) -> bool;\n\n\n\n /// Returns a reference to the current token.\n\n fn token(&self) -> &Token;\n\n\n\n /// Returns a mutable reference to the current token.\n\n fn token_mut(&mut self) -> &mut Token;\n\n\n\n /// Helper to iterate over tokens. 
It\n\n /// simply combines a call to `.advance()`\n\n /// and `.token()`.\n\n ///\n\n /// ```\n\n /// # extern crate tantivy;\n\n /// # use tantivy::tokenizer::*;\n\n /// #\n", "file_path": "src/tokenizer/tokenizer.rs", "rank": 95, "score": 74743.2157195535 }, { "content": "pub trait SerializableSegment {\n\n /// Writes a view of a segment by pushing information\n\n /// to the `SegmentSerializer`.\n\n ///\n\n /// # Returns\n\n /// The number of documents in the segment.\n\n fn write(&self, serializer: SegmentSerializer) -> Result<u32>;\n\n}\n", "file_path": "src/core/segment.rs", "rank": 96, "score": 74733.4410255313 }, { "content": "pub trait QueryClone {\n\n fn box_clone(&self) -> Box<Query>;\n\n}\n\n\n\nimpl<T> QueryClone for T\n\nwhere\n\n T: 'static + Query + Clone,\n\n{\n\n fn box_clone(&self) -> Box<Query> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl Query for Box<Query> {\n\n fn weight(&self, searcher: &Searcher, scoring_enabled: bool) -> Result<Box<Weight>> {\n\n self.as_ref().weight(searcher, scoring_enabled)\n\n }\n\n\n\n fn count(&self, searcher: &Searcher) -> Result<usize> {\n\n self.as_ref().count(searcher)\n", "file_path": "src/query/query.rs", "rank": 97, "score": 74733.4410255313 }, { "content": "#[test]\n\nfn test_map_multithread() {\n\n let result: Vec<usize> = Executor::multi_thread(3, \"search-test\")\n\n .map(|i| Ok(i * 2), 0..10)\n\n .unwrap();\n\n assert_eq!(result.len(), 10);\n\n for i in 0..10 {\n\n assert_eq!(result[i], i * 2);\n\n }\n\n}\n", "file_path": "src/core/executor.rs", "rank": 98, "score": 74707.93860881317 }, { "content": "fn perform_merge(\n\n index: &Index,\n\n mut segment_entries: Vec<SegmentEntry>,\n\n mut merged_segment: Segment,\n\n target_opstamp: u64,\n\n) -> Result<SegmentEntry> {\n\n // first we need to apply deletes to our segment.\n\n\n\n // TODO add logging\n\n let schema = index.schema();\n\n\n\n for segment_entry in &mut segment_entries {\n\n let segment = index.segment(segment_entry.meta().clone());\n\n 
advance_deletes(segment, segment_entry, target_opstamp)?;\n\n }\n\n\n\n let delete_cursor = segment_entries[0].delete_cursor().clone();\n\n\n\n let segments: Vec<Segment> = segment_entries\n\n .iter()\n", "file_path": "src/indexer/segment_updater.rs", "rank": 99, "score": 74707.93860881317 } ]
Rust
src/content.rs
silvrwolfboy/hubcaps
f173a3be1e5135b389587afe355b49103a49f8d8
use std::fmt; use std::ops; use serde::Deserialize; use serde::de::{self, Visitor}; use crate::utils::{percent_encode, PATH}; use crate::{Future, Github, Stream}; pub struct Content { github: Github, owner: String, repo: String, } impl Content { #[doc(hidden)] pub fn new<O, R>(github: Github, owner: O, repo: R) -> Self where O: Into<String>, R: Into<String>, { Content { github, owner: owner.into(), repo: repo.into(), } } fn path(&self, location: &str) -> String { let location = percent_encode(location.as_ref(), PATH); format!("/repos/{}/{}/contents{}", self.owner, self.repo, location) } pub fn get(&self, location: &str) -> Future<Contents> { self.github.get(&self.path(location)) } pub fn file(&self, location: &str) -> Future<File> { self.github.get(&self.path(location)) } pub fn root(&self) -> Stream<DirectoryItem> { self.iter("/") } pub fn iter(&self, location: &str) -> Stream<DirectoryItem> { self.github.get_stream(&self.path(location)) } } #[derive(Debug, Deserialize)] #[serde(rename_all = "snake_case", tag = "type")] pub enum Contents { File(File), Symlink(Symlink), Submodule(Submodule), } #[derive(Debug, Deserialize)] #[serde(rename_all = "snake_case")] pub enum Encoding { Base64, } #[derive(Debug, Deserialize)] pub struct File { pub encoding: Encoding, pub size: u32, pub name: String, pub path: String, pub content: DecodedContents, pub sha: String, pub url: String, pub git_url: String, pub html_url: String, pub download_url: String, pub _links: Links, } #[derive(Debug, Deserialize)] pub struct DirectoryItem { #[serde(rename = "type")] pub _type: String, pub size: u32, pub name: String, pub path: String, pub sha: String, pub url: String, pub git_url: String, pub html_url: String, pub download_url: Option<String>, pub _links: Links, } #[derive(Debug, Deserialize)] pub struct Symlink { pub target: String, pub size: u32, pub name: String, pub path: String, pub sha: String, pub url: String, pub git_url: String, pub html_url: String, pub download_url: String, pub 
_links: Links, } #[derive(Debug, Deserialize)] pub struct Submodule { pub submodule_git_url: String, pub size: u32, pub name: String, pub path: String, pub sha: String, pub url: String, pub git_url: String, pub html_url: String, pub download_url: Option<String>, pub _links: Links, } #[derive(Debug, Deserialize)] pub struct Links { pub git: String, #[serde(rename = "self")] pub _self: String, pub html: String, } #[derive(Debug)] pub struct DecodedContents(Vec<u8>); impl Into<Vec<u8>> for DecodedContents { fn into(self) -> Vec<u8> { self.0 } } impl AsRef<[u8]> for DecodedContents { fn as_ref(&self) -> &[u8] { &self.0 } } impl ops::Deref for DecodedContents { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.0 } } impl<'de> Deserialize<'de> for DecodedContents { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { struct DecodedContentsVisitor; impl<'de> Visitor<'de> for DecodedContentsVisitor { type Value = DecodedContents; fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "base64 string") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: de::Error, { let v = v.replace("\n", ""); let decoded = base64::decode_config(&v, base64::STANDARD).map_err(|e| match e { base64::DecodeError::InvalidLength => { E::invalid_length(v.len(), &"invalid base64 length") } base64::DecodeError::InvalidByte(offset, byte) => E::invalid_value( de::Unexpected::Bytes(&[byte]), &format!("valid base64 character at offset {}", offset).as_str(), ), base64::DecodeError::InvalidLastSymbol(offset, byte) => E::invalid_value( de::Unexpected::Bytes(&[byte]), &format!("valid last base64 character at offset {}", offset).as_str(), ), })?; Ok(DecodedContents(decoded)) } } deserializer.deserialize_str(DecodedContentsVisitor) } }
use std::fmt; use std::ops; use serde::Deserialize; use serde::de::{self, Visitor}; use crate::utils::{percent_encode, PATH}; use crate::{Future, Github, Stream}; pub struct Content { github: Github, owner: String, repo: String, } impl Content { #[doc(hidden)] pub fn new<O, R>(github: Github, owner: O, repo: R) -> Self where O: Into<String>, R: Into<String>, { Content { github, owner: owner.into(), repo: repo.into(), } } fn path(&self, location: &str) -> String { let location = percent_encode(location.as_ref(), PATH); format!("/repos/{}/{}/contents{}", self.owner, self.repo, location) } pub fn get(&self, location: &str) -> Future<Contents> { self.github.get(&self.path(location)) } pub fn file(&self, location: &str) -> Future<File> { self.github.get(&self.path(location)) } pub fn root(&self) -> Stream<DirectoryItem> { self.iter("/") } pub fn iter(&self, location: &str) -> Stream<DirectoryItem> { self.github.get_stream(&self.path(location)) } } #[derive(Debug, Deserialize)] #[serde(rename_all = "snake_case", tag = "type")] pub enum Contents { File(File), Symlink(Symlink), Submodule(Submodule), } #[derive(Debug, Deserialize)] #[serde(rename_all = "snake_case")] pub enum Encoding { Base64, } #[derive(Debug, Deserialize)] pub struct File { pub encoding: Encoding, pub size: u32, pub name: String, pub path: String, pub content: DecodedContents, pub sha: String, pub url: String, pub git_url: String, pub html_url: String, pub download_url: String, pub _links: Links, } #[derive(Debug, Deserialize)] pub struct DirectoryItem { #[serde(rename = "type")] pub _type: String, pub size: u32, pub name: String, pub path: String, pub sha: String, pub url: String, pub git_url: String, pub html_url: String, pub download_url: Option<String>, pub _links: Links, } #[derive(Debug, Deserialize)] pub struct Symlink { pub target: String, pub size: u32, pub name: String, pub path: String, pub sha: String, pub url: String, pub git_url: String, pub html_url: String, pub download_url: String, pub 
_links: Links, } #[derive(Debug, Deserialize)] pub struct Submodule { pub submodule_git_url: String, pub size: u32, pub name: String, pub path: String, pub sha: String, pub url: String, pub git_url: String, pub html_url: String, pub download_url: Option<String>, pub _links: Links, } #[derive(Debug, Deserialize)] pub struct Links { pub git: String, #[serde(rename = "self")] pub _self: String, pub html: String, } #[derive(Debug)] pub struct DecodedContents(Vec<u8>); impl Into<Vec<u8>> for DecodedContents { fn into(self) -> Vec<u8> { self.0 } } impl AsRef<[u8]> for DecodedContents { fn as_ref(&self) -> &[u8] { &self.0 } } impl ops::Deref for DecodedContents { type Target = [u8]; fn deref(&self) -> &Self::Target { &self.0 } } impl<'de> Deserialize<'de> for DecodedContents {
}
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { struct DecodedContentsVisitor; impl<'de> Visitor<'de> for DecodedContentsVisitor { type Value = DecodedContents; fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "base64 string") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: de::Error, { let v = v.replace("\n", ""); let decoded = base64::decode_config(&v, base64::STANDARD).map_err(|e| match e { base64::DecodeError::InvalidLength => { E::invalid_length(v.len(), &"invalid base64 length") } base64::DecodeError::InvalidByte(offset, byte) => E::invalid_value( de::Unexpected::Bytes(&[byte]), &format!("valid base64 character at offset {}", offset).as_str(), ), base64::DecodeError::InvalidLastSymbol(offset, byte) => E::invalid_value( de::Unexpected::Bytes(&[byte]), &format!("valid last base64 character at offset {}", offset).as_str(), ), })?; Ok(DecodedContents(decoded)) } } deserializer.deserialize_str(DecodedContentsVisitor) }
function_block-full_function
[ { "content": "fn var(name: &str) -> Result<String> {\n\n if let Some(v) = env::var(name).ok() {\n\n Ok(v)\n\n } else {\n\n Err(format!(\"example missing {}\", name).into())\n\n }\n\n}\n\n\n\nconst USER_AGENT: &str = concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\"));\n\n\n", "file_path": "examples/invitations.rs", "rank": 0, "score": 178819.3265345446 }, { "content": "fn var(name: &str) -> Result<String> {\n\n if let Some(v) = env::var(name).ok() {\n\n Ok(v)\n\n } else {\n\n Err(format!(\"example missing {}\", name).into())\n\n }\n\n}\n\n\n\nconst USER_AGENT: &str = concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\"));\n\n\n", "file_path": "examples/checks.rs", "rank": 1, "score": 178819.3265345446 }, { "content": "#[doc(hidden)] // public for doc testing only\n\npub fn cache_path<S: AsRef<OsStr>>(dir: &Path, uri: &str, extension: S) -> PathBuf {\n\n let uri = uri.parse::<Uri>().expect(\"Expected a URI\");\n\n let mut path = dir.to_path_buf();\n\n path.push(\"v1\");\n\n path.push(uri.scheme_part().expect(\"no URI scheme\").as_str()); // https\n\n path.push(uri.authority_part().expect(\"no URI authority\").as_str()); // api.github.com\n\n path.push(Path::new(&uri.path()[1..])); // users/dwijnand/repos\n\n if let Some(query) = uri.query() {\n\n path.push(hash1(query, DefaultHasher::new())); // fa269019d5035d5f\n\n }\n\n path.set_extension(extension); // .json\n\n path\n\n}\n\n\n", "file_path": "src/http_cache.rs", "rank": 2, "score": 148053.99662727758 }, { "content": "fn next_link(l: &Link) -> Option<String> {\n\n l.values()\n\n .into_iter()\n\n .find(|v| v.rel().unwrap_or(&[]).get(0) == Some(&RelationType::Next))\n\n .map(|v| v.link().to_owned())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 142743.22302800056 }, { "content": "fn read_to_string<P: AsRef<Path>>(path: P) -> Result<String> {\n\n trace!(\"reading path: {}\", path.as_ref().display());\n\n fs::read_to_string(path).map_err(Error::from)\n\n}\n\n\n", 
"file_path": "src/http_cache.rs", "rank": 4, "score": 136944.49740798664 }, { "content": "#[cfg(feature = \"httpcache\")]\n\ntype HeaderValues = (Option<u32>, Option<u32>, Option<Vec<u8>>);\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 135935.031624488 }, { "content": "#[doc(hidden)] // public for doc testing only\n\npub fn u64_to_padded_hex(x: u64) -> String {\n\n format!(\"{:016x}\", x)\n\n}\n", "file_path": "src/http_cache.rs", "rank": 6, "score": 124823.1619387835 }, { "content": "pub fn test_home() -> PathBuf {\n\n test_root().join(\"home\")\n\n}\n", "file_path": "tests/testkit.rs", "rank": 7, "score": 120292.45450450273 }, { "content": "#[cfg(not(feature = \"httpcache\"))]\n\ntype HeaderValues = (Option<u32>, Option<u32>);\n", "file_path": "src/lib.rs", "rank": 8, "score": 108908.24975311091 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n if let Some(file) = rt\n\n .block_on(\n\n github\n\n .repo(\"softprops\", \"hubcaps\")\n\n .git()\n\n .tree(\"master\", true),\n\n )?\n\n .tree\n\n .iter()\n\n .find(|file| file.path == \"README.md\")\n\n {\n", "file_path": "examples/git.rs", "rank": 9, "score": 80189.12840435375 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n\n\n let repo = github.repo(\"softprops\", \"hubcaps\");\n\n\n\n println!(\"License file:\");\n\n let license = rt.block_on(repo.content().file(\"LICENSE\"))?;\n\n println!(\"{}\", str::from_utf8(&license.content).unwrap());\n\n\n\n println!(\"Directory 
contents stream:\");\n\n rt.block_on(repo.content().iter(\"/examples\").for_each(|item| {\n\n println!(\" {}\", item.path);\n\n Ok(())\n", "file_path": "examples/content.rs", "rank": 10, "score": 80189.12840435375 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n let handle = rt.executor();\n\n rt.block_on(\n\n github\n\n .user_repos(\"softprops\")\n\n .iter(&Default::default())\n\n .for_each(move |repo| {\n\n println!(\"{}\", repo.name);\n\n let f = repo.languages(github.clone()).map(|langs| {\n\n for (language, bytes_of_code) in langs {\n\n println!(\"{}: {} bytes\", language, bytes_of_code)\n\n }\n", "file_path": "examples/repos.rs", "rank": 11, "score": 79957.90459492442 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n for diff in rt.block_on(github.repo(\"rust-lang\", \"rust\").pulls().get(49536).files())? 
{\n\n println!(\"{:#?}\", diff);\n\n }\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/pull_files.rs", "rank": 12, "score": 77206.07969256867 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n println!(\"repo search results\");\n\n // https://developer.github.com/v3/search/#parameters\n\n rt.block_on(\n\n github\n\n .search()\n\n .repos()\n\n .iter(\n\n \"user:softprops hubcaps\",\n\n &SearchReposOptions::builder().per_page(100).build(),\n\n )\n\n .for_each(|repo| {\n\n println!(\"{}\", repo.full_name);\n\n Ok(())\n\n }),\n\n )?;\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/search_repos.rs", "rank": 13, "score": 76867.08450134347 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n env::var(\"GITHUB_TOKEN\")\n\n .ok()\n\n .map(|token| Credentials::Token(token)),\n\n )?;\n\n\n\n let first_commit = rt.block_on(\n\n github\n\n .repo(\"softprops\", \"hubcaps\")\n\n .commits()\n\n .get(\"1758957ddab20ba17a1fa501f31932d1a9d96f78\"),\n\n )?;\n\n println!(\"Check out the first commit: {:#?}\", first_commit);\n\n\n\n println!(\"Here are some more recent commits:\");\n\n let commits = rt.block_on(github.repo(\"softprops\", \"hubcaps\").commits().list())?;\n\n for commit in commits {\n\n println!(\" - {}\", commit.author.login);\n\n }\n\n println!(\"Thank you for your help!\");\n\n Ok(())\n\n}\n", "file_path": "examples/repo_commits.rs", "rank": 14, "score": 76867.08450134347 }, { "content": "fn test_root() -> PathBuf {\n\n 
let root = global_test_root();\n\n\n\n static NEXT_TEST_NUM: AtomicUsize = AtomicUsize::new(0);\n\n thread_local!(static TEST_NUM: usize = NEXT_TEST_NUM.fetch_add(1, Ordering::SeqCst));\n\n let root = root.join(&TEST_NUM.with(|my_id| format!(\"t{}\", my_id)));\n\n\n\n thread_local!(static TEST_ROOT_INIT: Cell<bool> = Cell::new(false));\n\n TEST_ROOT_INIT.with(|i| {\n\n if i.get() {\n\n return;\n\n }\n\n i.set(true);\n\n if root.exists() {\n\n fs::remove_dir_all(&root).expect(\"removing root\");\n\n debug!(\"deleted root {}\", root.display());\n\n }\n\n });\n\n\n\n root\n\n}\n\n\n", "file_path": "tests/testkit.rs", "rank": 15, "score": 74337.66329744467 }, { "content": "fn global_test_root() -> PathBuf {\n\n let mut path = env::current_exe().unwrap();\n\n path.pop(); // chop off exe name\n\n path.pop(); // chop off 'debug'\n\n\n\n // If `cargo test` is run manually then our path looks like\n\n // `target/debug/foo`, in which case our `path` is already pointing at\n\n // `target`. If, however, `cargo test --target $target` is used then the\n\n // output is `target/$target/debug/foo`, so our path is pointing at\n\n // `target/$target`. 
Here we conditionally pop the `$target` name.\n\n if path.file_name().and_then(|s| s.to_str()) != Some(\"target\") {\n\n path.pop();\n\n }\n\n\n\n path.join(\"int-test\")\n\n}\n\n\n", "file_path": "tests/testkit.rs", "rank": 16, "score": 71733.08327501919 }, { "content": "fn hash1<A: Hash, H: Hasher>(x: A, mut hasher: H) -> String {\n\n x.hash(&mut hasher);\n\n u64_to_padded_hex(hasher.finish())\n\n}\n\n\n\n/// Construct a 0-padded hex string from a u64.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use hubcaps::http_cache::u64_to_padded_hex;\n\n/// assert_eq!(u64_to_padded_hex(0), \"0000000000000000\");\n\n/// assert_eq!(u64_to_padded_hex(u64::max_value()), \"ffffffffffffffff\");\n\n/// ```\n", "file_path": "src/http_cache.rs", "rank": 17, "score": 68751.80824908246 }, { "content": "#[derive(Debug)]\n\nstruct ExpiringJWTCredential {\n\n token: String,\n\n created_at: time::Instant,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 18, "score": 57029.79673340387 }, { "content": "#[derive(Serialize)]\n\nstruct JWTCredentialClaim {\n\n iat: u64,\n\n exp: u64,\n\n iss: u64,\n\n}\n\n\n\nimpl ExpiringJWTCredential {\n\n fn calculate(app_id: u64, private_key: &[u8]) -> Result<ExpiringJWTCredential> {\n\n // SystemTime can go backwards, Instant can't, so always use\n\n // Instant for ensuring regular cycling.\n\n let created_at = time::Instant::now();\n\n let now = time::SystemTime::now()\n\n .duration_since(time::UNIX_EPOCH)\n\n .unwrap();\n\n let expires = now + MAX_JWT_TOKEN_LIFE;\n\n\n\n let payload = JWTCredentialClaim {\n\n iat: now.as_secs(),\n\n exp: expires.as_secs(),\n\n iss: app_id,\n", "file_path": "src/lib.rs", "rank": 19, "score": 57029.79673340387 }, { "content": "#[doc(hidden)]\n\npub trait HttpCacheClone {\n\n #[doc(hidden)]\n\n fn box_clone(&self) -> BoxedHttpCache;\n\n}\n\n\n\nimpl<T> HttpCacheClone for T\n\nwhere\n\n T: 'static + HttpCache + Clone + Send,\n\n{\n\n fn box_clone(&self) -> BoxedHttpCache {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n", 
"file_path": "src/http_cache.rs", "rank": 20, "score": 51527.74507185098 }, { "content": "Many changes were made to transition into using serde as a serialization backend and to focus on making interfaces more consistent across the board. A more flexible interface for authenticating requests was added as well as a new interface for requesting organization repository listings. Relevant itemized changes are listed below.\n\n\n\n* port serialization from `rustc-serialize` to `serde`!\n\n* as a result of the serde port, `Error::{Decoding, Encoding}` which were wrappers around rustc-serialize error types, were removed and replaced with a unified `Error::Codec` which wraps serde's error type\n\n* renamed `hubcaps::statuses::State` to `hubcaps::StatusState`\n\n* added `payload` field to `hubcaps::Deployment` represented as a `serde_json::Value`\n\n* added `content_type` field to `hubcaps::GistFile` represented as `String`\n\n* added `truncated` field to `hubcaps::Gist` represented as an `bool` and updated `truncated` field of `hubcaps::GistFile` to be `Option<bool>` (this field is omitted in gist listing responses)\n\n* introduces `hubcaps::Credentials` as the means of authenticating with the Github api. A `Credentials` value is needed to instantiate a `Github` instance. This is a breaking change from the previous `Option<String>` token api, with a more flexible set options. `hubcaps::Credentials::{None, Token, Client}`. `hubcaps::Credentials` implements `Default` returning `hubcaps::Credentials::None`\n\n* `hubcaps::Error` enum now implements `std::error::Error`\n\n* pull request and issue listing fn's now both take options structs. This is a breaking change.\n\n* repo listing fn's now take option structs. This is a breaking change.\n\n* gist listing fn's now take option structs. 
This is a breaking change.\n\n* added support for fetching organization repository listings [via @carols10cents](https://github.com/softprops/hubcaps/pull/28)\n\n\n", "file_path": "CHANGELOG.md", "rank": 21, "score": 50318.44168566051 }, { "content": "# 0.4.6\n\n\n\n* add support for pull request label deserialization and pull request issue interaction\n\n\n\n```rust\n\ngithub.repo(\"you\", \"repo\")\n\n .pulls()\n\n .get(number)\n\n .get()\n\n .inspect(|&pull| println!(\"{:#?}\",pull.labels))\n\n\n\n...\n\n\n\ngithub.repo(\"you\", \"repo\")\n\n .pulls()\n\n .get(number)\n\n .labels()\n\n .add(vec![\"enhancement\"])\n\n```\n\n\n\n# 0.4.5\n\n\n\n* add support for iterating over a stream of repo issues `github.repo(.., ..).issues().iter(opts)`\n\n* support anonymous gist owners [#111](https://github.com/softprops/hubcaps/pull/111)\n\n\n\n# 0.4.4\n\n\n\n* fix issue with stream pagination [#108](https://github.com/softprops/hubcaps/pull/108)\n\n* implement stream iter for repo labels [#110](https://github.com/softprops/hubcaps/pull/110)\n\n* issue body is now an `Option<String>` type [#107](https://github.com/softprops/hubcaps/pull/107)\n\n* upgrade log dependency `0.3` => `0.4`\n\n\n\n# 0.4.3\n\n\n\n* fixed url bug with language looking for repositories\n\n* fixed url bug with iter based pagination\n\n* introduce new ErrorKind::RateLimit error for better rate limit detection\n\n\n\n# 0.4.2\n\n\n\n* add transparent handling of 307 temporary redirect\n\n\n\n# 0.4.1\n\n\n\n* add transparent handling of 301 permanent moves for repo renames\n\n\n\n# 0.4.0\n\n\n\n* upgrade to async hyper (0.11)\n\n* begin [stars](https://developer.github.com/v3/activity/starring) interface\n\n\n", "file_path": "CHANGELOG.md", "rank": 22, "score": 50317.779465854976 }, { "content": "# 0.3.9\n\n\n\n* add support for fetching a single repo by name\n\n\n\n# 0.3.8\n\n\n\n* add support for org repo creation\n\n\n\n# 0.3.7\n\n\n\n* add support for updating a repository branch's protection\n\n\n\n# 
0.3.6\n\n\n\n* added `per_page` to various repo list builder interfaces\n\n* fixed org list builder's type filter to use org repo type\n\n\n\n# 0.3.5\n\n\n\n* hubcaps::git::GitFile's now have an optional url because commits types don't\n\n have urls.\n\n\n\n# 0.3.4\n\n\n\n* added git tree and blob fetching interfaces\n\n\n\n# 0.3.3\n\n\n\n* added org repos interface\n\n\n\n# 0.3.2\n\n\n\n* use error_chain to generate error types\n\n* add support for posting issue comments [#71](https://github.com/softprops/hubcaps/pull/71)\n\n* add support for repo teams\n\n* add team permissions\n\n* add iter support to branches, repos, pulls, and teams\n\n\n\n# 0.3.1\n\n\n\n* fix order of Iter traversal\n\n\n\n# 0.3.0\n\n\n\n* added support for repo hooks\n\n* `Github::new` now takes an owned reference to a hyper::Client. this makes it possible\n\n to pass a github instance into a threaded context.\n\n* upgrade to serde 0.9 (and now unneeded build.rs machinery)\n\n* sizable code restructure to support scalability in future growth. move foo.rs modules to foo/mod.rs files. moved respective rep.rs reps into mods\n\n* the effect of the above is that everything may no longer be accessible via the top level `hubcaps` module. For instance, in the past you would be able to to access `hubcaps::Pull` directly, now you would access it via is api category `hubcaps::pulls::Pull`.\n\n* update hyper to 0.10. 
the implications are that you now need to bring your own tls-configured hyper client\n\n\n\n# 0.2.8\n\n\n\n* expose more pub fields on pull commits\n\n\n\n# 0.2.7\n\n\n\n* added support for listing pull commits\n\n* added support for returning an iterator over all pull commits\n\n\n\n# 0.2.6\n\n\n\n* added support for listing issue/pull comments\n\n* added support for listing review comments\n\n\n\n# 0.2.5\n\n\n\n* added support for search issues api\n\n* add partial support for new Iter type which serves as an transparent iterator over pages of results\n\n\n", "file_path": "CHANGELOG.md", "rank": 23, "score": 50316.15580087746 }, { "content": "# 0.4.10\n\n\n\n* added ability to post review comments [#142](https://github.com/softprops/hubcaps/pull/142)\n\n* added interfaces for [notifications apis](https://developer.github.com/v3/activity/notifications/) [#146](https://github.com/softprops/hubcaps/pull/146)\n\n\n\n```rust\n\ngithub.repo(\"you\", \"repo\")\n\n .activity()\n\n .notifications()\n\n .list(&Default::default())\n\n```\n\n\n\n* added interfaces for [traffic apis](https://developer.github.com/v3/repos/traffic/) [#145](https://github.com/softprops/hubcaps/pull/145)\n\n\n\n```rust\n\ngithub.repo(\"you\", \"repo\")\n\n .traffic()\n\n .clones(TimeUnit::Day)\n\n```\n\n\n\n* added interfaces for getting the latest release and release by tag [#147](https://github.com/softprops/hubcaps/pull/147)\n\n\n\n```rust\n\ngithub.repo(\"you\", \"repo\")\n\n .releases()\n\n .latest()\n\n```\n\n\n\n# 0.4.9\n\n\n\n* add the ability to delete a git ref (tag, branch ect)\n\n\n\n```rust\n\ngithub.repo(\"you\", \"repo\")\n\n .git()\n\n .delete_reference(\"heads/awesome-feature\")\n\n```\n\n\n\n# 0.4.8\n\n\n\n* fixed bug with `hubcaps::search::IssueItem.repo_tuple()`\n\n\n\n# 0.4.7\n\n\n\n* added assignee manage interfaces to pull request and issues interfaces\n\n* deserialize issue assignees\n\n\n\n```rust\n\ngithub.repo(\"you\", \"repo\")\n\n .pulls()\n\n .get(number)\n\n 
.assignees()\n\n .add(vec![\"your-github-login\"])\n\n```\n\n\n\n* introduced a minor ergonomic improvement in Github instance creation. Credentials\n\n are now provided as `Into<Option<Credentials>>` meaning you no longer have to wrap\n\n credentials with `Some(...)` when providing credentials\n\n\n\nbefore\n\n\n\n```rust\n\nlet github = Github::new(\n\n \"my-cool-user-agent/0.1.0\",\n\n Some(Credentials::Token(\"personal-access-token\")),\n\n &core.handle()\n\n);\n\n```\n\n\n\nafter\n\n\n\n```rust\n\nlet github = Github::new(\n\n \"my-cool-user-agent/0.1.0\",\n\n Credentials::Token(\"personal-access-token\"),\n\n &core.handle()\n\n);\n\n```\n\n\n", "file_path": "CHANGELOG.md", "rank": 24, "score": 50314.033511808426 }, { "content": "# 0.2.4\n\n\n\nImproved coverage of pull request api\n\n\n\n* Pull.body is now represented as an `Option<String>`\n\n* Pull.assignees is now deserialized\n\n* added `pull.files()` which returns a `Vec<FileDiff>`\n\n\n\n# 0.2.3\n\n\n\n* added support for repo creation [#38](https://github.com/softprops/hubcaps/pull/38)\n\n* upgrade syntex build dependency to 0.35\n\n\n\n# 0.2.2\n\n\n\n* upgrade to [hyper 0.8](https://github.com/hyperium/hyper/blob/master/CHANGELOG.md#v080-2016-03-14)\n\n* upgrade syntex build dependency to 0.33\n\n\n\n# 0.2.1 (2016-04-09)\n\n\n\n* Added support for listing organization repositories [via @carols10cents](https://github.com/softprops/hubcaps/pull/29)\n\n* Fixed deserialization issue related to error response in release api calls [issue #31](https://github.com/softprops/hubcaps/issues/31)\n\n\n\n# 0.2.0\n\n\n", "file_path": "CHANGELOG.md", "rank": 25, "score": 50312.86274557665 }, { "content": "### repositories\n\n\n\nTypically the reference point of most github services is a repository\n\n\n\n```rust\n\nlet repo = github.repo(\"user\", \"repo\");\n\n```\n\n\n\nWith a repo instance on hand, you can access a number of sub services,\n\nlike `labels`, `deployments`, `pulls`, `issues`, `releases`, and many 
more.\n\nEach of this are named functions exported from the repo interface.\n\n\n\nSee [examples directory](examples/repos.rs) for examples\n\n\n\n### branches\n\n\n\nBranches is a service for listing repository branches\n\n\n\n```rust\n\nlet branches = repo.branches();\n\n```\n\n\n\n### labels\n\n\n\nLabels is a service for tagging resources like issues and pulls with names which you can later group and filter on.\n\n\n\n```rust\n\nuse hubcaps::labels::LabelOptions;\n\n\n\nlet labels = repo.labels();\n\n\n\n// create new labels\n\nlabels.create(\n\n &LabelOptions::new(\n\n \"rustic\", \"ccc\"\n\n )\n\n )\n\n```\n\n\n\n### deployments\n\n\n\nDeployments is a service for orchestrating deployments of applications sourced from github repositories\n\n\n\n```rust\n\nlet deployments = repo.deployments();\n\n```\n\n\n\nSee [examples directory](examples/deployments.rs) for examples\n\n\n\n### pulls\n\n\n\nPulls is a service for issuing code change requests against a repository\n\n\n\n```rust\n\nlet pulls = repo.pulls();\n\n```\n\n\n\nSee [examples directory](examples/pulls.rs) for examples\n\n\n\n### issues\n\n\n\nIssues is a service for tracking bugs for a repository\n\n\n\n```rust\n\nlet issues = repo.issues();\n\n```\n\n\n\nSee [examples directory](examples/issues.rs) for examples\n\n\n\n### releases\n\n\n\nReleases is a service for tracking changes for a stable releases of a versioned library or application\n\n\n\n```rust\n\nlet releases = repo.releases();\n\n```\n\n\n\n### gists\n\n\n\nGists is a service for micro repositories\n\n\n\n```rust\n\nlet gists = github.gists();\n\n```\n\n\n\nSee [examples directory](examples/gists.rs) for examples\n\n\n\n\n\n### hooks\n\n\n\nHooks is a service for managing repository hooks\n\n\n\n```rust\n\nlet hooks = repo.hooks();\n\n```\n\n\n\nSee [examples directory](examples/hooks.rs) for examples\n\n\n", "file_path": "README.md", "rank": 26, "score": 50311.83006575183 }, { "content": "# hubcaps\n\n\n\n[![Build 
Status](https://travis-ci.org/softprops/hubcaps.svg?branch=master)](https://travis-ci.org/softprops/hubcaps) [![Coverage Status](https://coveralls.io/repos/softprops/hubcaps/badge.svg?branch=master&service=github)](https://coveralls.io/github/softprops/hubcaps?branch=master) [![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg)](LICENSE) [![crates.io](http://meritbadge.herokuapp.com/hubcaps)](https://crates.io/crates/hubcaps) [![Released API docs](https://docs.rs/hubcaps/badge.svg)](http://docs.rs/hubcaps) [![Master API docs](https://img.shields.io/badge/docs-master-green.svg)](https://softprops.github.io/hubcaps)\n\n\n\n> a rust interface for github\n\n\n\n## /!\\ planned API changes\n\n\n\nThe goal and motivation behind these are not to intentionally make breaking changes, but rather to adopt evolving community standards\n\n\n\n* replace builder implementations with [derive_builder](https://crates.io/crates/derive_builder) crate type derivation\n\n\n\n## installation\n\n\n\nAdd the following to your `Cargo.toml` file\n\n\n\n```toml\n\n[dependencies]\n\nhubcaps = \"0.5\"\n\n```\n\n\n\n## usage\n\n\n\nBasic usage requires a user agent string (because github requires this) and\n\noptionally a flavor of `hubcaps::Credentials` for making requests as a particular\n\ngithub user.\n\n\n\nFor user authenticated requests you'll typically want to use\n\n`hubcaps::Credentials::Token` with a\n\n[personal access token](https://github.com/settings/tokens).\n\n\n\n```rust\n\nuse hubcaps::{Credentials, Github};\n\n\n\nfn main() {\n\n let github = Github::new(\n\n \"my-cool-user-agent/0.1.0\",\n\n Credentials::Token(\"personal-access-token\"),\n\n );\n\n}\n\n```\n\n\n\nGithub instances define methods for accessing api services that map closely to\n\ntheir url structure.\n\n\n\nAs a convention, api methods that expect arguments are represented as functions\n\nthat accept a struct representing those arguments with an optional builder\n\ninterface for 
convenience of construction.\n\n\n\nSee [examples directory](examples/) for some getting started examples\n\n\n", "file_path": "README.md", "rank": 27, "score": 50310.79375667103 }, { "content": "# 0.5.0\n\n\n\n* BREAKING CHANGE: upgrade to hyper 0.12 and replace `tokio-core` with `tokio` [#136](https://github.com/softprops/hubcaps/pull/136)\n\n\n\nThis simplifies interfaces for constructing instances as it removes the need to pass a borrowed `Handle` around\n\n\n\nbefore\n\n\n\n```rust\n\nlet mut core = Core::new().expect(\"failed to initilaize core\");\n\n Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n &core.handle(),\n\n);\n\n```\n\n\n\nafter\n\n\n\n```rust\n\n Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token)\n\n );\n\n```\n\n* add experimental feature for http [etag](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag) caching [#151](https://github.com/softprops/hubcaps/pull/151) [#160](https://github.com/softprops/hubcaps/pull/160)\n\n\n\nThis allows clients to keep a local cache of response data to avoid the need to download responses when data hasn't changed\n\nThis features is currently behind a feature flag until its stabalized.\n\n\n\nYou can find an example in this repo with\n\n\n\n```sh\n\n$ cargo run --no-default-features --features tls,httpcache --example conditional_requests\n\n```\n\n\n\nTo enable this feature in your application dependencies add the following to you're `Cargo.toml` file\n\n\n\n```toml\n\n[dependencies.hubcaps]\n\nversion = \"0.5.0\"\n\ndefault-features = false\n\nfeatures = [\"tls\",\"httpcache\"]\n\n```\n\n\n\n* add `pull_request` field to issue struct [#156](https://github.com/softprops/hubcaps/pull/156)\n\n* improve contents API [#155](https://github.com/softprops/hubcaps/pull/155)\n\n* implement repository [contributors 
api](https://developer.github.com/v3/repos/#list-contributors) [#154](https://github.com/softprops/hubcaps/pull/154)\n\n* add release helper methods to get `latest` release and `release_by_tag` [#147](https://github.com/softprops/hubcaps/pull/147)\n\n* add optional [rustls](https://github.com/ctz/rustls) support\n\n\n\n\n", "file_path": "CHANGELOG.md", "rank": 28, "score": 50310.523015781204 }, { "content": "## breaking changes\n\n\n\nHyper 0.11's switch to async APIs had a major impact to the API design choices\n\nin this release. The following are the major notable changes\n\n\n\n* interfaces that previously returned `hubcaps::Result` types now return `hubcaps::Future` types. The semantics are the same, the difference is that\n\nthese map to async computed values. To learn more about Futures and\n\nFuture combinators see [this documentation](http://alexcrichton.com/futures-rs/futures/future/index.html)\n\n* `hubcaps::Client`'s associated methods for creating new interfaces got a facelift. The `hyper::Client` previously required for constructor methods is provided by default ( customization is still supported ) with a default tls\n\nconnector. A `tokio_core::reactor::Handle` reference is required in order to\n\nconstruct this client. The motivation is that its the application responsibility\n\nto manage `Core` resources.\n\n* `iter` methods previously returned `Iter` types which provided a way to iterate\n\nover elements of paginated collections. The analog to iterators in the async world `hubcaps::Stream` types which are akin to an iterator in which values are\n\ncomputed asynchronously. 
To learn more about Streams and Stream combinators see\n\n[this documentation](http://alexcrichton.com/futures-rs/futures/stream/index.html)\n\n* Credentials are now provided as an Option type removing the need for Credential::None\n\n\n\n# 0.3.16\n\n\n\n* added users api interfaces [@dpc](https://github.com/softprops/hubcaps/pull/90)\n\n\n\n# 0.3.15\n\n\n\n* org team description is now an Option type\n\n# 0.3.14\n\n\n\n* fixed response parsing for adding branch protection\n\n\n\n# 0.3.13\n\n\n\n* updated branches interface to reflect [branch API changes](https://developer.github.com/changes/2017-09-06-protected-branches-preview-end/)\n\n* added `SearchIssuesOptions.per_page(n)` interface for limiting search results\n\n\n\n# 0.3.12\n\n\n\n* fixed issue with persistence of repo term permission\n\n\n\n# 0.3.11\n\n\n\n* fixed PUT vs PATCH issue with repo team adds\n\n\n\n# 0.3.10\n\n\n\n* add ability to add team to repository\n\n\n", "file_path": "CHANGELOG.md", "rank": 29, "score": 50308.1522951257 }, { "content": "### search\n\n\n\nSearch provides a raw string query search for indexed data. Currently only search for issues is supported\n\n\n\n```rust\n\nlet search_issues = github.search().issues();\n\n```\n\n\n\n### teams\n\n\n\nTeams is a service for listing repository and organization teams\n\n\n\n```rust\n\nlet teams = repo.teams();\n\n```\n\n\n\nSee [examples directory](examples/teams.rs) for examples\n\n\n\nDoug Tangren (softprops) 2015-2018\n", "file_path": "README.md", "rank": 30, "score": 50307.252647796355 }, { "content": "### IDE Configuration files\n\nMachine specific configuration files may be generaged by your IDE while working on the project. 
Please make sure to add these files to a global .gitignore so they are kept from accidentally being commited to the project and causing issues for other contributors.\n\n\n\nSome examples of these files are the `.idea` folder created by JetBrains products (WebStorm, IntelliJ, etc) as well as `.vscode` created by Visual Studio Code for workspace specific settings.\n\n\n\nFor help setting up a global .gitignore check out this [GitHub article]!\n\n\n\n[GitHub article]: https://help.github.com/articles/ignoring-files/#create-a-global-gitignore\n\n\n\n## Conduct\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 31, "score": 50307.12796743908 }, { "content": "# Contributing\n\n\n\n## Filing an Issue\n\n\n\nIf you are trying to use `hubcaps` and run into an issue- please file an\n\nissue! We'd love to get you up and running, even if the issue you have might\n\nnot be directly related to the code in `hubcaps`. This library seeks to make\n\nit easy for developers to get going, so there's a good chance we can do\n\nsomething to alleviate the issue by making `hubcaps` better documented or\n\nmore robust to different developer environments.\n\n\n\nWhen filing an issue, do your best to be as specific as possible\n\n The faster was can reproduce your issue, the faster we\n\ncan fix it for you!\n\n\n\n## Submitting a PR\n\n\n\nIf you are considering filing a pull request, make sure that there's an issue\n\nfiled for the work you'd like to do. There might be some discussion required!\n\nFiling an issue first will help ensure that the work you put into your pull\n\nrequest will get merged :)\n\n\n\nBefore you submit your pull request, check that you have completed all of the\n\nsteps mentioned in the pull request template. Link the issue that your pull\n\nrequest is responding to, and format your code using [rustfmt][rustfmt].\n\n\n\n### Configuring rustfmt\n\n\n\nBefore submitting code in a PR, make sure that you have formatted the codebase\n\nusing [rustfmt][rustfmt]. 
`rustfmt` is a tool for formatting Rust code, which\n\nhelps keep style consistent across the project. If you have not used `rustfmt`\n\nbefore, it is not too difficult.\n\n\n\nIf you have not already configured `rustfmt` for the\n\nnightly toolchain, it can be done using the following steps:\n\n\n\n**1. Use Nightly Toolchain**\n\n\n\nInstall the nightly toolchain. This will only be necessary as long as rustfmt produces different results on stable and nightly.\n\n\n\n```sh\n\n$ rustup toolchain install nightly\n\n```\n\n\n\n**2. Add the rustfmt component**\n\n\n\nInstall the most recent version of `rustfmt` using this command:\n\n\n\n```sh\n\n$ rustup component add rustfmt-preview --toolchain nightly\n\n```\n\n\n\n**3. Running rustfmt**\n\n\n\nTo run `rustfmt`, use this command:\n\n\n\n```sh\n\ncargo +nightly fmt\n\n```\n\n\n\n[rustfmt]: https://github.com/rust-lang-nursery/rustfmt\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 32, "score": 50305.51279691916 }, { "content": "# 0.1.1\n\n\n\n* DeploymentStatusOptions now have an optional field for `target_url` and `description`\n\n\n\n# 0.1.0\n\n\n\n* initial release\n", "file_path": "CHANGELOG.md", "rank": 33, "score": 50302.83385740946 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n\n\n println!(\"My organizations:\");\n\n println!(\"\");\n\n\n\n for org in rt.block_on(github.orgs().list())? {\n\n println!(\"{}\", org.login);\n\n println!(\"=============\");\n\n println!(\"Repos:\");\n\n\n\n for repo in rt.block_on(github.org_repos(&org.login[..]).list(&Default::default()))? 
{\n\n println!(\"* {}\", repo.name);\n", "file_path": "examples/collaborators.rs", "rank": 34, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n // add labels associated with a pull\n\n println!(\n\n \"{:#?}\",\n\n rt.block_on(\n\n github\n\n .repo(\"softprops\", \"hubcaps\")\n\n .pulls()\n\n .get(121)\n\n .labels()\n\n .add(vec![\"enhancement\"])\n\n )?\n", "file_path": "examples/labels.rs", "rank": 35, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n let stars = github.activity().stars();\n\n let f = stars\n\n .star(\"softprops\", \"hubcaps\")\n\n .join(stars.is_starred(\"softprops\", \"hubcaps\"));\n\n match rt.block_on(f) {\n\n Ok((_, starred)) => println!(\"starred? 
{:?}\", starred),\n\n Err(err) => println!(\"err {}\", err),\n\n }\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/stars.rs", "rank": 36, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n match rt.block_on(github.users().authenticated()) {\n\n Ok(me) => println!(\"{:#?}\", me),\n\n Err(err) => println!(\"err {:#?}\", err),\n\n }\n\n\n\n match rt.block_on(\n\n github.users().get(\n\n env::var(\"GH_USERNAME\")\n\n .ok()\n\n .unwrap_or_else(|| \"bors\".into()),\n\n ),\n\n ) {\n\n Ok(user) => println!(\"{:#?}\", user),\n\n Err(err) => println!(\"err {:#?}\", err),\n\n }\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/users.rs", "rank": 37, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n let key_file = var(\"GH_APP_KEY\")?;\n\n let app_id = var(\"GH_APP_ID\")?;\n\n let installation_id = var(\"GH_INSTALL_ID\")?;\n\n\n\n let mut rt = Runtime::new()?;\n\n\n\n let mut key = Vec::new();\n\n File::open(&key_file)?.read_to_end(&mut key)?;\n\n let cred = JWTCredentials::new(app_id.parse().expect(\"Bad GH_APP_ID\"), key)?;\n\n\n\n let mut github = Github::new(USER_AGENT, Credentials::JWT(cred.clone()))?;\n\n github.set_credentials(Credentials::InstallationToken(\n\n InstallationTokenGenerator::new(installation_id.parse().unwrap(), cred),\n\n ));\n\n\n\n rt.block_on(\n\n github\n\n .org(\"NixOS\")\n", "file_path": "examples/invitations.rs", "rank": 38, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n 
Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n rt.block_on(github.activity().stars().star(\"rust-lang\", \"log\"))?;\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/redir.rs", "rank": 39, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n let token = env::var(\"GITHUB_TOKEN\").expect(\"example missing GITHUB_TOKEN\");\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n\n\n println!(\"watched repos\");\n\n rt.block_on(github.activity().watching().iter().for_each(|repo| {\n\n println!(\"{}\", repo.full_name);\n\n Ok(())\n\n }))?;\n\n\n\n println!(\"watch a repo\");\n\n rt.block_on(github.activity().watching().watch_repo(\"octocat\", \"Hello-World\")).and_then(|sub| {\n\n println!(\"subscription: {:#?}\", sub);\n\n Ok(())\n\n })?;\n", "file_path": "examples/watching.rs", "rank": 40, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n let key_file = var(\"GH_APP_KEY\")?;\n\n let app_id = var(\"GH_APP_ID\")?;\n\n let user_name = var(\"GH_USERNAME\")?;\n\n let repo = var(\"GH_REPO\")?;\n\n let branch = var(\"GH_BRANCH\")?;\n\n let mut rt = Runtime::new()?;\n\n\n\n let mut key = Vec::new();\n\n File::open(&key_file)?.read_to_end(&mut key)?;\n\n let cred = JWTCredentials::new(app_id.parse().expect(\"Bad GH_APP_ID\"), key)?;\n\n\n\n let mut github = Github::new(USER_AGENT, Credentials::JWT(cred.clone()))?;\n\n let installation = rt\n\n .block_on(\n\n github\n\n .app()\n\n .find_repo_installation(user_name.clone(), repo.clone()),\n\n ).unwrap();\n", "file_path": "examples/checks.rs", "rank": 41, "score": 45473.850081294804 }, { 
"content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n let repo = github.repo(\"softprops\", \"hubcat\");\n\n let pulls = repo.pulls();\n\n rt.block_on(pulls.iter(&Default::default()).for_each(|pull| {\n\n println!(\"{:#?}\", pull);\n\n Ok(())\n\n }))?;\n\n\n\n println!(\"comments\");\n\n for c in rt.block_on(\n\n github\n\n .repo(\"softprops\", \"hubcaps\")\n", "file_path": "examples/pulls.rs", "rank": 42, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n for gist in rt.block_on(github.gists().list(&Default::default()))? {\n\n println!(\"{:#?}\", gist)\n\n }\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/gists.rs", "rank": 43, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n let repo = github.repo(\"softprops\", \"hubcaps\");\n\n let hook = rt.block_on(\n\n repo.hooks().create(\n\n &HookCreateOptions::web()\n\n .url(\"http://localhost:8080\")\n\n .content_type(WebHookContentType::Json)\n\n .build(),\n\n ),\n\n );\n\n println!(\"{:#?}\", hook);\n\n let hooks = repo.hooks();\n\n for hook in rt.block_on(hooks.list())? 
{\n\n println!(\"{:#?}\", hook)\n\n }\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/hooks.rs", "rank": 44, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(USER_AGENT, Credentials::Token(token))?;\n\n\n\n let issue = github.repo(\"softprops\", \"hubcat\").issues().get(1);\n\n let f = issue.comments().create(&CommentOptions {\n\n body: format!(\"Hello, world!\\n---\\nSent by {}\", USER_AGENT),\n\n });\n\n\n\n match rt.block_on(f) {\n\n Ok(comment) => println!(\"{:?}\", comment),\n\n Err(err) => println!(\"err {}\", err),\n\n }\n\n\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/comments.rs", "rank": 45, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n\n\n let org = \"eb6cb83a-cf75-4e88-a11a-ce117467d8ae\";\n\n let repo_name = \"d18e3679-9830-40a9-8cf5-16602639b43e\";\n\n\n\n println!(\"org teams\");\n\n rt.block_on(github.org(org).teams().iter().for_each(|team| {\n\n println!(\"{:#?}\", team);\n\n Ok(())\n\n }))\n\n .unwrap_or_else(|e| println!(\"error: {:#?}\", e));\n\n\n", "file_path": "examples/teams.rs", "rank": 46, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n let 
owner = \"octokit\";\n\n let repo = \"rest.js\";\n\n\n\n let options = ForkListOptions::builder().build();\n\n rt.block_on(\n\n github\n\n .repo(owner, repo)\n\n .forks()\n\n .iter(&options)\n\n .for_each(move |repo| {\n\n println!(\"{}\", repo.full_name);\n\n Ok(())\n\n })\n\n )?;\n\n\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/forks.rs", "rank": 47, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n\n\n if let Err(err) = rt.block_on(\n\n github\n\n .repo(\"softprops\", \"hubcaps\")\n\n .branches()\n\n .iter()\n\n .for_each(|branch| {\n\n println!(\"{:#?}\", branch);\n\n Ok(())\n\n }),\n\n ) {\n", "file_path": "examples/branches.rs", "rank": 48, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n\n\n let repo = github.repo(\"softprops\", \"hubcaps\");\n\n\n\n let forked = rt.block_on(repo.forks().create())?;\n\n\n\n println!(\"Forked repository to {}\", forked.full_name);\n\n\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/fork.rs", "rank": 49, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n 
Credentials::Token(token),\n\n )?;\n\n let pull = rt.block_on(\n\n github\n\n .repo(\"softprops\", \"hubcaps\")\n\n .pulls()\n\n .get(122)\n\n .assignees()\n\n .add(vec![\"softprops\"]),\n\n )?;\n\n println!(\"{:#?}\", pull);\n\n\n\n let issue = rt.block_on(\n", "file_path": "examples/assignees.rs", "rank": 50, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n let owner = \"softprops\";\n\n let repo = \"hubcaps\";\n\n\n\n println!(\"Top 10 referrers\");\n\n for referrer in rt.block_on(github.repo(owner, repo).traffic().referrers())? {\n\n println!(\"{:#?}\", referrer)\n\n }\n\n\n\n println!(\"Top 10 paths\");\n\n for path in rt.block_on(github.repo(owner, repo).traffic().paths())? {\n\n println!(\"{:#?}\", path)\n", "file_path": "examples/traffic.rs", "rank": 51, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n let owner = \"octokit\";\n\n let repo = \"rest.js\";\n\n\n\n for r in rt.block_on(github.repo(owner, repo).releases().list())? 
{\n\n println!(\"{:#?}\", r.name);\n\n }\n\n\n\n let latest = rt.block_on(github.repo(owner, repo).releases().latest())?;\n\n println!(\"{:#?}\", latest);\n\n\n\n let release = rt.block_on(github.repo(owner, repo).releases().by_tag(\"v11.0.0\"))?;\n\n println!(\"{:#?}\", release);\n\n\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/releases.rs", "rank": 52, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n let repo = github.repo(\"softprops\", \"hubcaps\");\n\n let deployments = repo.deployments();\n\n // let deploy = deployments.create(&DeploymentOptions::builder(\"master\")\n\n // .payload(\"this is the payload\".to_owned()).build());\n\n // println!(\"{:?}\", deploy);\n\n for d in rt.block_on(deployments.list(&Default::default()))? 
{\n\n println!(\"{:#?}\", d)\n\n }\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/deployments.rs", "rank": 53, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n rt.block_on(\n\n github\n\n .repo(\"matthiasbeyer\", \"imag\")\n\n .issues()\n\n .iter(\n\n &IssueListOptions::builder()\n\n .per_page(100)\n\n .state(State::All)\n\n .build(),\n\n )\n\n .for_each(move |issue| {\n\n println!(\"{} ({})\", issue.title, issue.state);\n\n Ok(())\n\n }),\n\n )?;\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/issues.rs", "rank": 54, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n\n\n let opts = ThreadListOptions::builder().all(true).build();\n\n for thread in rt.block_on(github.activity().notifications().list(&opts))? 
{\n\n println!(\"{:#?}\", thread);\n\n let subscription = rt.block_on(\n\n github\n\n .activity()\n\n .notifications()\n\n .get_subscription(thread.id),\n\n );\n\n if let Ok(sub) = subscription {\n", "file_path": "examples/notifications.rs", "rank": 55, "score": 45473.850081294804 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n\n\n let options = OrganizationRepoListOptions::builder()\n\n .repo_type(OrgRepoType::Forks)\n\n .build();\n\n\n\n println!(\"Forks in the rust-lang organization:\");\n\n\n\n for repo in rt.block_on(github.org_repos(\"rust-lang\").list(&options))? {\n\n println!(\"{}\", repo.name)\n\n }\n\n\n", "file_path": "examples/orgs.rs", "rank": 56, "score": 45473.850081294804 }, { "content": "pub trait HttpCache: HttpCacheClone + Debug {\n\n fn cache_response(\n\n &self,\n\n uri: &str,\n\n body: &[u8],\n\n etag: &[u8],\n\n next_link: &Option<String>,\n\n ) -> Result<()>;\n\n fn lookup_etag(&self, uri: &str) -> Result<String>;\n\n fn lookup_body(&self, uri: &str) -> Result<String>;\n\n fn lookup_next_link(&self, uri: &str) -> Result<Option<String>>;\n\n}\n\n\n\nimpl dyn HttpCache {\n\n pub fn noop() -> BoxedHttpCache {\n\n Box::new(NoCache)\n\n }\n\n\n\n pub fn in_home_dir() -> BoxedHttpCache {\n\n let mut dir = dirs::home_dir().expect(\"Expected a home dir\");\n", "file_path": "src/http_cache.rs", "rank": 57, "score": 44617.43651340181 }, { "content": "/// \"unfold\" paginated results of a list of github entities\n\nfn unfold<D, I>(\n\n github: Github,\n\n first: Future<(Option<Link>, D)>,\n\n into_items: fn(D) -> Vec<I>,\n\n) -> Stream<I>\n\nwhere\n\n D: DeserializeOwned + 'static + Send,\n\n I: 'static + Send,\n\n{\n\n Box::new(\n\n first\n\n .map(move |(link, payload)| {\n\n let 
mut items = into_items(payload);\n\n items.reverse();\n\n stream::unfold::<_, _, Future<(I, (Option<Link>, Vec<I>))>, _>(\n\n (link, items),\n\n move |(link, mut items)| match items.pop() {\n\n Some(item) => Some(Box::new(future::ok((item, (link, items))))),\n\n _ => link.and_then(|l| next_link(&l)).map(|url| {\n\n let url = Url::parse(&url).unwrap();\n", "file_path": "src/lib.rs", "rank": 58, "score": 44450.838914234235 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n println!(\"issue search results\");\n\n // https://developer.github.com/v3/search/#parameters-3\n\n rt.block_on(\n\n github\n\n .search()\n\n .issues()\n\n .iter(\n\n \"user:softprops\",\n\n &SearchIssuesOptions::builder().per_page(100).build(),\n\n )\n\n .for_each(|issue| {\n\n println!(\"{}\", issue.title);\n\n Ok(())\n\n }),\n\n )?;\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/search_issues.rs", "rank": 59, "score": 43931.363878978445 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n None,\n\n )?;\n\n let status = rt.block_on(github.rate_limit().get())?;\n\n println!(\"{:#?}\", status);\n\n Ok(())\n\n}\n", "file_path": "examples/rate_limit.rs", "rank": 60, "score": 43931.363878978445 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n\n\n #[cfg(not(feature = \"httpcache\"))]\n\n {\n\n println!(\"rerun this example with `cargo run --no-default-features --features default-tls,httpcache --example conditional_requests`\");\n\n Ok(())\n\n }\n\n\n\n #[cfg(feature = 
\"httpcache\")]\n\n {\n\n let mut rt = Runtime::new()?;\n\n\n\n let host = \"https://api.github.com\";\n\n let agent = concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\"));\n\n let client = Client::builder().build()?;\n\n let http_cache = HttpCache::in_home_dir();\n\n let github = Github::custom(host, agent, None, client, http_cache);\n\n\n\n let _repos = rt.block_on(github.user_repos(\"dwijnand\").list(&Default::default()))?;\n", "file_path": "examples/conditional_requests.rs", "rank": 61, "score": 43931.363878978445 }, { "content": "fn main() -> Result<()> {\n\n pretty_env_logger::init();\n\n match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => {\n\n let mut rt = Runtime::new()?;\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Credentials::Token(token),\n\n )?;\n\n\n\n // create new gist\n\n let mut files = HashMap::new();\n\n files.insert(\"file1\", \"Hello World\");\n\n let options = GistOptions::new(Some(\"gist description\"), false, files);\n\n let gist = rt.block_on(github.gists().create(&options))?;\n\n println!(\"{:#?}\", gist);\n\n\n\n // edit file1\n\n let mut files = HashMap::new();\n\n files.insert(\"file1\", \"Hello World!!\");\n", "file_path": "examples/gists_create.rs", "rank": 62, "score": 43931.363878978445 }, { "content": "#[test]\n\n#[cfg(feature = \"httpcache\")]\n\nfn compare_counts() -> Result<()> {\n\n pretty_env_logger::init();\n\n\n\n let mut rt = Runtime::new()?;\n\n\n\n let agent = concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\"));\n\n let credentials = match env::var(\"GITHUB_TOKEN\").ok() {\n\n Some(token) => Some(Credentials::Token(token)),\n\n None => {\n\n if env::var(\"CI\") == Ok(String::from(\"true\")) {\n\n println!(\"No GITHUB_TOKEN env var in CI, skipping test\");\n\n return Ok(());\n\n } else {\n\n None\n\n }\n\n }\n\n };\n\n let owner = \"octocat\";\n\n let per_page = 5;\n\n let repo_list_options = 
UserRepoListOptions::builder().per_page(per_page).build();\n", "file_path": "tests/conditional_requests.rs", "rank": 63, "score": 42521.4408382831 }, { "content": "//! Git interface\n\n\n\n// Third party\n\nuse serde::Deserialize;\n\n\n\n// Ours\n\nuse crate::{Future, Github};\n\n\n\n/// reference to git operations associated with a github repo\n\npub struct Git {\n\n github: Github,\n\n owner: String,\n\n repo: String,\n\n}\n\n\n\nimpl Git {\n\n #[doc(hidden)]\n\n pub fn new<O, R>(github: Github, owner: O, repo: R) -> Self\n\n where\n\n O: Into<String>,\n", "file_path": "src/git.rs", "rank": 70, "score": 38364.525808489634 }, { "content": "\n\n// representations\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct TreeData {\n\n pub sha: String,\n\n pub url: String,\n\n pub tree: Vec<GitFile>,\n\n pub truncated: bool,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct GitFile {\n\n pub path: String,\n\n pub mode: String,\n\n /// typically tree or blob\n\n #[serde(rename = \"type\")]\n\n pub content_type: String,\n\n /// size will be None for directories\n\n pub size: Option<usize>,\n", "file_path": "src/git.rs", "rank": 71, "score": 38361.46395679641 }, { "content": " pub sha: String,\n\n /// url will be None for commits\n\n pub url: Option<String>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Blob {\n\n pub content: String,\n\n pub encoding: String,\n\n pub url: String,\n\n pub sha: String,\n\n /// sizes will be None for directories\n\n pub size: Option<usize>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq)]\n\n#[serde(untagged)]\n\n/// The response for getting a git reference\n\npub enum GetReferenceResponse {\n\n /// The reference data matching the specified reference\n", "file_path": "src/git.rs", "rank": 74, "score": 38355.12264503968 }, { "content": " R: Into<String>,\n\n {\n\n Git {\n\n github,\n\n owner: owner.into(),\n\n repo: repo.into(),\n\n }\n\n }\n\n\n\n fn path(&self, more: &str) -> String {\n\n format!(\"/repos/{}/{}/git{}\", 
self.owner, self.repo, more)\n\n }\n\n\n\n /// list a git tree of files for this repo at a given sha\n\n /// https://developer.github.com/v3/git/trees/#get-a-tree\n\n /// https://developer.github.com/v3/git/trees/#get-a-tree-recursively\n\n pub fn tree<S>(&self, sha: S, recursive: bool) -> Future<TreeData>\n\n where\n\n S: Into<String>,\n\n {\n", "file_path": "src/git.rs", "rank": 75, "score": 38354.68065942028 }, { "content": " fn deserialize_get_ref_exact() {\n\n let payload = r#\"{\n\n \"ref\": \"refs/heads/featureA\",\n\n \"url\": \"https://api.github.com/repos/octocat/Hello-World/git/refs/heads/featureA\",\n\n \"object\": {\n\n \"type\": \"commit\",\n\n \"sha\": \"aa218f56b14c9653891f9e74264a383fa43fefbd\",\n\n \"url\": \"https://api.github.com/repos/octocat/Hello-World/git/commits/aa218f56b14c9653891f9e74264a383fa43fefbd\"\n\n }\n\n}\"#;\n\n let expected = GetReferenceResponse::Exact(Reference {\n\n reference: \"refs/heads/featureA\".to_string(),\n\n url: \"https://api.github.com/repos/octocat/Hello-World/git/refs/heads/featureA\".to_string(),\n\n object: Object {\n\n object_type: \"commit\".to_string(),\n\n sha: \"aa218f56b14c9653891f9e74264a383fa43fefbd\".to_string(),\n\n url: \"https://api.github.com/repos/octocat/Hello-World/git/commits/aa218f56b14c9653891f9e74264a383fa43fefbd\".to_string(),\n\n },\n\n });\n\n test_deserializing(payload, expected)\n", "file_path": "src/git.rs", "rank": 77, "score": 38350.911130798624 }, { "content": " \"url\": \"https://api.github.com/repos/octocat/Hello-World/git/commits/612077ae6dffb4d2fbd8ce0cccaa58893b07b5ac\"\n\n }\n\n }\n\n]\"#;\n\n let expected = GetReferenceResponse::StartWith(vec![\n\n Reference {\n\n reference: \"refs/heads/feature-a\".to_string(),\n\n url: \"https://api.github.com/repos/octocat/Hello-World/git/refs/heads/feature-a\".to_string(),\n\n object: Object {\n\n object_type: \"commit\".to_string(),\n\n sha: \"aa218f56b14c9653891f9e74264a383fa43fefbd\".to_string(),\n\n url: 
\"https://api.github.com/repos/octocat/Hello-World/git/commits/aa218f56b14c9653891f9e74264a383fa43fefbd\".to_string(),\n\n },\n\n },\n\n Reference {\n\n reference: \"refs/heads/feature-b\".to_string(),\n\n url: \"https://api.github.com/repos/octocat/Hello-World/git/refs/heads/feature-b\".to_string(),\n\n object: Object {\n\n object_type: \"commit\".to_string(),\n\n sha: \"612077ae6dffb4d2fbd8ce0cccaa58893b07b5ac\".to_string(),\n\n url: \"https://api.github.com/repos/octocat/Hello-World/git/commits/612077ae6dffb4d2fbd8ce0cccaa58893b07b5ac\".to_string(),\n\n },\n\n },\n\n ]);\n\n test_deserializing(payload, expected)\n\n }\n\n}\n", "file_path": "src/git.rs", "rank": 78, "score": 38349.950966658005 }, { "content": " pub sha: String,\n\n pub url: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde::Deserialize;\n\n use serde_json;\n\n use std::fmt::Debug;\n\n\n\n fn test_deserializing<'de, T>(payload: &'static str, expected: T)\n\n where\n\n T: Debug + PartialEq + Deserialize<'de>,\n\n {\n\n let incoming: T = serde_json::from_str(payload).unwrap();\n\n assert_eq!(incoming, expected)\n\n }\n\n\n\n #[test]\n", "file_path": "src/git.rs", "rank": 79, "score": 38348.18182273064 }, { "content": " }\n\n\n\n #[test]\n\n fn deserialize_get_ref_starts_with() {\n\n let payload = r#\"[\n\n {\n\n \"ref\": \"refs/heads/feature-a\",\n\n \"url\": \"https://api.github.com/repos/octocat/Hello-World/git/refs/heads/feature-a\",\n\n \"object\": {\n\n \"type\": \"commit\",\n\n \"sha\": \"aa218f56b14c9653891f9e74264a383fa43fefbd\",\n\n \"url\": \"https://api.github.com/repos/octocat/Hello-World/git/commits/aa218f56b14c9653891f9e74264a383fa43fefbd\"\n\n }\n\n },\n\n {\n\n \"ref\": \"refs/heads/feature-b\",\n\n \"url\": \"https://api.github.com/repos/octocat/Hello-World/git/refs/heads/feature-b\",\n\n \"object\": {\n\n \"type\": \"commit\",\n\n \"sha\": \"612077ae6dffb4d2fbd8ce0cccaa58893b07b5ac\",\n", "file_path": "src/git.rs", "rank": 80, "score": 
38343.99785415192 }, { "content": " self.github.get(&self.path(&format!(\n\n \"/trees/{}?recursive={}\",\n\n sha.into(),\n\n if recursive { \"1\" } else { \"0\" }\n\n )))\n\n }\n\n\n\n /// get the blob contents of a given sha\n\n /// https://developer.github.com/v3/git/blobs/#get-a-blob\n\n pub fn blob<S>(&self, sha: S) -> Future<Blob>\n\n where\n\n S: Into<String>,\n\n {\n\n self.github\n\n .get(&self.path(&format!(\"/blobs/{}\", sha.into())))\n\n }\n\n\n\n /// get the git reference data of a given ref\n\n /// the specified reference must be formatted as as \"heads/branch\", not just \"branch\"\n\n /// https://developer.github.com/v3/git/refs/#get-a-reference\n", "file_path": "src/git.rs", "rank": 82, "score": 38342.41975151975 }, { "content": " Exact(Reference),\n\n /// If the reference doesn't exist in the repository\n\n /// but existing refs start with ref they will be returned as an array.\n\n /// For example, a call to get the data for a branch named feature,\n\n /// which doesn't exist, would return head refs including featureA and featureB which do.\n\n StartWith(Vec<Reference>),\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq)]\n\npub struct Reference {\n\n #[serde(rename = \"ref\")]\n\n pub reference: String,\n\n pub url: String,\n\n pub object: Object,\n\n}\n\n\n\n#[derive(Debug, Deserialize, PartialEq)]\n\npub struct Object {\n\n #[serde(rename = \"type\")]\n\n pub object_type: String,\n", "file_path": "src/git.rs", "rank": 84, "score": 38341.431941706905 }, { "content": " pub fn reference<S>(&self, reference: S) -> Future<GetReferenceResponse>\n\n where\n\n S: Into<String>,\n\n {\n\n self.github\n\n .get(&self.path(&format!(\"/refs/{}\", reference.into())))\n\n }\n\n\n\n //// deletes a refish\n\n /// branches should be in the format `heads/feature-a`\n\n /// tags should be in the format `tags/v1.0`\n\n /// https://developer.github.com/v3/git/refs/#delete-a-reference\n\n pub fn delete_reference<S>(&self, reference: S) -> Future<()>\n\n where\n\n S: 
Into<String>,\n\n {\n\n self.github\n\n .delete(&self.path(&format!(\"/refs/{}\", reference.into())))\n\n }\n\n}\n", "file_path": "src/git.rs", "rank": 85, "score": 38339.596381873176 }, { "content": "use std::env;\n\nuse std::str;\n\n\n\nuse futures::Stream;\n\nuse tokio::runtime::Runtime;\n\n\n\nuse hubcaps::{Credentials, Github, Result};\n\n\n", "file_path": "examples/content.rs", "rank": 86, "score": 38334.496159010014 }, { "content": " let blob = rt.block_on(\n\n github\n\n .repo(\"softprops\", \"hubcaps\")\n\n .git()\n\n .blob(file.sha.clone()),\n\n )?;\n\n println!(\"readme {:#?}\", blob);\n\n }\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/git.rs", "rank": 87, "score": 38333.29653662962 }, { "content": " }))?;\n\n\n\n println!(\"Root directory:\");\n\n for item in rt.block_on(repo.content().root().collect())? {\n\n println!(\" {}\", item.path)\n\n }\n\n\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/content.rs", "rank": 88, "score": 38332.67669607687 }, { "content": "use std::env;\n\n\n\nuse tokio::runtime::Runtime;\n\n\n\nuse hubcaps::{Credentials, Github, Result};\n\n\n", "file_path": "examples/git.rs", "rank": 89, "score": 38326.83987036899 }, { "content": "use std::env;\n\n\n\nuse futures::{Future, Stream};\n\nuse tokio::runtime::Runtime;\n\n\n\nuse hubcaps::{Credentials, Github, Result};\n\n\n", "file_path": "examples/repos.rs", "rank": 90, "score": 38075.52439974147 }, { "content": " });\n\n handle.spawn(f.map_err(|_| ()));\n\n Ok(())\n\n }),\n\n )?;\n\n Ok(())\n\n }\n\n _ => Err(\"example missing GITHUB_TOKEN\".into()),\n\n }\n\n}\n", "file_path": "examples/repos.rs", "rank": 91, "score": 38066.50160316547 }, { "content": "use std::env;\n\n\n\nuse tokio::runtime::Runtime;\n\n\n\nuse hubcaps::{Credentials, Github, Result};\n\n\n", "file_path": "examples/pull_files.rs", "rank": 92, "score": 36566.430617366976 }, { "content": 
"//! Repo Commits interface\n\n//! https://developer.github.com/v3/repos/commits/#get-a-single-commit\n\nuse serde::Deserialize;\n\n\n\nuse crate::users::User;\n\nuse crate::{Future, Github, Stream};\n\n\n\n/// A structure for interfacing with a repository commits\n\npub struct RepoCommits {\n\n github: Github,\n\n owner: String,\n\n repo: String,\n\n}\n\n\n\nimpl RepoCommits {\n\n #[doc(hidden)]\n\n pub fn new<O, R>(github: Github, owner: O, repo: R) -> Self\n\n where\n\n O: Into<String>,\n\n R: Into<String>,\n", "file_path": "src/repo_commits.rs", "rank": 93, "score": 36228.20077047494 }, { "content": " pub fn build(&self) -> SearchReposOptions {\n\n SearchReposOptions {\n\n params: self.0.params.clone(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct ReposItem {\n\n pub id: u32,\n\n pub name: String,\n\n pub full_name: String,\n\n pub owner: User,\n\n pub private: bool,\n\n pub html_url: String,\n\n pub description: String,\n\n pub fork: bool,\n\n pub url: String,\n\n pub forks_url: String,\n\n pub keys_url: String,\n", "file_path": "src/search/repos.rs", "rank": 94, "score": 36221.646942309904 }, { "content": " }\n\n\n\n /// get a repo commit\n\n pub fn get(&self, commit_ref: &str) -> Future<RepoCommit> {\n\n let uri = format!(\"/repos/{}/{}/commits/{}\", self.owner, self.repo, commit_ref);\n\n self.github.get::<RepoCommit>(&uri)\n\n }\n\n}\n\n\n\n// representations\n\n\n\n// !!! 
RepoCommit, CommitDetails, CommitRef, UserStamp are exact\n\n// dupes of pull_commits.rs' representations.\n\n\n\n/// Representation of a repo commit\n\n#[derive(Debug, Deserialize)]\n\npub struct RepoCommit {\n\n pub url: String,\n\n pub sha: String,\n\n pub html_url: String,\n", "file_path": "src/repo_commits.rs", "rank": 95, "score": 36220.79920742961 }, { "content": "pub struct CommitRef {\n\n pub url: String,\n\n pub sha: String,\n\n}\n\n\n\n/// Representation of a git user\n\n#[derive(Debug, Deserialize)]\n\npub struct UserStamp {\n\n pub name: String,\n\n pub email: String,\n\n pub date: String,\n\n}\n", "file_path": "src/repo_commits.rs", "rank": 96, "score": 36215.5507203733 }, { "content": " pub score: f64,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct License {\n\n key: String,\n\n name: String,\n\n spdx_id: String,\n\n url: String,\n\n}\n", "file_path": "src/search/repos.rs", "rank": 97, "score": 36209.13015158034 }, { "content": "pub struct SearchReposOptions {\n\n params: HashMap<&'static str, String>,\n\n}\n\n\n\nimpl SearchReposOptions {\n\n pub fn builder() -> SearchReposOptionsBuilder {\n\n SearchReposOptionsBuilder::default()\n\n }\n\n\n\n pub fn serialize(&self) -> Option<String> {\n\n if self.params.is_empty() {\n\n None\n\n } else {\n\n let encoded: String = form_urlencoded::Serializer::new(String::new())\n\n .extend_pairs(&self.params)\n\n .finish();\n\n Some(encoded)\n\n }\n\n }\n\n}\n", "file_path": "src/search/repos.rs", "rank": 98, "score": 36208.01795200071 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt;\n\n\n\nuse url::form_urlencoded;\n\nuse serde::Deserialize;\n\n\n\nuse super::{Search, SearchResult};\n\nuse crate::{Future, SortDirection, Stream};\n\nuse crate::users::User;\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ReposSort {\n\n /// Sort by the number of stars\n\n Stars,\n\n ///Sort by the number of forks\n\n Forks,\n\n /// Sort by when the repo was last updated\n\n Updated,\n\n}\n\n\n", 
"file_path": "src/search/repos.rs", "rank": 99, "score": 36207.99595712649 } ]
Rust
lib/engine/src/window/input.rs
OrangeBacon/opengl-rust
842354c929db9d60b73fb54781420d41f8e99f23
use std::collections::HashMap; use super::scancode::Scancode; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum KeyState { None, Down, Hold, Up, } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] pub struct MouseState { x: i32, y: i32, delta_x: i32, delta_y: i32, wheel_x: i32, wheel_y: i32, wheel_delta_x: i32, wheel_delta_y: i32, left_button: bool, middle_button: bool, right_button: bool, mouse_four: bool, mouse_five: bool, } #[derive(Clone, PartialEq, Eq, Debug, Default)] pub struct InputState { mouse: MouseState, keys: HashMap<Scancode, KeyState>, } impl InputState { pub fn update(&mut self, mouse_state: MouseState) { self.mouse = mouse_state; self.keys = self .keys .iter() .map(|(scan, state)| { let new_state = match state { KeyState::Down => KeyState::Hold, KeyState::Up => KeyState::None, a => *a, }; (*scan, new_state) }) .collect(); } pub fn key_state(&self, key: Scancode) -> KeyState { *self.keys.get(&key).unwrap_or(&KeyState::None) } pub(crate) fn set_key_state(&mut self, key: Scancode, state: KeyState) { self.keys.insert(key, state); } pub fn is_key_pressed(&self, key: Scancode) -> bool { let key = *self.keys.get(&key).unwrap_or(&KeyState::None); if key == KeyState::Down || key == KeyState::Hold { true } else { false } } pub fn mouse_position(&self) -> (i32, i32) { (self.mouse.x, self.mouse.y) } pub(crate) fn set_mouse_position(&mut self, x: i32, y: i32) { self.mouse.x = x; self.mouse.y = y; } pub fn mouse_delta(&self) -> (i32, i32) { (self.mouse.delta_x, self.mouse.delta_y) } pub(crate) fn set_mouse_delta(&mut self, x: i32, y: i32) { self.mouse.delta_x = x; self.mouse.delta_y = y; } pub fn wheel_position(&self) -> (i32, i32) { (self.mouse.wheel_x, self.mouse.wheel_y) } pub(crate) fn set_wheel_position(&mut self, x: i32, y: i32) { self.mouse.wheel_x = x; self.mouse.wheel_y = y; } pub fn wheel_delta(&self) -> (i32, i32) { (self.mouse.wheel_delta_x, self.mouse.wheel_delta_y) } pub(crate) fn 
set_wheel_delta(&mut self, x: i32, y: i32) { self.mouse.wheel_delta_x = x; self.mouse.wheel_delta_y = y; } pub fn mouse_left(&self) -> bool { self.mouse.left_button } pub(crate) fn set_mouse_left(&mut self, value: bool) { self.mouse.left_button = value; } pub fn mouse_middle(&self) -> bool { self.mouse.middle_button } pub(crate) fn set_mouse_middle(&mut self, value: bool) { self.mouse.middle_button = value; } pub fn mouse_right(&self) -> bool { self.mouse.right_button } pub(crate) fn set_mouse_right(&mut self, value: bool) { self.mouse.right_button = value; } pub fn mouse_four(&self) -> bool { self.mouse.mouse_four } pub(crate) fn set_mouse_four(&mut self, value: bool) { self.mouse.mouse_four = value; } pub fn mouse_five(&self) -> bool { self.mouse.mouse_five } pub(crate) fn set_mouse_five(&mut self, value: bool) { self.mouse.mouse_five = value; } }
use std::collections::HashMap; use super::scancode::Scancode; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum KeyState { None, Down, Hold, Up, } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] pub struct MouseState { x: i32, y: i32, delta_x: i32, delta_y: i32, wheel_x: i32, wheel_y: i32, wheel_delta_x: i32, wheel_delta_y: i32, left_button: bool, middle_button: bool, right_button: bool, mouse_four: bool, mouse_five: bool, } #[derive(Clone, PartialEq, Eq, Debug, Default)] pub struct InputState { mouse: MouseState, keys: HashMap<Scancode, KeyState>, } impl InputState { pub fn update(&mut self, mouse_state: MouseState) { self.mouse = mouse_state; self.keys = self .keys .iter() .map(|(scan, state)| { let new_state = match state { KeyState::Down => KeyState::Hold, KeyState::Up => KeyState::None, a => *a, }; (*scan, new_state) }) .collect(); } pub fn key_state(&self, key: Scancode) -> KeyState { *self.keys.get(&key).unwrap_or(&KeyState::None) } pub(crate) fn set_key_state(&mut self, key: Scancode, state: KeyState) { self.keys.insert(key, state); }
pub fn mouse_position(&self) -> (i32, i32) { (self.mouse.x, self.mouse.y) } pub(crate) fn set_mouse_position(&mut self, x: i32, y: i32) { self.mouse.x = x; self.mouse.y = y; } pub fn mouse_delta(&self) -> (i32, i32) { (self.mouse.delta_x, self.mouse.delta_y) } pub(crate) fn set_mouse_delta(&mut self, x: i32, y: i32) { self.mouse.delta_x = x; self.mouse.delta_y = y; } pub fn wheel_position(&self) -> (i32, i32) { (self.mouse.wheel_x, self.mouse.wheel_y) } pub(crate) fn set_wheel_position(&mut self, x: i32, y: i32) { self.mouse.wheel_x = x; self.mouse.wheel_y = y; } pub fn wheel_delta(&self) -> (i32, i32) { (self.mouse.wheel_delta_x, self.mouse.wheel_delta_y) } pub(crate) fn set_wheel_delta(&mut self, x: i32, y: i32) { self.mouse.wheel_delta_x = x; self.mouse.wheel_delta_y = y; } pub fn mouse_left(&self) -> bool { self.mouse.left_button } pub(crate) fn set_mouse_left(&mut self, value: bool) { self.mouse.left_button = value; } pub fn mouse_middle(&self) -> bool { self.mouse.middle_button } pub(crate) fn set_mouse_middle(&mut self, value: bool) { self.mouse.middle_button = value; } pub fn mouse_right(&self) -> bool { self.mouse.right_button } pub(crate) fn set_mouse_right(&mut self, value: bool) { self.mouse.right_button = value; } pub fn mouse_four(&self) -> bool { self.mouse.mouse_four } pub(crate) fn set_mouse_four(&mut self, value: bool) { self.mouse.mouse_four = value; } pub fn mouse_five(&self) -> bool { self.mouse.mouse_five } pub(crate) fn set_mouse_five(&mut self, value: bool) { self.mouse.mouse_five = value; } }
pub fn is_key_pressed(&self, key: Scancode) -> bool { let key = *self.keys.get(&key).unwrap_or(&KeyState::None); if key == KeyState::Down || key == KeyState::Hold { true } else { false } }
function_block-full_function
[ { "content": "fn default_event_handler(state: &mut EngineState, event: &Event) -> EventResult {\n\n match event {\n\n Event::Quit { .. } => return EventResult::Exit,\n\n Event::KeyDown { key, .. } => {\n\n state.inputs.set_key_state(*key, KeyState::Down);\n\n }\n\n Event::KeyUp { key, .. } => {\n\n state.inputs.set_key_state(*key, KeyState::Up);\n\n }\n\n Event::Scroll { x: dx, y: dy, .. } => {\n\n state.inputs.set_wheel_delta(*dx, *dy);\n\n\n\n let (x, y) = state.inputs.wheel_position();\n\n state.inputs.set_wheel_position(x + *dx, y + *dy);\n\n }\n\n _ => (),\n\n }\n\n\n\n EventResult::Ignored\n\n}\n\n\n\n/// attach console print debugging to the provided OpenGL Context\n", "file_path": "lib/engine/src/main_loop.rs", "rank": 0, "score": 118972.0358949166 }, { "content": "fn copy(from: &Path, to: &Path) {\n\n let from_path: PathBuf = from.into();\n\n let to_path: PathBuf = to.into();\n\n\n\n for entry in WalkDir::new(from_path.clone()) {\n\n let entry = entry.unwrap();\n\n\n\n if let Ok(rel_path) = entry.path().strip_prefix(&from_path) {\n\n let target_path = to_path.join(rel_path);\n\n\n\n if entry.file_type().is_dir() {\n\n DirBuilder::new()\n\n .recursive(true)\n\n .create(target_path)\n\n .expect(\"failed to create target dir\");\n\n } else {\n\n fs::copy(entry.path(), &target_path).expect(\"failed to copy\");\n\n }\n\n }\n\n }\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 89585.87098038869 }, { "content": "/// convert between sdl scancodes and the engine's scancode\n\n/// returns an option because the number of scancodes in sdl is a valid scancode\n\n/// which should not be converted to a valid scancode. 
Hopefully it wouldn't\n\n/// be passed in either.\n\nfn convert_scancode(scancode: sdl2::keyboard::Scancode) -> Option<Scancode> {\n\n use sdl2::keyboard::Scancode as SdlCode;\n\n let code = match scancode {\n\n SdlCode::A => Scancode::A,\n\n SdlCode::B => Scancode::B,\n\n SdlCode::C => Scancode::C,\n\n SdlCode::D => Scancode::D,\n\n SdlCode::E => Scancode::E,\n\n SdlCode::F => Scancode::F,\n\n SdlCode::G => Scancode::G,\n\n SdlCode::H => Scancode::H,\n\n SdlCode::I => Scancode::I,\n\n SdlCode::J => Scancode::J,\n\n SdlCode::K => Scancode::K,\n\n SdlCode::L => Scancode::L,\n\n SdlCode::M => Scancode::M,\n\n SdlCode::N => Scancode::N,\n\n SdlCode::O => Scancode::O,\n\n SdlCode::P => Scancode::P,\n\n SdlCode::Q => Scancode::Q,\n", "file_path": "lib/engine/src/window/sdl_window.rs", "rank": 2, "score": 87935.07313807071 }, { "content": "/// The value 1.0 for some defaults\n\nfn default_one() -> f64 {\n\n 1.0\n\n}\n\n/// The alpha blending mode of a material\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"UPPERCASE\")]\n\npub enum MaterialAlphaMode {\n\n /// Alpha ignored, fully opaque\n\n Opaque,\n\n\n\n /// The value is either fully opaque or fully transparant\n\n Mask,\n\n\n\n /// The alpha is used to composite the source and destination areas\n\n Blend,\n\n}\n\n\n\nimpl Default for MaterialAlphaMode {\n\n /// By default alpha is ignored\n\n fn default() -> Self {\n\n MaterialAlphaMode::Opaque\n\n }\n\n}\n\n\n", "file_path": "lib/engine/src/gltf.rs", "rank": 3, "score": 86685.88817643767 }, { "content": "fn convert_mouse_button(button: sdl2::mouse::MouseButton) -> Option<MouseButton> {\n\n use sdl2::mouse::MouseButton as SdlMouse;\n\n let val = match button {\n\n SdlMouse::Left => MouseButton::Left,\n\n SdlMouse::Right => MouseButton::Right,\n\n SdlMouse::Middle => MouseButton::Middle,\n\n SdlMouse::X1 => MouseButton::Four,\n\n SdlMouse::X2 => MouseButton::Five,\n\n SdlMouse::Unknown => return None,\n\n };\n\n\n\n Some(val)\n\n}\n\n\n", "file_path": 
"lib/engine/src/window/sdl_window.rs", "rank": 4, "score": 84703.28082863492 }, { "content": "/// By default the alpha cutoff is 0.5\n\nfn default_alpha_cutoff() -> f64 {\n\n 0.5\n\n}\n\n\n\n/// Infomation about a normal texture\n\n#[derive(Debug, Deserialize)]\n\n#[serde(deny_unknown_fields)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct TextureNormal {\n\n /// The index of the texture\n\n pub index: usize,\n\n\n\n /// The uv coordinates to use for the texture, taken from the mesh primitive\n\n /// attributes, so a value of 0 means the uv coordinates are from the\n\n /// TEXCOORD_0 attribute.\n\n #[serde(default)]\n\n pub tex_coord: usize,\n\n\n\n /// A scalar multiplier applied to each normal vector of the texture.\n\n /// Linearly interpolated.\n", "file_path": "lib/engine/src/gltf.rs", "rank": 5, "score": 84647.880001827 }, { "content": "/// The default color factor is [1.0, 1.0, 1.0, 1.0]\n\nfn default_color_factor() -> [f32; 4] {\n\n [1.0; 4]\n\n}\n\n\n", "file_path": "lib/engine/src/gltf.rs", "rank": 6, "score": 83048.08002451582 }, { "content": "fn is_base_color(prim: &gltf::Primitive, model: &Model, idx: usize) -> bool {\n\n prim.material\n\n .and_then(|mat| model.gltf.materials[mat].pbr_metallic_roughness.as_ref())\n\n .and_then(|pbr| pbr.base_color_texture.as_ref())\n\n .map(|color| color.tex_coord == idx)\n\n .unwrap_or(false)\n\n}\n", "file_path": "lib/engine/src/model.rs", "rank": 7, "score": 80745.38665627241 }, { "content": "#[proc_macro_attribute]\n\npub fn context_globals(attr: TokenStream, input: TokenStream) -> TokenStream {\n\n let attr = parse_macro_input!(attr as Attrs);\n\n let input = parse_macro_input!(input as ItemStruct);\n\n\n\n let name = input.ident.clone();\n\n\n\n let accessor = attr.accessor;\n\n let vis = input.vis.clone();\n\n let mut methods = vec![];\n\n\n\n // create the implementations for all the globals\n\n for global in attr.globals {\n\n // the input name e.g. 
`inputs` is the field name, remove the `s` to\n\n // make it not a plural for the method names\n\n let global_name = global.to_string();\n\n let global_name = &global_name[..global_name.len() - 1];\n\n\n\n // infer the name of the GlobalAllocationContext enum varient\n\n let mut allocation = String::from(\n\n global_name\n", "file_path": "lib/engine_proc_macro/src/lib.rs", "rank": 8, "score": 79651.65474148218 }, { "content": "#[cfg(debug_assertions)]\n\nfn enable_gl_debugging(gl: &gl::Gl) {\n\n let mut flags = 0;\n\n unsafe {\n\n gl.GetIntegerv(gl::CONTEXT_FLAGS, &mut flags);\n\n }\n\n\n\n // Only set the debugging options if debugging enabled on the context\n\n if flags as u32 & gl::CONTEXT_FLAG_DEBUG_BIT == 0 {\n\n return;\n\n }\n\n\n\n unsafe {\n\n // enables debug output\n\n gl.Enable(gl::DEBUG_OUTPUT);\n\n\n\n // ensure that debugging messages are only output on the main thread\n\n // ensures that the log function is called in the same order that the\n\n // messages are generated\n\n gl.Enable(gl::DEBUG_OUTPUT_SYNCHRONOUS);\n\n\n", "file_path": "lib/engine/src/main_loop.rs", "rank": 9, "score": 76283.28747624869 }, { "content": "/// attach console print debugging to the provided OpenGL Context\n\nfn enable_gl_debugging(gl: &gl::Gl) {\n\n let mut flags = 0;\n\n unsafe {\n\n gl.GetIntegerv(gl::CONTEXT_FLAGS, &mut flags);\n\n }\n\n\n\n // Only set the debugging options if debugging enabled on the context\n\n if flags as u32 & gl::CONTEXT_FLAG_DEBUG_BIT == 0 {\n\n return;\n\n }\n\n\n\n unsafe {\n\n // enables debug output\n\n gl.Enable(gl::DEBUG_OUTPUT);\n\n\n\n // ensure that debugging messages are only output on the main thread\n\n // ensures that the log function is called in the same order that the\n\n // messages are generated\n\n gl.Enable(gl::DEBUG_OUTPUT_SYNCHRONOUS);\n\n\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 10, "score": 76282.97434338994 }, { "content": "fn fn_display(\n\n f: &mut Formatter,\n\n func: &Function,\n\n prog: 
&Program,\n\n builtin: BuiltinFunction,\n\n arguments: &[VariableId],\n\n) -> fmt::Result {\n\n let operator = match builtin {\n\n BuiltinFunction::Add => Some(\"+\"),\n\n BuiltinFunction::Div => Some(\"/\"),\n\n BuiltinFunction::Mul => Some(\"*\"),\n\n BuiltinFunction::Sub => Some(\"-\"),\n\n _ => None,\n\n };\n\n\n\n if arguments.len() == 2 {\n\n if let Some(op) = operator {\n\n arguments[0].fmt(f, prog, func)?;\n\n write!(f, \" {} \", op)?;\n\n arguments[1].fmt(f, prog, func)?;\n", "file_path": "lib/engine/src/renderer/shader.rs", "rank": 11, "score": 63728.27824673437 }, { "content": "struct Triangle {\n\n camera: Camera,\n\n model: Model,\n\n}\n\n\n\nimpl Triangle {\n\n fn swap_model(&mut self, state: &mut EngineStateRef) -> Option<()> {\n\n let result = FileDialog::new()\n\n .add_filter(\"glTF Model\", &[\"gltf\", \"glb\"])\n\n .show_open_single_file();\n\n\n\n let path = match result {\n\n Ok(Some(path)) => path,\n\n _ => return None,\n\n };\n\n\n\n let folder = path.parent()?;\n\n let file = path.file_name()?.to_str()?;\n\n\n\n let res = Resources::from_path(&folder);\n", "file_path": "src/main.rs", "rank": 12, "score": 63189.95509089895 }, { "content": "enum Attribute {\n\n Position,\n\n Normal { accessor: usize },\n\n Tangent { accessor: usize },\n\n TexCoord { idx: usize },\n\n VertexColor { accessor: usize, idx: usize },\n\n Joints { accessor: usize, idx: usize },\n\n Weights { accessor: usize, idx: usize },\n\n BaseColor { color: [f32; 4] },\n\n}\n\n\n\nimpl Attribute {\n\n fn from(value: &str, accessor: usize) -> Option<Self> {\n\n let comps: Vec<_> = value.split('_').collect();\n\n\n\n let ty = match comps.as_slice() {\n\n [\"POSITION\"] => Attribute::Position,\n\n [\"NORMAL\"] => Attribute::Normal { accessor },\n\n [\"TANGENT\"] => Attribute::Tangent { accessor },\n\n [\"TEXCOORD\", a] => Attribute::TexCoord {\n", "file_path": "lib/engine/src/model.rs", "rank": 13, "score": 61582.94788050173 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, 
PartialOrd, Ord, Hash)]\n\nenum TextureData {\n\n U8(Vec<u8>),\n\n U16(Vec<u16>),\n\n}\n\n\n\nimpl Texture {\n\n /// Load a named resource image file using the default settings\n\n pub fn from_res_encoding(res: &Resources, name: &str) -> Result<Self, TextureError> {\n\n Self::from_res_encoding_config(res, name, Default::default())\n\n }\n\n\n\n /// Loads a named resource file using the provided settings\n\n /// See [`Self::from_encoding_config`] for more information\n\n pub fn from_res_encoding_config(\n\n res: &Resources,\n\n name: &str,\n\n config: TextureOptions,\n\n ) -> Result<Self, TextureError> {\n\n let data = res\n\n .load_bytes(name)\n", "file_path": "lib/engine/src/texture.rs", "rank": 14, "score": 60440.37000817575 }, { "content": "fn main() {\n\n let out_dir = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let manifest_dir = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap());\n\n\n\n let executable_path = locate_target_dir(&out_dir)\n\n .expect(\"failed to find target dir\")\n\n .join(env::var(\"PROFILE\").unwrap());\n\n\n\n copy(\n\n &manifest_dir.join(\"assets\"),\n\n &executable_path.join(\"assets\"),\n\n )\n\n}\n\n\n", "file_path": "build.rs", "rank": 15, "score": 60183.71497158067 }, { "content": "#[derive(Debug)]\n\nstruct GPUPrimitive {\n\n pipeline: PipelineId,\n\n vertex_count: usize,\n\n base_color_texidx: Option<TextureId>,\n\n indicies: Option<GPUPrimitiveIndexInfo>,\n\n draw_mode: DrawingMode,\n\n culling: bool,\n\n\n\n vertex_buffers: Vec<VertexBufferId>,\n\n vertex_strides: Vec<i32>,\n\n vertex_offsets: Vec<usize>,\n\n}\n\n\n\nimpl GPUPrimitive {\n\n fn new(\n\n prim: &gltf::Primitive,\n\n model: &Model,\n\n renderer: &mut Renderer,\n\n ) -> Result<Self, ModelError> {\n\n let pipeline = Self::create_shader(prim, model)?;\n", "file_path": "lib/engine/src/model.rs", "rank": 16, "score": 59481.951142348815 }, { "content": "#[derive(Debug)]\n\nstruct Buffer {\n\n gl: gl::Gl,\n\n vbo: GLuint,\n\n pub buffer_type: 
GLenum,\n\n}\n\n\n\nimpl Buffer {\n\n fn new(gl: &gl::Gl, buffer_type: GLenum) -> Buffer {\n\n let mut vbo = 0;\n\n unsafe {\n\n gl.GenBuffers(1, &mut vbo);\n\n }\n\n\n\n Buffer {\n\n gl: gl.clone(),\n\n vbo,\n\n buffer_type,\n\n }\n\n }\n\n\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 17, "score": 59481.951142348815 }, { "content": "struct ImguiRenderer {\n\n /// The imgui rendering pipeline, is always the same shader\n\n _program: PipelineId,\n\n\n\n /// The currently avaliable textures for imgui, if imgui needs a texture,\n\n /// then it needs to be in this vec. Item 0 is the font atlas\n\n _textures: Vec<TextureId>,\n\n}\n\n\n\nimpl ImguiRenderer {\n\n fn new(state: &mut EngineStateRef, context: &mut imgui::Context) -> Result<Self> {\n\n // the default shader, translated from imgui's source\n\n let mut program = Program::new(|ctx| {\n\n ctx.vertex(|ctx| {\n\n let projection = ctx.uniform(\"projection\", Type::Mat4);\n\n let position = ctx.input(\"position\", Type::Vec2);\n\n let uv_in = ctx.input(\"uv_in\", Type::Vec2);\n\n let uv_out = ctx.output(\"uv\", Type::Vec2);\n\n let color_in = ctx.input(\"color_in\", Type::Vec4);\n\n let color_out = ctx.output(\"color\", Type::Vec4);\n", "file_path": "lib/engine/src/imgui.rs", "rank": 18, "score": 59477.60923877818 }, { "content": "#[derive(Debug, Error)]\n\nenum GlError {\n\n #[error(\"Error compiling shader:\\n{message}\")]\n\n ShaderCompilation { message: String },\n\n\n\n #[error(\"Error linking shaders:\\n{message}\")]\n\n ShaderLink { message: String },\n\n\n\n #[error(\"Shader code contained nul byte, unable to compile it:\\n{message}\")]\n\n ShaderNullByte { message: NulError },\n\n\n\n #[error(\"Error getting buffer for error message, unable to display error\")]\n\n ErrorBuffer,\n\n\n\n #[error(\"Unable to find a free active texture unit\")]\n\n TextureUnitsFull,\n\n\n\n #[error(\"Cannot bind texture to unbound pipeline\")]\n\n PipelineNotBound,\n\n\n\n #[error(\"Texture is not currently loaded, 
cannot bind it to a pipeline\")]\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 19, "score": 59337.90652137873 }, { "content": "#[derive(Debug, Error)]\n\nenum GlslError {\n\n #[error(\"Unable to represent the type {ty} in glsl\")]\n\n UnreprsentableType { ty: Type },\n\n}\n\n\n\npub(super) struct GlslCode {\n\n pub(super) vert: Option<String>,\n\n pub(super) frag: Option<String>,\n\n}\n\n\n\nimpl Program {\n\n pub(super) fn to_glsl(&mut self) -> Result<GlslCode, anyhow::Error> {\n\n self.glsl_verification()?;\n\n\n\n Ok(GlslCode {\n\n vert: self.vert_shader()?,\n\n frag: self.frag_shader()?,\n\n })\n\n }\n\n\n", "file_path": "lib/engine/src/renderer/glsl.rs", "rank": 20, "score": 59337.90652137873 }, { "content": "fn main() {\n\n if let Err(e) = run() {\n\n println!(\"{}\", e);\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 21, "score": 58780.57959926494 }, { "content": "#[derive(Debug)]\n\nstruct Attrs {\n\n accessor: Expr,\n\n globals: Vec<Ident>,\n\n}\n\n\n\nimpl Parse for Attrs {\n\n /// parses `$ident => #($ident,)+`\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n let accessor = input.parse()?;\n\n\n\n input.parse::<Token![=]>()?;\n\n input.parse::<Token![>]>()?;\n\n\n\n let globals = Punctuated::<Ident, Token![,]>::parse_terminated(input)?;\n\n\n\n Ok(Self {\n\n accessor,\n\n globals: globals.into_iter().collect(),\n\n })\n\n }\n\n}\n\n\n", "file_path": "lib/engine_proc_macro/src/lib.rs", "rank": 22, "score": 58395.59874055986 }, { "content": "#[derive(Debug)]\n\nstruct FragmentShader {\n\n main: usize,\n\n}\n\n\n\n/// A single function in a shader program, either a shader main function or\n\n/// a utility function\n\n#[derive(Debug)]\n\npub struct Function {\n\n blocks: Vec<Block>,\n\n vars: FunctionVars,\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/shader.rs", "rank": 23, "score": 58395.59874055986 }, { "content": "#[derive(Debug)]\n\nstruct FunctionVars {\n\n locals: Vec<Variable>,\n\n outputs: 
Vec<Variable>,\n\n inputs: Vec<Variable>,\n\n}\n\n\n\n#[context_globals(function.vars => inputs, outputs)]\n\npub struct FunctionContext<'a, 'b> {\n\n program: &'a mut ProgramContext,\n\n function: &'b mut Function,\n\n}\n\n\n\n/// An ssa basic block, contains no control flow, all jumps will be at the end\n\n/// of the block, all entry will be at the start of the block\n\n#[derive(Debug)]\n\npub struct Block {\n\n statements: Vec<Statement>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "lib/engine/src/renderer/shader.rs", "rank": 24, "score": 58395.59874055986 }, { "content": "#[derive(Debug)]\n\nstruct VertexShader {\n\n main: usize,\n\n}\n\n\n\n/// A fragment shader's input/output descriptions\n", "file_path": "lib/engine/src/renderer/shader.rs", "rank": 25, "score": 58395.59874055986 }, { "content": "struct GlPipeline {\n\n gl: gl::Gl,\n\n program_id: GLuint,\n\n vao: GLuint,\n\n pipeline: Program,\n\n\n\n is_bound: bool,\n\n}\n\n\n\nimpl GlPipeline {\n\n fn new(mut pipeline: Program, gl: gl::Gl) -> Result<Self> {\n\n let shaders = pipeline.to_glsl()?;\n\n\n\n let shaders = vec![\n\n (shaders.vert, gl::VERTEX_SHADER),\n\n (shaders.frag, gl::FRAGMENT_SHADER),\n\n ];\n\n\n\n // convert shader source code into gl shader ids\n\n let shaders = shaders\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 26, "score": 58391.256836989225 }, { "content": "#[derive(Error, Debug)]\n\nenum SdlError {\n\n #[error(\"Error while initialising SDL2: {reason}\")]\n\n Init { reason: String },\n\n\n\n #[error(\"Error while initialising video subsystem: {reason}\")]\n\n Video { reason: String },\n\n\n\n #[error(\"Error while initialising OpenGl Context: {reason}\")]\n\n GlContext { reason: String },\n\n\n\n #[error(\"Error while initialising SLD2 event pump: {reason}\")]\n\n Event { reason: String },\n\n\n\n #[error(\"Error while creating a cursor: {reason}\")]\n\n Cursor { reason: String },\n\n}\n\n\n\n/// Stores sdl state required\n\npub struct SdlWindow {\n\n /// The 
initialised sdl library\n", "file_path": "lib/engine/src/window/sdl_window.rs", "rank": 27, "score": 58313.19145130375 }, { "content": "/// A single render layer\n\npub trait Layer {\n\n /// Create a new instance of the layer, depending on the current game engine\n\n /// state.\n\n fn new(state: &mut EngineStateRef) -> Result<Self>\n\n where\n\n Self: Sized;\n\n\n\n /// Process a single input event.\n\n fn handle_event(&mut self, state: &mut EngineStateRef, event: &Event) -> EventResult {\n\n let _ = state;\n\n let _ = event;\n\n EventResult::Ignored\n\n }\n\n\n\n /// Physics update function, called with a fixed dt, shouldn't change between\n\n /// update calls. Can be called multiple times per render.\n\n /// dt: delta time, the period of time for this update in seconds\n\n fn update(&mut self, state: &mut EngineStateRef, dt: f32);\n\n\n\n /// Run the rendering for this layer\n", "file_path": "lib/engine/src/layer.rs", "rank": 28, "score": 57992.087675962 }, { "content": "fn main() {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let mut file_gl = File::create(&Path::new(&out_dir).join(\"bindings.rs\")).unwrap();\n\n\n\n let reg = Registry::new(\n\n Api::Gl,\n\n (4, 5),\n\n Profile::Core,\n\n Fallbacks::All,\n\n [\"GL_NV_command_list\"],\n\n );\n\n\n\n if env::var(\"CARGO_FEATURE_DEBUG\").is_ok() {\n\n reg.write_bindings(DebugStructGenerator, &mut file_gl)\n\n .unwrap();\n\n } else {\n\n reg.write_bindings(StructGenerator, &mut file_gl).unwrap();\n\n }\n\n}\n", "file_path": "lib/gl/build.rs", "rank": 29, "score": 57471.743424178145 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\nstruct TripleConfig {\n\n /// The index of the most recently written state\n\n idx1: u32,\n\n\n\n /// The index of the second most recently written state\n\n idx2: u32,\n\n\n\n /// The index of the oldest state\n\n idx3: u32,\n\n\n\n /// The index of the state that is currently locked for reading. 
If no\n\n /// state is locked, contains 3\n\n read: u32,\n\n\n\n /// The index of the state that is currently locked for writing. If no\n\n /// state is locked, contains 3\n\n write: u32,\n\n}\n\n\n\nimpl TripleConfig {\n", "file_path": "lib/engine/src/scene/triple_buffer.rs", "rank": 30, "score": 57390.50692636476 }, { "content": "#[derive(Debug)]\n\nstruct GPUPrimitiveIndexInfo {\n\n buffer: IndexBufferId,\n\n item_type: IndexType,\n\n count: usize,\n\n offset: usize,\n\n}\n\n\n", "file_path": "lib/engine/src/model.rs", "rank": 31, "score": 57373.60519169898 }, { "content": "/// Wrapper around the system clipboard\n\npub trait Clipboard {\n\n /// try to get a string out of the clipboard\n\n fn get(&mut self) -> Option<String>;\n\n\n\n /// set the string stored by the clipboard\n\n fn set(&mut self, data: &str);\n\n}\n\n\n\n/// The default set of operating system cursors\n\n#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum SystemCursors {\n\n Arrow,\n\n TextInput,\n\n ResizeAll,\n\n ResizeNS,\n\n ResizeEW,\n\n ResizeNESW,\n\n ResizeNWSE,\n\n Hand,\n\n NotAllowed,\n", "file_path": "lib/engine/src/window/window.rs", "rank": 32, "score": 56902.84236833156 }, { "content": "/// The implementation of a windowing system, should probably also handle multi-\n\n/// window support in the future, so is supposed to be a single global instance\n\n/// rather than one per window.\n\npub trait Window {\n\n /// initialise the graphics library\n\n fn new(config: WindowConfig) -> Result<Self>\n\n where\n\n Self: Sized;\n\n\n\n /// try to create a new opengl context\n\n fn new_gl_context(&mut self) -> Result<gl::Gl>;\n\n\n\n /// get a function that can be used as an opengl function loader\n\n fn gl_loader(&self, name: &'static str) -> *const ::std::os::raw::c_void;\n\n\n\n /// try to get a new event from the windows active, if there are no events\n\n /// that need to be processed, it returns None\n\n fn event(&mut self) -> Option<Event>;\n\n\n\n /// Allow 
the window to capture the mouse, e.g. for first person camera support\n\n fn set_mouse_mode(&mut self, mode: MouseGrabMode);\n\n\n\n /// Take the last frames mouse state and update it with the current frames\n", "file_path": "lib/engine/src/window/window.rs", "rank": 33, "score": 56902.84236833156 }, { "content": "/// The methods required for each renderer backend to implement\n\npub trait RendererBackend {\n\n /// Clear the screen to the specified color\n\n fn clear(&mut self, r: f32, g: f32, b: f32);\n\n\n\n /// Set the viewport size\n\n fn viewport(&mut self, width: u32, height: u32);\n\n\n\n /// Enable or disable backface culling\n\n fn backface_culling(&mut self, enable: CullingMode);\n\n\n\n fn depth_testing(&mut self, mode: DepthTesting);\n\n\n\n /// Load a new texture\n\n fn load_texture(&mut self, texture: Texture) -> TextureId;\n\n\n\n /// Unload a texture\n\n fn unload_texture(&mut self, texture: TextureId);\n\n\n\n /// Load data as a vertex buffer\n\n fn load_vertex_buffer(&mut self, data: &[u8]) -> VertexBufferId;\n", "file_path": "lib/engine/src/renderer/backend.rs", "rank": 34, "score": 55878.127298256586 }, { "content": "fn run() -> Result<()> {\n\n MainLoop::new::<SdlWindow, engine::imgui::ImguiLayer<Triangle>>()?.run()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 35, "score": 55523.318754474676 }, { "content": "fn write_builtin_call(\n\n shader: &mut String,\n\n prog: &Program,\n\n func: &Function,\n\n function: &BuiltinFunction,\n\n arguments: &[VariableId],\n\n result: &Option<VariableId>,\n\n) {\n\n if let Some(result) = result {\n\n write_variable_new(shader, prog, func, *result);\n\n shader.push_str(\" = \");\n\n } else {\n\n shader.push_str(\" \");\n\n }\n\n\n\n if let Some(op) = match function {\n\n BuiltinFunction::Add => Some(\"+\"),\n\n BuiltinFunction::Div => Some(\"/\"),\n\n BuiltinFunction::Mul => Some(\"*\"),\n\n BuiltinFunction::Sub => Some(\"-\"),\n", "file_path": "lib/engine/src/renderer/glsl.rs", "rank": 36, 
"score": 53011.79347382776 }, { "content": "// process a single sparse accessor\n\nfn process_accessor<F>(\n\n // a function that converts bytes to usize depending upon the type of\n\n // indicies specified\n\n to_usize: F,\n\n\n\n // the number of bytes to pass to the to_usize function\n\n idx_size: usize,\n\n\n\n // number of bytes in each substituted value\n\n value_size: usize,\n\n\n\n // the data to apply the accessor to\n\n data: &mut [u8],\n\n\n\n // the indicies into the data to apply values at\n\n indicies: &[u8],\n\n\n\n // the values to be applied\n\n values: &[u8],\n\n\n", "file_path": "lib/engine/src/model.rs", "rank": 37, "score": 52076.24878552937 }, { "content": "fn get_variable<'a>(\n\n variable: VariableId,\n\n prog: &'a mut ProgramContext,\n\n vars: &'a mut FunctionVars,\n\n) -> &'a mut Variable {\n\n match variable.kind {\n\n VariableAllocationContext::Local => &mut vars.locals[variable.id],\n\n VariableAllocationContext::Uniform => &mut prog.program.uniforms[variable.id],\n\n VariableAllocationContext::Input => &mut vars.inputs[variable.id],\n\n VariableAllocationContext::Output => &mut vars.outputs[variable.id],\n\n }\n\n}\n\n\n\nimpl Block {\n\n fn type_check(&self, prog: &mut ProgramContext, vars: &mut FunctionVars) {\n\n for statement in &self.statements {\n\n match statement {\n\n Statement::CallBuiltin {\n\n function,\n\n arguments,\n", "file_path": "lib/engine/src/renderer/shader.rs", "rank": 38, "score": 52076.24878552937 }, { "content": "fn global_output<'a>(\n\n out: &mut String,\n\n kind: &str,\n\n vars: impl Iterator<Item = &'a Variable>,\n\n) -> Result<(), GlslError> {\n\n for var in vars {\n\n out.push_str(&format!(\"{} {} {};\\n\", kind, var.ty.to_glsl(), var.name));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/glsl.rs", "rank": 39, "score": 52076.24878552937 }, { "content": "struct ImguiClipboard(Box<dyn Clipboard>);\n\n\n\nimpl imgui::ClipboardBackend for ImguiClipboard {\n\n fn get(&mut self) -> 
Option<imgui::ImString> {\n\n self.0.get().map(imgui::ImString::new)\n\n }\n\n\n\n fn set(&mut self, value: &imgui::ImStr) {\n\n self.0.set(value.to_str());\n\n }\n\n}\n\n\n", "file_path": "lib/engine/src/imgui.rs", "rank": 40, "score": 50520.874609127044 }, { "content": "struct SdlClipboard(sdl2::clipboard::ClipboardUtil);\n\n\n\nimpl Clipboard for SdlClipboard {\n\n fn get(&mut self) -> Option<String> {\n\n if self.0.has_clipboard_text() {\n\n self.0.clipboard_text().ok()\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn set(&mut self, data: &str) {\n\n // assume that the user doesn't care if setting the clipboard fails\n\n let _ = self.0.set_clipboard_text(data);\n\n }\n\n}\n\n\n", "file_path": "lib/engine/src/window/sdl_window.rs", "rank": 41, "score": 47937.41109117925 }, { "content": "fn wrap_gl(wrap: WrappingMode) -> GLint {\n\n match wrap {\n\n WrappingMode::Repeat => gl::REPEAT as _,\n\n WrappingMode::MirroredRepeat => gl::MIRRORED_REPEAT as _,\n\n WrappingMode::ClampToEdge => gl::CLAMP_TO_EDGE as _,\n\n }\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 42, "score": 44611.4838909096 }, { "content": "// find the target directory\n\nfn locate_target_dir(mut target_dir: &Path) -> Option<&Path> {\n\n loop {\n\n if target_dir.ends_with(\"target\") {\n\n return Some(target_dir);\n\n }\n\n\n\n target_dir = match target_dir.parent() {\n\n Some(path) => path,\n\n None => break,\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "build.rs", "rank": 43, "score": 44162.421611859834 }, { "content": "/// increase a value up to the next multiple of the alignment\n\nfn align_up(val: usize, align: usize) -> usize {\n\n let remainder = val % align;\n\n if remainder > 0 {\n\n val + (align - remainder)\n\n } else {\n\n val\n\n }\n\n}\n\n\n\n/// Describes a strongly typed view into a buffer view's raw binary data.\n\n/// In OpenGL is used to call vertexAttribPointer correctly\n\n#[derive(Debug, Deserialize, 
Clone)]\n\n#[serde(deny_unknown_fields)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Accessor {\n\n /// The buffer view containing the data described in the accessor.\n\n /// If none, the data should be all zeros. This can be changed by\n\n /// extensions or the sparse property\n\n pub buffer_view: Option<usize>,\n\n\n", "file_path": "lib/engine/src/gltf.rs", "rank": 44, "score": 44162.421611859834 }, { "content": "fn mag_filter_gl(mag: MagFilter) -> GLint {\n\n match mag {\n\n MagFilter::Linear => gl::LINEAR as _,\n\n MagFilter::Nearest => gl::NEAREST as _,\n\n }\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 45, "score": 43846.088384800256 }, { "content": "fn format_gl(format: TextureSourceFormat) -> GLenum {\n\n match format {\n\n TextureSourceFormat::R => gl::RED,\n\n TextureSourceFormat::RG => gl::RG,\n\n TextureSourceFormat::RGB => gl::RGB,\n\n TextureSourceFormat::BGR => gl::BGR,\n\n TextureSourceFormat::RGBA => gl::RGBA,\n\n TextureSourceFormat::BGRA => gl::BGRA,\n\n }\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 46, "score": 43846.088384800256 }, { "content": "fn min_filter_gl(min: MinFilter) -> GLint {\n\n match min {\n\n MinFilter::Nearest => gl::NEAREST as _,\n\n MinFilter::Linear => gl::LINEAR as _,\n\n MinFilter::NearestMipmapNearest => gl::NEAREST_MIPMAP_NEAREST as _,\n\n MinFilter::LinearMipmapNearest => gl::LINEAR_MIPMAP_NEAREST as _,\n\n MinFilter::NearestMipmapLinear => gl::NEAREST_MIPMAP_LINEAR as _,\n\n MinFilter::LinearMipmapLinear => gl::LINEAR_MIPMAP_LINEAR as _,\n\n }\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 47, "score": 43846.088384800256 }, { "content": "fn internal_format_gl(source: TextureStorageType) -> GLint {\n\n match source {\n\n TextureStorageType::R => gl::RED as _,\n\n TextureStorageType::RG => gl::RG as _,\n\n TextureStorageType::RGB => gl::RGB as _,\n\n TextureStorageType::SRGB => gl::SRGB as _,\n\n TextureStorageType::RGBA => gl::RGBA as _,\n\n 
TextureStorageType::SRGBA => gl::SRGB_ALPHA as _,\n\n }\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 48, "score": 43119.194412052675 }, { "content": "fn texture_type_gl(ty: TextureSourceType) -> GLenum {\n\n match ty {\n\n TextureSourceType::U8 => gl::UNSIGNED_BYTE,\n\n TextureSourceType::I8 => gl::BYTE,\n\n TextureSourceType::U16 => gl::UNSIGNED_SHORT,\n\n TextureSourceType::I16 => gl::SHORT,\n\n TextureSourceType::U32 => gl::UNSIGNED_INT,\n\n TextureSourceType::I32 => gl::INT,\n\n TextureSourceType::F32 => gl::FLOAT,\n\n }\n\n}\n\n\n\npub struct BoundGlTexture<'a> {\n\n tex: &'a GlTexture,\n\n index: GLuint,\n\n}\n\n\n\nimpl<'a> BoundGlTexture<'a> {\n\n fn new(tex: &'a GlTexture, index: GLuint) -> Self {\n\n unsafe {\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 49, "score": 43119.194412052675 }, { "content": "// normalise path differences between windows and linux\n\nfn resource_name_to_path(root_dir: &Path, location: &str) -> PathBuf {\n\n let mut path: PathBuf = root_dir.into();\n\n\n\n for part in location.split('/') {\n\n path = path.join(part);\n\n }\n\n\n\n path\n\n}\n", "file_path": "lib/engine/src/resources.rs", "rank": 50, "score": 41010.91626196522 }, { "content": "/// Create a space filled CString of given length\n\nfn create_whitespace_cstring(len: usize) -> Result<CString, GlError> {\n\n let mut buffer: Vec<u8> = Vec::with_capacity(len + 1);\n\n buffer.extend([b' '].iter().cycle().take(len));\n\n CString::new(buffer).map_err(|_| GlError::ErrorBuffer)\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 51, "score": 40319.68999517981 }, { "content": "/// Try to convert an sdl event into this engine's event type\n\nfn event_from_sdl_event(event: &sdl2::event::Event) -> Option<Event> {\n\n use sdl2::event::{Event as SdlEvent, WindowEvent};\n\n\n\n match event {\n\n // window events are a seperate enum in sdl, but flattened in the engine\n\n // to make it easier to pattern match against\n\n 
SdlEvent::Window { win_event, .. } => match win_event {\n\n WindowEvent::FocusGained => Some(Event::FocusGained),\n\n WindowEvent::FocusLost => Some(Event::FocusLost),\n\n WindowEvent::Resized(width, height) => Some(Event::Resize {\n\n width: *width as u32,\n\n height: *height as u32,\n\n }),\n\n _ => None,\n\n },\n\n\n\n SdlEvent::KeyDown {\n\n scancode: Some(scancode),\n\n ..\n\n } => Some(Event::KeyDown {\n", "file_path": "lib/engine/src/window/sdl_window.rs", "rank": 52, "score": 39473.90930246537 }, { "content": "fn write_func(shader: &mut String, prog: &Program, func: &Function) {\n\n for block in func.blocks() {\n\n for statement in block.statements() {\n\n match statement {\n\n Statement::CallBuiltin {\n\n function,\n\n arguments,\n\n result,\n\n } => {\n\n write_builtin_call(shader, prog, func, function, arguments, result);\n\n }\n\n Statement::MakeFloat { value, variable } => {\n\n write_variable_new(shader, prog, func, *variable);\n\n shader.push_str(\" = \");\n\n shader.push_str(&format!(\"{:.20}\", value));\n\n shader.push_str(\";\\n\");\n\n }\n\n Statement::SetBuiltinVariable { variable, value } => {\n\n shader.push_str(\" \");\n\n shader.push_str(&variable.to_string());\n", "file_path": "lib/engine/src/renderer/glsl.rs", "rank": 53, "score": 39423.915676459816 }, { "content": "fn write_variable_name(shader: &mut String, var: &Variable, id: usize) {\n\n if var.name.is_empty() {\n\n shader.push_str(&format!(\"var_{}\", id));\n\n } else {\n\n shader.push_str(&var.name);\n\n }\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/glsl.rs", "rank": 54, "score": 38732.689409674414 }, { "content": "fn print_function_header(f: &mut Formatter, inp: &[Variable], out: &[Variable]) -> fmt::Result {\n\n write!(f, \"(\")?;\n\n if inp.is_empty() {\n\n write!(f, \") \")?;\n\n } else {\n\n writeln!(f, \"\")?;\n\n for input in inp {\n\n writeln!(f, \" {},\", input.to_string(\"\")?)?;\n\n }\n\n write!(f, \" ) \")?;\n\n }\n\n\n\n if out.len() == 1 {\n\n write!(f, \"-> {} 
\", out[0].to_string(\"\")?)?;\n\n } else if out.len() > 1 {\n\n writeln!(f, \"-> (\")?;\n\n for output in out {\n\n writeln!(f, \" {},\", output.to_string(\"\")?)?;\n\n }\n\n write!(f, \" ) \")?;\n", "file_path": "lib/engine/src/renderer/shader.rs", "rank": 55, "score": 37494.90655692082 }, { "content": "/// A single button on the keyboard.\n\n/// Not useful for text input, use the text input event for that.\n\n/// See https://wiki.libsdl.org/SDL_Keycode\n\n/// See https://usb.org/sites/default/files/hut1_21.pdf\n\n#[derive(Copy, Clone, Eq, PartialEq, Hash, PartialOrd, Ord, Debug)]\n\npub enum Scancode {\n\n // Keyboard Letter keys\n\n A,\n\n B,\n\n C,\n\n D,\n\n E,\n\n F,\n\n G,\n\n H,\n\n I,\n\n J,\n\n K,\n\n L,\n\n M,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 56, "score": 36745.561390049545 }, { "content": " N,\n\n O,\n\n P,\n\n Q,\n\n R,\n\n S,\n\n T,\n\n U,\n\n V,\n\n W,\n\n X,\n\n Y,\n\n Z,\n\n\n\n // Numeric keys above the main keys\n\n Zero,\n\n One,\n\n Two,\n\n Three,\n\n Four,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 57, "score": 36728.2779759835 }, { "content": "\n\n // Control keys\n\n Backspace,\n\n CapsLock,\n\n Copy,\n\n Cut,\n\n Delete,\n\n End,\n\n Escape,\n\n Home,\n\n Insert,\n\n LeftAlt,\n\n LeftControl,\n\n LeftShift,\n\n LeftMeta,\n\n Menu,\n\n NumLock,\n\n PageDown,\n\n PageUp,\n\n Paste,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 58, "score": 36726.96062754433 }, { "content": " Equals,\n\n Grave,\n\n LeftSquareBracket,\n\n LeftBracket,\n\n Dash,\n\n Period,\n\n RightSquareBracket,\n\n RightBracket,\n\n SemiColon,\n\n Slash,\n\n Space,\n\n Tab,\n\n AltHash,\n\n AltBackslash,\n\n\n\n // Arrow keys\n\n Down,\n\n Left,\n\n Right,\n\n Up,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 59, "score": 36726.85140805286 }, { "content": " // Keypad hexadecimal number keys\n\n KpA,\n\n KpB,\n\n KpC,\n\n KpD,\n\n KpE,\n\n KpF,\n\n\n\n // Keypad symbols\n\n KpAnd,\n\n 
KpAndAnd,\n\n KpAt,\n\n KpCaret,\n\n KpColon,\n\n KpComma,\n\n KpDash,\n\n KpEquals,\n\n KpEqualsAs400,\n\n KpExclamation,\n\n KpHash,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 60, "score": 36726.19184174398 }, { "content": " // Brightness keys\n\n BrightnessDown,\n\n BrightnessUp,\n\n\n\n // Keyboard illumination keys\n\n KeyboardIllumDown,\n\n KeyboardIllumUp,\n\n KeyboardIllumToggle,\n\n\n\n // Miscellaneous keys\n\n Again,\n\n AltErase,\n\n Calculator,\n\n Cancel,\n\n Clear,\n\n ClearAgain,\n\n Computer,\n\n CrSel,\n\n CurrencySubUnit,\n\n CurrencyUnit,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 61, "score": 36725.05536465636 }, { "content": " InternationalFour,\n\n InternationalFive,\n\n InternationalSix,\n\n InternationalSeven,\n\n InternationalEight,\n\n InternationalNine,\n\n\n\n // Language keys\n\n LangOne,\n\n LangTwo,\n\n LangThree,\n\n LangFour,\n\n LangFive,\n\n LangSix,\n\n LangSeven,\n\n LangEight,\n\n LangNine,\n\n\n\n // Audio keys\n\n AudioMute,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 62, "score": 36724.38713589428 }, { "content": " F10,\n\n F11,\n\n F12,\n\n F13,\n\n F14,\n\n F15,\n\n F16,\n\n F17,\n\n F18,\n\n F19,\n\n F20,\n\n F21,\n\n F22,\n\n F23,\n\n F24,\n\n\n\n // International keys\n\n InternationalOne,\n\n InternationalTwo,\n\n InternationalThree,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 63, "score": 36723.8201792083 }, { "content": " PrintScreen,\n\n RightAlt,\n\n RightControl,\n\n Return,\n\n Return2,\n\n RightMeta,\n\n RightShift,\n\n ScrollLock,\n\n Undo,\n\n\n\n // Function keys\n\n F1,\n\n F2,\n\n F3,\n\n F4,\n\n F5,\n\n F6,\n\n F7,\n\n F8,\n\n F9,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 64, "score": 36723.702522994456 }, { "content": " AudioNext,\n\n AudioPlay,\n\n AudioPrev,\n\n AudioStop,\n\n Mute,\n\n VolumeDown,\n\n VolumeUp,\n\n\n\n // Application keys\n\n Application,\n\n App1,\n\n App2,\n\n AppBack,\n\n AppBookmark,\n\n 
AppForward,\n\n AppHome,\n\n AppRefresh,\n\n AppSearch,\n\n AppStop,\n\n\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 65, "score": 36723.55781783467 }, { "content": " Five,\n\n Six,\n\n Seven,\n\n Eight,\n\n Nine,\n\n\n\n // Keypad numeric keys\n\n KpZero,\n\n KpOne,\n\n KpTwo,\n\n KpThree,\n\n KpFour,\n\n KpFive,\n\n KpSix,\n\n KpSeven,\n\n KpEight,\n\n KpNine,\n\n KpZeroZero,\n\n KpZeroZeroZero,\n\n\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 66, "score": 36723.394030481126 }, { "content": " KpClear,\n\n KpClearEntry,\n\n KpEnter,\n\n KpMemAdd,\n\n KpMemClear,\n\n KpMemDivide,\n\n KpMemMultiply,\n\n KpMemRecall,\n\n KpMemSubtract,\n\n KpMemStore,\n\n KpBinary,\n\n KpOctal,\n\n KpDecimal,\n\n KpHex,\n\n KpPower,\n\n\n\n // Symbol Keys\n\n Apostrophe,\n\n Backslash,\n\n Comma,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 67, "score": 36723.16497136202 }, { "content": " DecimalSeparator,\n\n DisplaySwitch,\n\n Eject,\n\n ExSel,\n\n Find,\n\n Help,\n\n Mail,\n\n MediaSelect,\n\n Mode,\n\n ModeSwitch,\n\n Oper,\n\n Out,\n\n Power,\n\n Prior,\n\n Select,\n\n Separator,\n\n Sleep,\n\n Stop,\n\n SysReq,\n\n ThousandsSeparator,\n\n WorldWideWeb,\n\n Unknown,\n\n Pause,\n\n Execute,\n\n}\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 68, "score": 36720.43820457556 }, { "content": " KpLeftBrace,\n\n KpLeftParen,\n\n KpLess,\n\n KpPercent,\n\n KpPeriod,\n\n KpPipe,\n\n KpPipePipe,\n\n KpPlusMinus,\n\n KpRightBrace,\n\n KpRightParen,\n\n KpSlash,\n\n KpSpace,\n\n KpStar,\n\n KpTab,\n\n KpXor,\n\n KpPlus,\n\n KpGreater,\n\n\n\n // Keypad control characters\n\n KpBackspace,\n", "file_path": "lib/engine/src/window/scancode.rs", "rank": 69, "score": 36720.43820457556 }, { "content": "/// Creates an OpenGl shader program from shader IDs\n\nfn program_from_shaders(gl: &gl::Gl, shaders: &[GLuint]) -> Result<GLuint, GlError> {\n\n let program_id = unsafe { gl.CreateProgram() };\n\n\n\n for &shader in shaders {\n\n 
unsafe { gl.AttachShader(program_id, shader) }\n\n }\n\n\n\n unsafe { gl.LinkProgram(program_id) };\n\n\n\n let mut success = 1;\n\n unsafe {\n\n gl.GetProgramiv(program_id, gl::LINK_STATUS, &mut success);\n\n }\n\n\n\n if success == 0 {\n\n let mut len = 0;\n\n unsafe {\n\n gl.GetProgramiv(program_id, gl::INFO_LOG_LENGTH, &mut len);\n\n }\n\n\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 70, "score": 36193.64410928416 }, { "content": "fn write_variable_get(shader: &mut String, prog: &Program, func: &Function, variable: VariableId) {\n\n write_variable_name(shader, prog.get_variable(func, variable), variable.id());\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/glsl.rs", "rank": 71, "score": 35610.781291169915 }, { "content": "fn write_variable_new(shader: &mut String, prog: &Program, func: &Function, variable: VariableId) {\n\n shader.push_str(\" \");\n\n\n\n let variable_ref = prog.get_variable(func, variable);\n\n shader.push_str(&variable_ref.ty.to_glsl());\n\n shader.push_str(\" \");\n\n write_variable_name(shader, variable_ref, variable.id());\n\n}\n\n\n", "file_path": "lib/engine/src/renderer/glsl.rs", "rank": 72, "score": 35610.781291169915 }, { "content": "fn var_display(f: &mut Formatter, prefix: &str, id: usize, var: &Variable) -> fmt::Result {\n\n if var.name.is_empty() {\n\n write!(f, \"{}{}: {}\", prefix, id, var.ty)?;\n\n } else {\n\n let has_whitespace = var.name.chars().any(char::is_whitespace);\n\n\n\n if has_whitespace {\n\n write!(f, \"{}\\\"{}\\\": {}\", prefix, var.name, var.ty)?;\n\n } else {\n\n write!(f, \"{}{}: {}\", prefix, var.name, var.ty)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl VariableId {\n\n fn fmt(&self, f: &mut Formatter, prog: &Program, func: &Function) -> fmt::Result {\n\n match self.kind {\n\n VariableAllocationContext::Local => {\n", "file_path": "lib/engine/src/renderer/shader.rs", "rank": 73, "score": 35217.03991554893 }, { "content": "/// Create a new OpenGL shader from glsl source code\n\nfn 
shader_from_source(gl: &gl::Gl, source: &CStr, kind: GLuint) -> Result<GLuint, GlError> {\n\n let id = unsafe { gl.CreateShader(kind) };\n\n\n\n unsafe {\n\n gl.ShaderSource(id, 1, &source.as_ptr(), std::ptr::null());\n\n gl.CompileShader(id);\n\n }\n\n\n\n let mut success: GLint = 1;\n\n unsafe {\n\n gl.GetShaderiv(id, gl::COMPILE_STATUS, &mut success);\n\n }\n\n\n\n if success == 0 {\n\n let mut len = 0;\n\n unsafe {\n\n gl.GetShaderiv(id, gl::INFO_LOG_LENGTH, &mut len);\n\n }\n\n\n\n let error = create_whitespace_cstring(len as usize)?;\n", "file_path": "lib/engine/src/renderer/gl.rs", "rank": 74, "score": 33446.343614715784 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum TextureSourceType {\n\n U8,\n\n I8,\n\n U16,\n\n I16,\n\n U32,\n\n I32,\n\n F32,\n\n}\n\n\n\nimpl Default for TextureSourceType {\n\n fn default() -> Self {\n\n Self::U8\n\n }\n\n}\n\n\n\n/// The format of the GPU storage buffer requested\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum TextureStorageType {\n", "file_path": "lib/engine/src/texture.rs", "rank": 77, "score": 33.94737858435794 }, { "content": " Middle,\n\n Four,\n\n Five,\n\n}\n\n\n\n/// What state a mouse button is\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum MouseButtonState {\n\n Pressed,\n\n Released,\n\n}\n", "file_path": "lib/engine/src/window/event.rs", "rank": 81, "score": 30.411129568629864 }, { "content": " /// One of the mouse buttons was pressed or released\n\n MouseButton {\n\n button: MouseButton,\n\n state: MouseButtonState,\n\n },\n\n /// Text was typed by the user\n\n TextInput { text: String },\n\n\n\n /// The window's quit button was pressed\n\n Quit,\n\n\n\n /// The mouse scroll wheel was moved\n\n Scroll { x: i32, y: i32 },\n\n}\n\n\n\n/// All tracked mouse buttons\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum MouseButton {\n\n Left,\n\n Right,\n", 
"file_path": "lib/engine/src/window/event.rs", "rank": 82, "score": 29.117739425926132 }, { "content": " BadDecodeFormat { ty: TextureSourceType },\n\n}\n\n\n\n/// Filtering mode to use when increasing the size of a texture\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum MagFilter {\n\n /// uses the weighted linear blend between nearest adjacent samples\n\n Linear,\n\n\n\n /// uses the texel closest to the texture coordinate provided\n\n Nearest,\n\n}\n\n\n\nimpl Default for MagFilter {\n\n fn default() -> Self {\n\n Self::Nearest\n\n }\n\n}\n\n\n\n/// The filtering settings for decreasing the size of an image, e.g.\n", "file_path": "lib/engine/src/texture.rs", "rank": 83, "score": 28.674092129987933 }, { "content": " SRGBA,\n\n}\n\n\n\nimpl Default for TextureStorageType {\n\n fn default() -> Self {\n\n Self::RGBA\n\n }\n\n}\n\n\n\n/// The options related to texture loading\n\n#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct TextureOptions {\n\n /// How the 's' uv coordinate wraps\n\n pub wrap_s: WrappingMode,\n\n\n\n /// How the 't' uv coordinate wraps\n\n pub wrap_t: WrappingMode,\n\n\n\n /// Minification filtering setting\n\n pub min_filter: MinFilter,\n", "file_path": "lib/engine/src/texture.rs", "rank": 84, "score": 28.376153976802463 }, { "content": " U8,\n\n U16,\n\n U32,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum CullingMode {\n\n None,\n\n Front,\n\n Back,\n\n FrontBack,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum DepthTesting {\n\n None,\n\n Enabled {\n\n read_only: bool,\n\n func: DepthTestingFunction,\n\n },\n", "file_path": "lib/engine/src/renderer/frontend.rs", "rank": 85, "score": 26.776051025382753 }, { "content": "use nalgebra_glm as glm;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Default)]\n\npub struct Bounds {\n\n pub min_x: f32,\n\n pub min_y: f32,\n\n pub min_z: f32,\n\n\n\n pub max_x: 
f32,\n\n pub max_y: f32,\n\n pub max_z: f32,\n\n}\n\n\n\nimpl Bounds {\n\n pub fn new_nan() -> Self {\n\n Self {\n\n min_x: f32::NAN,\n\n min_y: f32::NAN,\n\n min_z: f32::NAN,\n\n\n", "file_path": "lib/engine/src/bound.rs", "rank": 86, "score": 26.219680150677977 }, { "content": "use super::scancode::Scancode;\n\n\n\n/// An generic event, from any source\n\n#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]\n\npub enum Event {\n\n /// The window was resized to have new width and height\n\n Resize { width: u32, height: u32 },\n\n\n\n /// The window gained user focus\n\n FocusGained,\n\n\n\n /// The window lost user focus\n\n FocusLost,\n\n\n\n /// A key was pressed down\n\n KeyDown { key: Scancode },\n\n\n\n /// A key was released\n\n KeyUp { key: Scancode },\n\n\n", "file_path": "lib/engine/src/window/event.rs", "rank": 87, "score": 26.180725640132202 }, { "content": "use crate::{window::event::Event, EngineStateRef};\n\nuse anyhow::Result;\n\n\n\n/// The result of a layer processing an event\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]\n\npub enum EventResult {\n\n /// The game loop should quit, e.g. 
if the close button is clicked\n\n Exit,\n\n\n\n /// This even has been handled and should no longer be processed further\n\n Handled,\n\n\n\n /// This event was ignored and should be passed to the next layer\n\n Ignored,\n\n}\n\n\n\n/// The ordering that a layer should be updated or rendered in\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]\n\npub enum CallOrder {\n\n /// This render or update should be defered until after its child states\n\n Deferred,\n\n\n\n /// This render or update should be performed immediately\n\n Standard,\n\n}\n\n\n\n/// A single render layer\n", "file_path": "lib/engine/src/layer.rs", "rank": 88, "score": 25.804646483880347 }, { "content": "}\n\n\n\nimpl Default for MinFilter {\n\n fn default() -> Self {\n\n Self::Nearest\n\n }\n\n}\n\n\n\n/// Description of what should be done if sampling outside of [0, 1]\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum WrappingMode {\n\n /// The texture coordinate wraps around the texture.\n\n /// e.g. a texture coordinate of -0.2 becomes the equivalent of 0.8.\n\n Repeat,\n\n\n\n /// The texture coordinate wraps around, but mirrored.\n\n /// e.g. a texture coordinate of -0.2 becomes the equivalent of 0.2.\n\n /// e.g. 
a texture coordinate of -1.2 becomes the equivalent of 0.8.\n\n MirroredRepeat,\n\n\n", "file_path": "lib/engine/src/texture.rs", "rank": 89, "score": 25.499171021254547 }, { "content": "use anyhow::Result;\n\nuse nalgebra_glm as glm;\n\n\n\nuse super::{backend::RendererBackend, shader::Program};\n\nuse crate::texture::Texture;\n\n\n\n/// type inside all *Id tuple structs\n\npub type IdType = u64;\n\n\n\n#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]\n\npub struct TextureId(pub(crate) IdType);\n\n\n\n#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]\n\npub struct VertexBufferId(pub(crate) IdType);\n\n\n\n#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]\n\npub struct IndexBufferId(pub(crate) IdType);\n\n\n\n#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]\n\npub struct PipelineId(pub(crate) IdType);\n", "file_path": "lib/engine/src/renderer/frontend.rs", "rank": 90, "score": 24.751534126637683 }, { "content": " /// Clamps the provided texture coordinates to the range [0, 1]\n\n ClampToEdge,\n\n}\n\n\n\nimpl Default for WrappingMode {\n\n fn default() -> Self {\n\n Self::Repeat\n\n }\n\n}\n\n\n\n/// Description of the pixel data provided\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum TextureSourceFormat {\n\n /// A single color component (red), converted to float in [0, 1]\n\n R,\n\n\n\n /// Two color components (red/green), converted to vec2 float in [0, 1]\n\n RG,\n\n\n\n /// Three color components (red/green/blue), converted to vec3 float in [0, 1]\n", "file_path": "lib/engine/src/texture.rs", "rank": 91, "score": 23.988847382247304 }, { "content": "use anyhow::Result;\n\n\n\nuse crate::renderer::backend::RendererBackend;\n\n\n\nuse super::{event::Event, input::InputState};\n\n\n\n/// The startup settings to configure a new window with\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]\n\npub struct WindowConfig<'a> {\n\n /// The initial width of the window. 
Should not be relied upon to be\n\n /// correct, use `window.size()` to get the current width\n\n pub width: u32,\n\n\n\n /// The initial height of the window. This has the same limitations as\n\n /// for the window width\n\n pub height: u32,\n\n\n\n /// The text to show in the window's title bar\n\n pub title: &'a str,\n\n\n", "file_path": "lib/engine/src/window/window.rs", "rank": 92, "score": 23.24628060011035 }, { "content": "\n\nimpl<'a> Drop for BoundPipeline<'a> {\n\n fn drop(&mut self) {\n\n self.renderer.backend.unbind_pipeline(self.pipeline);\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum DrawingMode {\n\n Points,\n\n Lines,\n\n LineLoop,\n\n LineStrip,\n\n Triangles,\n\n TriangleStrip,\n\n TriangleFan,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum IndexType {\n", "file_path": "lib/engine/src/renderer/frontend.rs", "rank": 93, "score": 21.770850232672284 }, { "content": "}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum DepthTestingFunction {\n\n Always,\n\n Never,\n\n Less,\n\n Equal,\n\n LessEqual,\n\n Greater,\n\n NotEqual,\n\n GreaterEqual,\n\n}\n\n\n\nimpl DepthTesting {\n\n #[allow(non_upper_case_globals)]\n\n pub const Default: DepthTesting = DepthTesting::Enabled {\n\n read_only: false,\n\n func: DepthTestingFunction::Less,\n\n };\n\n}\n", "file_path": "lib/engine/src/renderer/frontend.rs", "rank": 94, "score": 20.699993559319115 }, { "content": "\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum VariableAllocationContext {\n\n Local,\n\n Uniform,\n\n Input,\n\n Output,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Variable {\n\n pub name: String,\n\n pub ty: Type,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum Type {\n\n Vector(usize),\n\n\n\n /// matrix rows x columns\n", "file_path": "lib/engine/src/renderer/shader.rs", "rank": 95, "score": 20.075425679023713 }, { "content": "\n\n /// Magnification 
filtering setting\n\n pub mag_filter: MagFilter,\n\n\n\n /// The layout of the data provided\n\n pub source_format: TextureSourceFormat,\n\n\n\n /// The type of the data provided\n\n pub source_type: TextureSourceType,\n\n\n\n /// The pixel width of the image\n\n pub width: u32,\n\n\n\n /// The pixel height of the image\n\n pub height: u32,\n\n\n\n /// the format to store the texture as on the GPU\n\n pub storage: TextureStorageType,\n\n}\n\n\n\n/// An image and settings about how to interpret the data\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct Texture {\n\n image: TextureData,\n\n config: TextureOptions,\n\n}\n\n\n\n/// Wrapper to simplify using both 16bit and 8bit textures\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n", "file_path": "lib/engine/src/texture.rs", "rank": 96, "score": 19.973048426845885 }, { "content": "///\n\n/// Five indicies into the triple buffer's state are used, so with 2 bits per\n\n/// index, a total of 10 bits are required. AtomicU32 is used as 16 bit atomics\n\n/// are significantly less efficient that 32 bit. 
(See [`performance`])\n\n///\n\n/// The read and write indicies could be removed, however it seems like a\n\n/// cleaner solution to use more of the atomic value, rather than calling\n\n/// try_lock on muticies\n\n///\n\n/// [`performance`] https://stackoverflow.com/questions/29322218/performance-comparison-of-atomic-operations-on-different-sizes\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n", "file_path": "lib/engine/src/scene/triple_buffer.rs", "rank": 97, "score": 19.69550504838491 }, { "content": " #[serde(default)]\n\n pub extensions: HashMap<String, Value>,\n\n\n\n /// Application specific data\n\n #[serde(default)]\n\n pub extras: Value,\n\n}\n\n\n\n/// A sampler magnification filter, based on the OpenGL enum values\n\n#[derive(Debug, Deserialize_repr, Clone, Copy)]\n\n#[repr(u32)]\n\n#[serde(rename_all = \"UPPERCASE\")]\n\npub enum SamplerMagFilter {\n\n Nearest = gl::NEAREST,\n\n Linear = gl::LINEAR,\n\n}\n\n\n\nimpl Default for SamplerMagFilter {\n\n /// Default magnification filter is nearest\n\n fn default() -> Self {\n", "file_path": "lib/engine/src/gltf.rs", "rank": 98, "score": 19.25741302481506 }, { "content": "/// How a buffer view is going to be used in the model\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub enum BufferViewType {\n\n /// No usage found for the buffer view\n\n None,\n\n\n\n /// The buffer view is needed on the CPU, possibly a texture to decode\n\n CPUBuffer,\n\n\n\n /// The buffer view is needed as an OpenGL ArrayBuffer or equivalent.\n\n /// E.g. vertex attributes\n\n ArrayBuffer,\n\n\n\n /// The buffer view is needed as an OpenGL ElementArrayBuffer or\n\n /// equivalent. E.g. 
vertex indicies for DrawElements\n\n ElementArrayBuffer,\n\n}\n\n\n\n/// As the methods in here are called on load, rather than on render, they\n\n/// need to use checked array access so the program does not crash if\n", "file_path": "lib/engine/src/model.rs", "rank": 99, "score": 19.123253004254117 } ]
Rust
delivery-service/ds-lib/src/lib.rs
greydot/openmls
fca1cbd3400d377dae06ef9ceebda5e0392c6c89
use openmls::{framing::VerifiableMlsPlaintext, prelude::*}; use tls_codec::{ Size, TlsByteSliceU16, TlsByteVecU16, TlsByteVecU32, TlsByteVecU8, TlsDeserialize, TlsSerialize, TlsSize, TlsVecU32, }; #[derive(Debug, Default, Clone)] pub struct ClientInfo<'a> { pub client_name: String, pub key_packages: ClientKeyPackages, pub id: Vec<u8>, pub msgs: Vec<DsMlsMessage<'a>>, pub welcome_queue: Vec<Welcome>, } #[derive(Debug, Default, Clone, PartialEq, TlsSerialize, TlsDeserialize, TlsSize)] pub struct ClientKeyPackages(pub TlsVecU32<(TlsByteVecU8, KeyPackage)>); impl<'a> ClientInfo<'a> { pub fn new(client_name: String, mut key_packages: Vec<(Vec<u8>, KeyPackage)>) -> Self { Self { client_name, id: key_packages[0].1.credential().identity().to_vec(), key_packages: ClientKeyPackages( key_packages .drain(..) .map(|(e1, e2)| (e1.into(), e2)) .collect::<Vec<(TlsByteVecU8, KeyPackage)>>() .into(), ), msgs: Vec::new(), welcome_queue: Vec::new(), } } pub fn id(&self) -> &[u8] { self.id.as_slice() } } #[derive(Debug, Clone)] pub enum DsMlsMessage<'a> { Plaintext(VerifiableMlsPlaintext<'a>), Ciphertext(MlsCiphertext), } impl<'a> DsMlsMessage<'a> { pub fn group_id(&self) -> &[u8] { match self { DsMlsMessage::Plaintext(p) => p.payload().group_id(), DsMlsMessage::Ciphertext(c) => c.group_id().as_slice(), } } pub fn epoch(&self) -> u64 { match self { DsMlsMessage::Ciphertext(m) => m.epoch().0, DsMlsMessage::Plaintext(m) => m.payload().epoch().0, } } pub fn is_handshake_message(&self) -> bool { match self { DsMlsMessage::Ciphertext(m) => m.is_handshake_message(), DsMlsMessage::Plaintext(m) => m.payload().is_handshake_message(), } } } #[derive(Debug)] pub enum Message<'a> { MlsMessage(DsMlsMessage<'a>), Welcome(Welcome), } #[derive(Debug, Clone, Copy, TlsSerialize, TlsDeserialize, TlsSize)] #[repr(u8)] pub enum MessageType { MlsCiphertext = 0, MlsPlaintext = 1, Welcome = 2, } #[derive(Debug)] pub struct GroupMessage<'a> { pub msg: DsMlsMessage<'a>, pub recipients: TlsVecU32<TlsByteVecU32>, 
} impl<'a> GroupMessage<'a> { pub fn new(msg: DsMlsMessage<'a>, recipients: &[Vec<u8>]) -> Self { Self { msg, recipients: recipients .iter() .map(|r| r.clone().into()) .collect::<Vec<TlsByteVecU32>>() .into(), } } pub fn group_id(&self) -> &[u8] { self.msg.group_id() } pub fn epoch(&self) -> u64 { self.msg.epoch() } pub fn is_handshake_message(&self) -> bool { self.msg.is_handshake_message() } } impl<'a> tls_codec::Size for ClientInfo<'a> { fn tls_serialized_len(&self) -> usize { TlsByteSliceU16(self.client_name.as_bytes()).tls_serialized_len() + self.key_packages.tls_serialized_len() } } impl<'a> tls_codec::Serialize for ClientInfo<'a> { fn tls_serialize<W: std::io::Write>(&self, writer: &mut W) -> Result<usize, tls_codec::Error> { let written = TlsByteSliceU16(self.client_name.as_bytes()).tls_serialize(writer)?; self.key_packages.tls_serialize(writer).map(|l| l + written) } } impl<'a> tls_codec::Deserialize for ClientInfo<'a> { fn tls_deserialize<R: std::io::Read>(bytes: &mut R) -> Result<Self, tls_codec::Error> { let client_name = String::from_utf8_lossy(TlsByteVecU16::tls_deserialize(bytes)?.as_slice()).into(); let mut key_packages: Vec<(TlsByteVecU8, KeyPackage)> = TlsVecU32::<(TlsByteVecU8, KeyPackage)>::tls_deserialize(bytes)?.into(); let key_packages = key_packages .drain(..) 
.map(|(e1, e2)| (e1.into(), e2)) .collect(); Ok(Self::new(client_name, key_packages)) } } impl<'a> tls_codec::Size for Message<'a> { fn tls_serialized_len(&self) -> usize { MessageType::MlsCiphertext.tls_serialized_len() + match self { Message::MlsMessage(mm) => match mm { DsMlsMessage::Plaintext(p) => p.tls_serialized_len(), DsMlsMessage::Ciphertext(c) => c.tls_serialized_len(), }, Message::Welcome(w) => w.tls_serialized_len(), } } } impl<'a> tls_codec::Serialize for Message<'a> { fn tls_serialize<W: std::io::Write>(&self, writer: &mut W) -> Result<usize, tls_codec::Error> { let written; match self { Message::MlsMessage(m) => match m { DsMlsMessage::Ciphertext(m) => { written = MessageType::MlsCiphertext.tls_serialize(writer)?; m.tls_serialize(writer) } DsMlsMessage::Plaintext(m) => { written = MessageType::MlsPlaintext.tls_serialize(writer)?; m.tls_serialize(writer) } }, Message::Welcome(m) => { written = MessageType::Welcome.tls_serialize(writer)?; m.tls_serialize(writer) } } .map(|l| l + written) } } impl<'a> tls_codec::Deserialize for Message<'a> { fn tls_deserialize<R: std::io::Read>(bytes: &mut R) -> Result<Self, tls_codec::Error> { let msg_type = MessageType::tls_deserialize(bytes)?; Ok(match msg_type { MessageType::MlsCiphertext => Message::MlsMessage(DsMlsMessage::Ciphertext( MlsCiphertext::tls_deserialize(bytes)?, )), MessageType::MlsPlaintext => Message::MlsMessage(DsMlsMessage::Plaintext( VerifiableMlsPlaintext::tls_deserialize(bytes)?, )), MessageType::Welcome => Message::Welcome(Welcome::tls_deserialize(bytes)?), }) } } impl<'a> tls_codec::Size for GroupMessage<'a> { fn tls_serialized_len(&self) -> usize { MessageType::MlsCiphertext.tls_serialized_len() + match &self.msg { DsMlsMessage::Plaintext(p) => p.tls_serialized_len(), DsMlsMessage::Ciphertext(c) => c.tls_serialized_len(), } + self.recipients.tls_serialized_len() } } impl<'a> tls_codec::Serialize for GroupMessage<'a> { fn tls_serialize<W: std::io::Write>(&self, writer: &mut W) -> Result<usize, 
tls_codec::Error> { let mut written = 0; written += match &self.msg { DsMlsMessage::Ciphertext(m) => { MessageType::MlsCiphertext.tls_serialize(writer)? + m.tls_serialize(writer)? } DsMlsMessage::Plaintext(m) => { MessageType::MlsPlaintext.tls_serialize(writer)? + m.tls_serialize(writer)? } }; self.recipients.tls_serialize(writer).map(|l| l + written) } } impl<'a> tls_codec::Deserialize for GroupMessage<'a> { fn tls_deserialize<R: std::io::Read>(bytes: &mut R) -> Result<Self, tls_codec::Error> { let msg_type = MessageType::tls_deserialize(bytes)?; let msg = match msg_type { MessageType::MlsCiphertext => { DsMlsMessage::Ciphertext(MlsCiphertext::tls_deserialize(bytes)?) } MessageType::MlsPlaintext => { DsMlsMessage::Plaintext(VerifiableMlsPlaintext::tls_deserialize(bytes)?) } _ => { return Err(tls_codec::Error::DecodingError(format!( "Invalid message type {:?}", msg_type ))) } }; let recipients = TlsVecU32::<TlsByteVecU32>::tls_deserialize(bytes)?; Ok(Self { msg, recipients }) } }
use openmls::{framing::VerifiableMlsPlaintext, prelude::*}; use tls_codec::{ Size, TlsByteSliceU16, TlsByteVecU16, TlsByteVecU32, TlsByteVecU8, TlsDeserialize, TlsSerialize, TlsSize, TlsVecU32, }; #[derive(Debug, Default, Clone)] pub struct ClientInfo<'a> { pub client_name: String, pub key_packages: ClientKeyPackages, pub id: Vec<u8>, pub msgs: Vec<DsMlsMessage<'a>>, pub welcome_queue: Vec<Welcome>, } #[derive(Debug, Default, Clone, PartialEq, TlsSerialize, TlsDeserialize, TlsSize)] pub struct ClientKeyPackages(pub TlsVecU32<(TlsByteVecU8, KeyPackage)>); impl<'a> ClientInfo<'a> { pub fn new(client_name: String, mut key_packages: Vec<(Vec<u8>, KeyPackage)>) -> Self { Self { client_name, id: key_packages[0].1.credential().identity().to_vec(), key_packages: ClientKeyPackages( key_packages .drain(..) .map(|(e1, e2)| (e1.into(), e2)) .collect::<Vec<(TlsByteVecU8, KeyPackage)>>() .into(), ), msgs: Vec::new(), welcome_queue: Vec::new(), } } pub fn id(&self) -> &[u8] { self.id.as_slice() } } #[derive(Debug, Clone)] pub enum DsMlsMessage<'a> { Plaintext(VerifiableMlsPlaintext<'a>), Ciphertext(MlsCiphertext), } impl<'a> DsMlsMessage<'a> { pub fn group_id(&self) -> &[u8] { match self { DsMlsMessage::Plaintext(p) => p.payload().group_id(), DsMlsMessage::Ciphertext(c) => c.group_id().as_slice(), } } pub fn epoch(&self) -> u64 { match self { DsMlsMessage::Ciphertext(m) => m.epoch().0, DsMlsMessage::Plaintext(m) => m.payload().epoch().0, } } pub fn is_handshake_message(&self) -> bool { match self { DsMlsMessage::Ciphertext(m) => m.is_handshake_message(), DsMlsMessage::Plaintext(m) => m.payload().is_handshake_message(), } } } #[derive(Debug)] pub enum Message<'a> { MlsMessage(DsMlsMessage<'a>), Welcome(Welcome), } #[derive(Debug, Clone, Copy, TlsSerialize, TlsDeserialize, TlsSize)] #[repr(u8)] pub enum MessageType { MlsCiphertext = 0, MlsPlaintext = 1, Welcome = 2, } #[derive(Debug)] pub struct GroupMessage<'a> { pub msg: DsMlsMessage<'a>, pub recipients: TlsVecU32<TlsByteVecU32>, 
} impl<'a> GroupMessage<'a> { pub fn new(msg: DsMlsMessage<'a>, recipients: &[Vec<u8>]) -> Self { Self { msg, recipients: recipients .iter() .map(|r| r.clone().into()) .collect::<Vec<TlsByteVecU32>>() .into(), } } pub fn group_id(&self) -> &[u8] { self.msg.group_id() } pub fn epoch(&self) -> u64 { self.msg.epoch() } pub fn is_handshake_message(&self) -> bool { self.msg.is_handshake_message() } } impl<'a> tls_codec::Size for ClientInfo<'a> { fn tls_serialized_len(&self) -> usize { TlsByteSliceU16(self.client_name.as_bytes()).tls_serialized_len() + self.key_packages.tls_serialized_len() } } impl<'a> tls_codec::Serialize for ClientInfo<'a> { fn tls_serialize<W: std::io::Write>(&self, writer: &mut W) -> Result<usize, tls_codec::Error> { let written = TlsByteSliceU16(self.client_name.as_bytes()).tls_serialize(writer)?; self.key_packages.tls_serialize(writer).map(|l| l + written) } } impl<'a> tls_codec::Dese
(bytes)?, )), MessageType::Welcome => Message::Welcome(Welcome::tls_deserialize(bytes)?), }) } } impl<'a> tls_codec::Size for GroupMessage<'a> { fn tls_serialized_len(&self) -> usize { MessageType::MlsCiphertext.tls_serialized_len() + match &self.msg { DsMlsMessage::Plaintext(p) => p.tls_serialized_len(), DsMlsMessage::Ciphertext(c) => c.tls_serialized_len(), } + self.recipients.tls_serialized_len() } } impl<'a> tls_codec::Serialize for GroupMessage<'a> { fn tls_serialize<W: std::io::Write>(&self, writer: &mut W) -> Result<usize, tls_codec::Error> { let mut written = 0; written += match &self.msg { DsMlsMessage::Ciphertext(m) => { MessageType::MlsCiphertext.tls_serialize(writer)? + m.tls_serialize(writer)? } DsMlsMessage::Plaintext(m) => { MessageType::MlsPlaintext.tls_serialize(writer)? + m.tls_serialize(writer)? } }; self.recipients.tls_serialize(writer).map(|l| l + written) } } impl<'a> tls_codec::Deserialize for GroupMessage<'a> { fn tls_deserialize<R: std::io::Read>(bytes: &mut R) -> Result<Self, tls_codec::Error> { let msg_type = MessageType::tls_deserialize(bytes)?; let msg = match msg_type { MessageType::MlsCiphertext => { DsMlsMessage::Ciphertext(MlsCiphertext::tls_deserialize(bytes)?) } MessageType::MlsPlaintext => { DsMlsMessage::Plaintext(VerifiableMlsPlaintext::tls_deserialize(bytes)?) } _ => { return Err(tls_codec::Error::DecodingError(format!( "Invalid message type {:?}", msg_type ))) } }; let recipients = TlsVecU32::<TlsByteVecU32>::tls_deserialize(bytes)?; Ok(Self { msg, recipients }) } }
rialize for ClientInfo<'a> { fn tls_deserialize<R: std::io::Read>(bytes: &mut R) -> Result<Self, tls_codec::Error> { let client_name = String::from_utf8_lossy(TlsByteVecU16::tls_deserialize(bytes)?.as_slice()).into(); let mut key_packages: Vec<(TlsByteVecU8, KeyPackage)> = TlsVecU32::<(TlsByteVecU8, KeyPackage)>::tls_deserialize(bytes)?.into(); let key_packages = key_packages .drain(..) .map(|(e1, e2)| (e1.into(), e2)) .collect(); Ok(Self::new(client_name, key_packages)) } } impl<'a> tls_codec::Size for Message<'a> { fn tls_serialized_len(&self) -> usize { MessageType::MlsCiphertext.tls_serialized_len() + match self { Message::MlsMessage(mm) => match mm { DsMlsMessage::Plaintext(p) => p.tls_serialized_len(), DsMlsMessage::Ciphertext(c) => c.tls_serialized_len(), }, Message::Welcome(w) => w.tls_serialized_len(), } } } impl<'a> tls_codec::Serialize for Message<'a> { fn tls_serialize<W: std::io::Write>(&self, writer: &mut W) -> Result<usize, tls_codec::Error> { let written; match self { Message::MlsMessage(m) => match m { DsMlsMessage::Ciphertext(m) => { written = MessageType::MlsCiphertext.tls_serialize(writer)?; m.tls_serialize(writer) } DsMlsMessage::Plaintext(m) => { written = MessageType::MlsPlaintext.tls_serialize(writer)?; m.tls_serialize(writer) } }, Message::Welcome(m) => { written = MessageType::Welcome.tls_serialize(writer)?; m.tls_serialize(writer) } } .map(|l| l + written) } } impl<'a> tls_codec::Deserialize for Message<'a> { fn tls_deserialize<R: std::io::Read>(bytes: &mut R) -> Result<Self, tls_codec::Error> { let msg_type = MessageType::tls_deserialize(bytes)?; Ok(match msg_type { MessageType::MlsCiphertext => Message::MlsMessage(DsMlsMessage::Ciphertext( MlsCiphertext::tls_deserialize(bytes)?, )), MessageType::MlsPlaintext => Message::MlsMessage(DsMlsMessage::Plaintext( VerifiableMlsPlaintext::tls_deserialize
random
[ { "content": "pub fn post(url: &Url, msg: &impl Serialize) -> Result<Vec<u8>, String> {\n\n let serialized_msg = msg.tls_serialize_detached().unwrap();\n\n log::debug!(\"Post {:?}\", url);\n\n log::trace!(\"Payload: {:?}\", serialized_msg);\n\n let client = Client::new();\n\n let response = client.post(&url.to_string()).body(serialized_msg).send();\n\n if let Ok(r) = response {\n\n if r.status() != StatusCode::OK {\n\n return Err(format!(\"Error status code {:?}\", r.status()));\n\n }\n\n match r.bytes() {\n\n Ok(bytes) => Ok(bytes.as_ref().to_vec()),\n\n Err(e) => Err(format!(\"Error retrieving bytes from response: {:?}\", e)),\n\n }\n\n } else {\n\n Err(format!(\"ERROR: {:?}\", response.err()))\n\n }\n\n}\n\n\n", "file_path": "cli/src/networking.rs", "rank": 0, "score": 249064.05942122958 }, { "content": "pub fn get(url: &Url) -> Result<Vec<u8>, String> {\n\n log::debug!(\"Get {:?}\", url);\n\n let client = Client::new();\n\n let response = client.get(&url.to_string()).send();\n\n if let Ok(r) = response {\n\n if r.status() != StatusCode::OK {\n\n return Err(format!(\"Error status code {:?}\", r.status()));\n\n }\n\n match r.bytes() {\n\n Ok(bytes) => Ok(bytes.as_ref().to_vec()),\n\n Err(e) => Err(format!(\"Error retrieving bytes from response: {:?}\", e)),\n\n }\n\n } else {\n\n Err(format!(\"ERROR: {:?}\", response.err()))\n\n }\n\n}\n", "file_path": "cli/src/networking.rs", "rank": 1, "score": 218381.12169674813 }, { "content": "/// No crypto randomness!\n\npub fn randombytes(n: usize) -> Vec<u8> {\n\n let mut out = vec![0u8; n];\n\n OsRng.fill_bytes(&mut out);\n\n out\n\n}\n\n\n", "file_path": "openmls/tests/utils/mls_utils/mod.rs", "rank": 2, "score": 209644.80981036884 }, { "content": "fn update(client: &mut user::User, group_id: Option<String>, stdout: &mut StdoutLock) {\n\n let messages = client.update(group_id).unwrap();\n\n stdout.write_all(b\" >>> Updated client :)\\n\").unwrap();\n\n if !messages.is_empty() {\n\n stdout.write_all(b\" New 
messages:\\n\\n\").unwrap();\n\n }\n\n messages.iter().for_each(|m| {\n\n stdout\n\n .write_all(format!(\" {}\\n\", m).as_bytes())\n\n .unwrap();\n\n });\n\n stdout.write_all(b\"\\n\").unwrap();\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 3, "score": 200130.99935765355 }, { "content": "#[inline(always)]\n\nfn equal_ct(a: &[u8], b: &[u8]) -> bool {\n\n let mut diff = 0u8;\n\n for (l, r) in a.iter().zip(b.iter()) {\n\n diff |= l ^ r;\n\n }\n\n diff == 0\n\n}\n\n\n\n/// A struct to contain secrets. This is to provide better visibility into where\n\n/// and how secrets are used and to avoid passing secrets in their raw\n\n/// representation.\n\n#[derive(Clone, Debug)]\n\npub struct Secret {\n\n ciphersuite: &'static Ciphersuite,\n\n value: Vec<u8>,\n\n mls_version: ProtocolVersion,\n\n}\n\n\n\nimplement_persistence!(Secret, value, mls_version);\n\n\n", "file_path": "openmls/src/ciphersuite/mod.rs", "rank": 4, "score": 195782.1738441881 }, { "content": "fn _bytes_to_hex(bytes: &[u8]) -> String {\n\n let mut hex = String::new();\n\n for b in bytes {\n\n hex += &format!(\"{:02X}\", *b);\n\n }\n\n hex\n\n}\n\n\n\n// With the crypto-debug feature enabled sensitive crypto parts can be logged.\n\n#[cfg(feature = \"crypto-debug\")]\n\nmacro_rules! 
log_crypto {\n\n (debug, $($arg:tt)*) => ({\n\n log::debug!($($arg)*);\n\n });\n\n (trace, $($arg:tt)*) => ({\n\n log::trace!($($arg)*);\n\n })\n\n}\n\n\n\n// With the content-debug feature enabled sensitive message content parts can be logged.\n", "file_path": "openmls/src/utils.rs", "rank": 5, "score": 188029.77000139322 }, { "content": "pub fn random_usize() -> usize {\n\n OsRng.next_u64() as usize\n\n}\n\n\n", "file_path": "openmls/tests/utils/mls_utils/mod.rs", "rank": 6, "score": 175817.03178939596 }, { "content": "pub fn write(file_name: &str, payload: &[u8]) {\n\n let mut file = match File::create(file_name) {\n\n Ok(f) => f,\n\n Err(_) => panic!(\"Couldn't open file {}.\", file_name),\n\n };\n\n file.write_all(payload)\n\n .expect(\"Error writing test vector file\");\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl MlsClient for MlsClientImpl {\n\n async fn name(&self, _request: Request<NameRequest>) -> Result<Response<NameResponse>, Status> {\n\n println!(\"Got Name request\");\n\n\n\n let response = NameResponse {\n\n name: IMPLEMENTATION_NAME.to_string(),\n\n };\n\n Ok(Response::new(response))\n\n }\n\n\n", "file_path": "interop_client/src/main.rs", "rank": 7, "score": 169553.65469309353 }, { "content": "#[cfg(any(feature = \"test-utils\", test))]\n\npub fn generate_test_vector(\n\n n_epochs: u64,\n\n ciphersuite: &'static Ciphersuite,\n\n) -> KeyScheduleTestVector {\n\n use tls_codec::Serialize;\n\n\n\n // Set up setting.\n\n let mut init_secret = InitSecret::random(ciphersuite, ProtocolVersion::default());\n\n let initial_init_secret = init_secret.clone();\n\n let group_id = ciphersuite.randombytes(16);\n\n\n\n let mut epochs = Vec::new();\n\n\n\n // Generate info for all epochs\n\n for epoch in 0..n_epochs {\n\n println!(\"Generating epoch: {:?}\", epoch);\n\n let (\n\n confirmed_transcript_hash,\n\n commit_secret,\n\n joiner_secret,\n", "file_path": "openmls/src/schedule/kat_key_schedule.rs", "rank": 8, "score": 128267.56828852238 }, { "content": "fn 
criterion_benchmark(c: &mut Criterion) {\n\n criterion_kp_bundle(c);\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "openmls/benches/benchmark.rs", "rank": 9, "score": 125769.5159514301 }, { "content": "// Get a ciphertext sample of `hash_length` from the ciphertext.\n\nfn ciphertext_sample<'a>(ciphersuite: &Ciphersuite, ciphertext: &'a [u8]) -> &'a [u8] {\n\n let sample_length = ciphersuite.hash_length();\n\n log::debug!(\"Getting ciphertext sample of length {:?}\", sample_length);\n\n if ciphertext.len() <= sample_length {\n\n ciphertext\n\n } else {\n\n &ciphertext[0..sample_length]\n\n }\n\n}\n\n\n\n/// A key that can be used to derive an `AeadKey` and an `AeadNonce`.\n\n#[derive(Debug, Serialize, Deserialize)]\n\n#[cfg_attr(test, derive(PartialEq))]\n\npub(crate) struct SenderDataSecret {\n\n secret: Secret,\n\n}\n\n\n\nimpl SenderDataSecret {\n\n /// Derive an `ExporterSecret` from an `EpochSecret`.\n\n fn new(epoch_secret: &EpochSecret) -> Self {\n", "file_path": "openmls/src/schedule/mod.rs", "rank": 10, "score": 125507.45275707863 }, { "content": "#[cfg(any(feature = \"test-utils\", test))]\n\npub fn generate_test_vector(\n\n n_generations: u32,\n\n n_leaves: u32,\n\n ciphersuite: &'static Ciphersuite,\n\n) -> EncryptionTestVector {\n\n let ciphersuite_name = ciphersuite.name();\n\n let epoch_secret = ciphersuite.randombytes(ciphersuite.hash_length());\n\n let encryption_secret =\n\n EncryptionSecret::from_slice(&epoch_secret[..], ProtocolVersion::default(), ciphersuite);\n\n let encryption_secret_group =\n\n EncryptionSecret::from_slice(&epoch_secret[..], ProtocolVersion::default(), ciphersuite);\n\n let encryption_secret_bytes = encryption_secret.as_slice().to_vec();\n\n let sender_data_secret = SenderDataSecret::random(ciphersuite);\n\n let sender_data_secret_bytes = sender_data_secret.as_slice();\n\n let mut secret_tree = SecretTree::new(encryption_secret, LeafIndex::from(n_leaves));\n\n let 
group_secret_tree = SecretTree::new(encryption_secret_group, LeafIndex::from(n_leaves));\n\n\n\n // Create sender_data_key/secret\n\n let ciphertext = ciphersuite.randombytes(77);\n\n let sender_data_key = sender_data_secret.derive_aead_key(&ciphertext);\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_encryption.rs", "rank": 11, "score": 124773.65981003434 }, { "content": "fn criterion_kp_bundle(c: &mut Criterion) {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n c.bench_function(\n\n &format!(\n\n \"KeyPackage create bundle with ciphersuite: {:?}\",\n\n ciphersuite.name()\n\n ),\n\n move |b| {\n\n b.iter_with_setup(\n\n || {\n\n CredentialBundle::new(\n\n vec![1, 2, 3],\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap()\n\n },\n\n |credential_bundle: CredentialBundle| {\n\n KeyPackageBundle::new(\n\n &[ciphersuite.name()],\n", "file_path": "openmls/benches/benchmark.rs", "rank": 12, "score": 123718.05285075042 }, { "content": "/// This function DER encodes a given ECDSA signature consisting of bytes\n\n/// representing the concatenated scalars. 
If the encoding fails, it will\n\n/// throw a `CryptoError`.\n\nfn der_encode(raw_signature: &[u8]) -> Result<Vec<u8>, CryptoError> {\n\n // A small helper function to determine the length of a given raw\n\n // scalar.\n\n fn scalar_length(mut scalar: &[u8]) -> Result<usize, CryptoError> {\n\n // Remove prepending zeros of the given, unencoded scalar.\n\n let mut msb = scalar\n\n .read_u8()\n\n .map_err(|_| CryptoError::SignatureEncodingError)?;\n\n while msb == 0x00 {\n\n msb = scalar\n\n .read_u8()\n\n .map_err(|_| CryptoError::SignatureEncodingError)?;\n\n }\n\n\n\n // The length of the scalar is what's left after removing the\n\n // prepending zeroes, plus 1 for the msb which we've already read.\n\n let mut scalar_length = scalar.len() + 1;\n\n\n\n // If the most significant bit is 1, we have to prepend 0x00 to indicate\n\n // that the integer is unsigned.\n", "file_path": "openmls/src/ciphersuite/crypto/evercrypt_provider.rs", "rank": 13, "score": 122445.33149395787 }, { "content": "#[test]\n\nfn invalid_welcomes() {\n\n // An almost good welcome message.\n\n let mut bytes = &[\n\n 2u8, 0, 2, 0, 0, 0, 90, 4, 0, 0, 0, 0, 0, 32, 183, 76, 159, 248, 180, 5, 79, 86, 242, 165,\n\n 206, 103, 47, 8, 110, 250, 81, 48, 206, 185, 186, 104, 220, 181, 245, 106, 134, 32, 97,\n\n 233, 141, 26, 0, 49, 13, 203, 68, 119, 97, 90, 172, 36, 170, 239, 80, 191, 63, 146, 177,\n\n 211, 151, 152, 93, 117, 192, 136, 96, 22, 168, 213, 67, 165, 244, 165, 183, 228, 88, 62,\n\n 232, 36, 220, 224, 93, 216, 155, 210, 167, 34, 112, 7, 73, 42, 2, 0, 0, 0, 71, 254, 148,\n\n 190, 32, 30, 92, 51, 15, 16, 11, 46, 196, 65, 132, 142, 111, 177, 115, 21, 218, 71, 51,\n\n 118, 228, 188, 12, 134, 23, 216, 51, 20, 138, 215, 232, 62, 216, 119, 242, 93, 164, 250,\n\n 100, 223, 214, 94, 85, 139, 159, 205, 193, 153, 181, 243, 139, 12, 78, 253, 200, 47, 207,\n\n 79, 86, 82, 63, 217, 126, 204, 178, 24, 199, 49,\n\n ] as &[u8];\n\n let msg = Welcome::tls_deserialize(&mut bytes);\n\n 
assert!(msg.is_err());\n\n}\n", "file_path": "openmls/src/messages/tests/test_welcome.rs", "rank": 14, "score": 119903.5014151829 }, { "content": "#[proc_macro]\n\npub fn ctest(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as TestInput);\n\n impl_ciphersuite_tests(input, quote! {})\n\n}\n\n\n", "file_path": "test_macros/src/lib.rs", "rank": 15, "score": 114246.50572564111 }, { "content": "fn into_bytes(obj: impl Serialize) -> Vec<u8> {\n\n serde_json::to_string_pretty(&obj)\n\n .expect(\"Error serializing test vectors\")\n\n .as_bytes()\n\n .to_vec()\n\n}\n\n\n", "file_path": "interop_client/src/main.rs", "rank": 16, "score": 113588.42387328857 }, { "content": "#[test]\n\nfn default_ciphersuites() {\n\n // Make sure the supported ciphersuites are what we expect them to be.\n\n let supported_ciphersuites = Config::supported_ciphersuite_names();\n\n assert_eq!(\n\n vec![\n\n CiphersuiteName::MLS10_128_DHKEMX25519_AES128GCM_SHA256_Ed25519,\n\n CiphersuiteName::MLS10_128_DHKEMP256_AES128GCM_SHA256_P256,\n\n CiphersuiteName::MLS10_128_DHKEMX25519_CHACHA20POLY1305_SHA256_Ed25519,\n\n ],\n\n supported_ciphersuites\n\n );\n\n}\n\n\n", "file_path": "openmls/tests/test_config.rs", "rank": 17, "score": 113074.48464661818 }, { "content": "#[test]\n\nfn default_extensions() {\n\n // Make sure the supported extensions are what we expect them to be.\n\n let supported_extensions = Config::supported_extensions();\n\n assert_eq!(\n\n vec![\n\n ExtensionType::Capabilities,\n\n ExtensionType::Lifetime,\n\n ExtensionType::KeyId\n\n ],\n\n supported_extensions\n\n );\n\n}\n\n\n", "file_path": "openmls/tests/test_config.rs", "rank": 18, "score": 113074.48464661818 }, { "content": "#[test]\n\nfn default_constants() {\n\n // Make sure the supported ciphersuites are what we expect them to be.\n\n let default_key_package_lifetime = Config::default_key_package_lifetime();\n\n let key_package_lifetime_margin = Config::key_package_lifetime_margin();\n\n 
assert_eq!(60 * 60 * 24 * 28 * 3, default_key_package_lifetime);\n\n assert_eq!(60 * 60, key_package_lifetime_margin);\n\n}\n", "file_path": "openmls/tests/test_config.rs", "rank": 19, "score": 113074.48464661818 }, { "content": "fn own_identity(managed_group: &ManagedGroup) -> Vec<u8> {\n\n match managed_group.credential() {\n\n Ok(credential) => credential.identity().to_vec(),\n\n Err(_) => \"us\".as_bytes().to_vec(),\n\n }\n\n}\n\n\n", "file_path": "openmls/tests/test_managed_group.rs", "rank": 20, "score": 112891.09535127046 }, { "content": "#[proc_macro]\n\npub fn ctest_panic(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as TestInput);\n\n impl_ciphersuite_tests(\n\n input,\n\n quote! {\n\n #[should_panic]\n\n },\n\n )\n\n}\n\n\n", "file_path": "test_macros/src/lib.rs", "rank": 21, "score": 112537.90929754045 }, { "content": "pub fn _print_tree(tree: &RatchetTree, message: &str) {\n\n let factor = 3;\n\n println!(\"{}\", message);\n\n for (i, node) in tree.nodes.iter().enumerate() {\n\n let level = treemath::level(NodeIndex::from(i));\n\n print!(\"{:04}\", i);\n\n if !node.is_blank() {\n\n let (key_bytes, parent_hash_bytes) = match node.node_type {\n\n NodeType::Leaf => {\n\n print!(\"\\tL\");\n\n let key_bytes = if let Some(kp) = &node.key_package {\n\n kp.hpke_init_key().as_slice()\n\n } else {\n\n &[]\n\n };\n\n let parent_hash_bytes = if let Some(kp) = &node.key_package {\n\n if let Some(phe) = kp.extension_with_type(ExtensionType::ParentHash) {\n\n let parent_hash_extension: &ParentHashExtension =\n\n phe.as_parent_hash_extension().expect(\"Library error\");\n\n parent_hash_extension.parent_hash().to_vec()\n", "file_path": "openmls/src/utils.rs", "rank": 22, "score": 111471.6815307117 }, { "content": "/// The verifiable trait must be implemented by any struct that is signed with\n\n/// a credential. 
The actual `verify` method is provided.\n\n/// The `unsigned_payload` and `signature` functions have to be implemented for\n\n/// each struct, returning the serialized payload and the signature respectively.\n\n///\n\n/// Note that `Verifiable` should not be implemented on the same struct as\n\n/// `Signable`. If this appears to be necessary, it is probably a sign that the\n\n/// struct implementing them aren't well defined. Not that both traits define an\n\n/// `unsigned_payload` function.\n\npub trait Verifiable: Sized {\n\n /// Return the unsigned, serialized payload that should be verified.\n\n fn unsigned_payload(&self) -> Result<Vec<u8>, tls_codec::Error>;\n\n\n\n /// A reference to the signature to be verified.\n\n fn signature(&self) -> &Signature;\n\n\n\n /// Verifies the payload against the given `credential`.\n\n /// The signature is fetched via the [`Verifiable::signature()`] function and\n\n /// the payload via [`Verifiable::unsigned_payload()`].\n\n ///\n\n /// Returns `Ok(Self::VerifiedOutput)` if the signature is valid and\n\n /// `CredentialError::InvalidSignature` otherwise.\n\n fn verify<T>(self, credential: &Credential) -> Result<T, CredentialError>\n\n where\n\n T: VerifiedStruct<Self>,\n\n {\n\n let payload = self.unsigned_payload()?;\n\n credential.verify(&payload, self.signature())?;\n\n Ok(T::from_verifiable(self, T::SealingType::default()))\n", "file_path": "openmls/src/ciphersuite/signable.rs", "rank": 23, "score": 111149.21734182225 }, { "content": "/// The `Signable` trait is implemented by all struct that are being signed.\n\n/// The implementation has to provide the `unsigned_payload` function.\n\npub trait Signable: Sized {\n\n type SignedOutput;\n\n\n\n /// Return the unsigned, serialized payload that should be signed.\n\n fn unsigned_payload(&self) -> Result<Vec<u8>, tls_codec::Error>;\n\n\n\n /// Sign the payload with the given `id`.\n\n ///\n\n /// Returns a `Signature`.\n\n fn sign(\n\n self,\n\n credential_bundle: 
&CredentialBundle,\n\n ) -> Result<Self::SignedOutput, CredentialError>\n\n where\n\n Self::SignedOutput: SignedStruct<Self>,\n\n {\n\n let payload = self.unsigned_payload()?;\n\n let signature = credential_bundle.sign(&payload)?;\n\n Ok(Self::SignedOutput::from_payload(self, signature))\n\n }\n\n}\n\n\n", "file_path": "openmls/src/ciphersuite/signable.rs", "rank": 24, "score": 111148.24426844681 }, { "content": "/// This trait must be implemented by all structs that contain a self-signature.\n\npub trait SignedStruct<T> {\n\n /// Build a signed struct version from the payload struct.\n\n fn from_payload(payload: T, signature: Signature) -> Self;\n\n}\n\n\n", "file_path": "openmls/src/ciphersuite/signable.rs", "rank": 25, "score": 109053.41885018752 }, { "content": "/// This trait must be implemented by all structs that contain a verified\n\n/// self-signature.\n\npub trait VerifiedStruct<T> {\n\n /// This type is used to prevent users of the trait from bypassing `verify`\n\n /// by simply calling `from_verifiable`. `Seal` should be a dummy type\n\n /// defined in a private module as follows:\n\n /// ```\n\n /// mod private_mod {\n\n /// pub struct Seal;\n\n ///\n\n /// impl Default for Seal {\n\n /// fn default() -> Self {\n\n /// Seal {}\n\n /// }\n\n /// }\n\n /// }\n\n /// ```\n\n type SealingType: Default;\n\n\n\n /// Build a verified struct version from the payload struct. 
This function\n\n /// is only meant to be called by the implementation of the `Verifiable`\n\n /// trait corresponding to this `VerifiedStruct`.\n\n #[doc(hidden)]\n\n fn from_verifiable(verifiable: T, _seal: Self::SealingType) -> Self;\n\n}\n\n\n", "file_path": "openmls/src/ciphersuite/signable.rs", "rank": 26, "score": 109053.34056560414 }, { "content": "#[test]\n\nfn key_package_id() {\n\n // A key package extension with the default values for openmls.\n\n let data = &[0u8, 8, 1, 2, 3, 4, 5, 6, 6, 6];\n\n let kpi = KeyIdExtension::new(&data[2..]);\n\n\n\n let kpi_from_bytes = KeyIdExtension::tls_deserialize(&mut (data as &[u8])).unwrap();\n\n assert_eq!(kpi, kpi_from_bytes);\n\n\n\n let serialized_extension_struct = kpi.tls_serialize_detached().unwrap();\n\n assert_eq!(&data[..], &serialized_extension_struct);\n\n}\n\n\n", "file_path": "openmls/src/extensions/test_extensions.rs", "rank": 27, "score": 108862.41670381186 }, { "content": "#[test]\n\nfn test_welcome_message_encoding() {\n\n let test_setup = create_encoding_test_setup();\n\n let test_clients = test_setup.clients.borrow();\n\n let alice = test_clients.get(\"alice\").unwrap().borrow();\n\n\n\n for group_state in alice.group_states.borrow_mut().values_mut() {\n\n let credential_bundle = alice\n\n .credential_bundles\n\n .get(&group_state.ciphersuite().name())\n\n .unwrap();\n\n\n\n // Create a few proposals to put into the commit\n\n\n\n // Alice adds Charlie to the group\n\n let charlie_key_package = test_setup\n\n ._key_store\n\n .borrow_mut()\n\n .get_mut(&(\"charlie\", group_state.ciphersuite().name()))\n\n .unwrap()\n\n .pop()\n", "file_path": "openmls/tests/test_encoding.rs", "rank": 28, "score": 108801.76851680494 }, { "content": "pub trait WriteU8: Write {\n\n /// A small helper function to write a u8 to a Writer.\n\n #[inline]\n\n fn write_u8(&mut self, n: u8) -> std::io::Result<()> {\n\n self.write_all(&[n])\n\n }\n\n}\n\n\n", "file_path": 
"openmls/src/ciphersuite/crypto/evercrypt_provider.rs", "rank": 29, "score": 105202.82204454578 }, { "content": "pub trait ReadU8: Read {\n\n /// A small helper function to read a u8 from a Reader.\n\n #[inline]\n\n fn read_u8(&mut self) -> std::io::Result<u8> {\n\n let mut buf = [0; 1];\n\n self.read_exact(&mut buf)?;\n\n Ok(buf[0])\n\n }\n\n}\n\n\n\nimpl<W: Write + ?Sized> WriteU8 for W {}\n\n\n", "file_path": "openmls/src/ciphersuite/crypto/evercrypt_provider.rs", "rank": 30, "score": 105202.82204454578 }, { "content": "#[cfg(any(feature = \"test-utils\", test))]\n\npub fn generate_test_vector(n_leaves: u32) -> TreeMathTestVector {\n\n let leaves = LeafIndex::from(n_leaves);\n\n let n_nodes = node_width(leaves.as_usize()) as u32;\n\n let mut test_vector = TreeMathTestVector {\n\n n_leaves,\n\n n_nodes,\n\n root: Vec::new(),\n\n left: Vec::new(),\n\n right: Vec::new(),\n\n parent: Vec::new(),\n\n sibling: Vec::new(),\n\n };\n\n\n\n for i in 0..n_leaves {\n\n test_vector.root.push(root(LeafIndex::from(i + 1)).as_u32());\n\n }\n\n for i in 0..n_nodes {\n\n test_vector.left.push(convert!(left(NodeIndex::from(i))));\n\n test_vector\n\n .right\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_treemath.rs", "rank": 31, "score": 103652.07268429767 }, { "content": "pub fn generate_test_vector(ciphersuite: &'static Ciphersuite) -> TranscriptTestVector {\n\n // Generate random values.\n\n let group_id = GroupId::random(ciphersuite);\n\n let epoch = random_u64();\n\n let tree_hash_before = ciphersuite.randombytes(ciphersuite.hash_length());\n\n let confirmed_transcript_hash_before = ciphersuite.randombytes(ciphersuite.hash_length());\n\n let interim_transcript_hash_before = ciphersuite.randombytes(ciphersuite.hash_length());\n\n let membership_key =\n\n MembershipKey::from_secret(Secret::random(ciphersuite, None /* MLS version */));\n\n let confirmation_key =\n\n ConfirmationKey::from_secret(Secret::random(ciphersuite, None /* MLS version */));\n\n\n\n // Build 
plaintext commit message.\n\n let credential_bundle = CredentialBundle::new(\n\n b\"client\".to_vec(),\n\n CredentialType::Basic,\n\n SignatureScheme::from(ciphersuite.name()),\n\n )\n\n .unwrap();\n\n let context = GroupContext::new(\n", "file_path": "openmls/src/group/tests/kat_transcripts.rs", "rank": 32, "score": 103604.78703192637 }, { "content": "pub fn generate_test_vector(ciphersuite: &'static Ciphersuite) -> MessagesTestVector {\n\n let ciphersuite_name = ciphersuite.name();\n\n let credential_bundle = CredentialBundle::new(\n\n b\"OpenMLS rocks\".to_vec(),\n\n CredentialType::Basic,\n\n SignatureScheme::from(ciphersuite_name),\n\n )\n\n .unwrap();\n\n let key_package_bundle =\n\n KeyPackageBundle::new(&[ciphersuite_name], &credential_bundle, Vec::new()).unwrap();\n\n let capabilities = CapabilitiesExtension::default();\n\n let lifetime = LifetimeExtension::default();\n\n\n\n // Let's create a group\n\n let group_id = GroupId::random(ciphersuite);\n\n let config = MlsGroupConfig::default();\n\n let mut group = MlsGroup::new(\n\n group_id.as_slice(),\n\n ciphersuite_name,\n\n key_package_bundle,\n", "file_path": "openmls/src/group/tests/kat_messages.rs", "rank": 33, "score": 103604.78703192637 }, { "content": "#[test]\n\nfn key_package_id_extension() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n let id = vec![1, 2, 3];\n\n let credential_bundle =\n\n CredentialBundle::new(id, CredentialType::Basic, ciphersuite.name().into()).unwrap();\n\n let kpb = KeyPackageBundle::new(\n\n &[ciphersuite.name()],\n\n &credential_bundle,\n\n vec![Extension::LifeTime(LifetimeExtension::new(60))],\n\n )\n\n .unwrap();\n\n assert!(kpb.key_package().verify().is_ok());\n\n let mut kpb = kpb.unsigned();\n\n\n\n // Add an ID to the key package.\n\n let id = [1, 2, 3, 4];\n\n kpb.add_extension(Extension::KeyPackageId(KeyIdExtension::new(&id)));\n\n\n\n // Sign it to make it valid.\n\n let kpb = kpb.sign(&credential_bundle).unwrap();\n\n 
assert!(kpb.key_package().verify().is_ok());\n\n\n\n // Check ID\n\n assert_eq!(&id[..], kpb.key_package().key_id().expect(\"No key ID\"));\n\n }\n\n}\n\n\n", "file_path": "openmls/src/key_packages/test_key_packages.rs", "rank": 34, "score": 103249.36283281798 }, { "content": "#[inline(always)]\n\nfn leaf_in_tree(leaf_index: LeafIndex, size: LeafIndex) -> Result<(), TreeMathError> {\n\n if leaf_index >= size {\n\n Err(TreeMathError::LeafNotInTree)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Direct path from a leaf node to the root.\n\n/// Does not include the leaf node but includes the root.\n\npub(crate) fn leaf_direct_path(\n\n leaf_index: LeafIndex,\n\n size: LeafIndex,\n\n) -> Result<Vec<NodeIndex>, TreeMathError> {\n\n leaf_in_tree(leaf_index, size)?;\n\n let node_index = NodeIndex::from(leaf_index);\n\n let r = root(size);\n\n if node_index == r {\n\n return Ok(vec![r]);\n\n }\n", "file_path": "openmls/src/tree/treemath.rs", "rank": 35, "score": 100284.61358173669 }, { "content": "#[inline(always)]\n\nfn node_in_tree(node_index: NodeIndex, size: LeafIndex) -> Result<(), TreeMathError> {\n\n if node_index.as_usize() >= node_width(size.as_usize()) {\n\n Err(TreeMathError::NodeNotInTree)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "openmls/src/tree/treemath.rs", "rank": 36, "score": 100284.61358173669 }, { "content": "pub fn run_test_vector(tv: MessagesTestVector) -> Result<(), MessagesTestVectorError> {\n\n // KeyPackage\n\n let tv_key_package = hex_to_bytes(&tv.key_package);\n\n let mut tv_key_package_slice = tv_key_package.as_slice();\n\n let my_key_package = KeyPackage::tls_deserialize(&mut tv_key_package_slice)\n\n .unwrap()\n\n .tls_serialize_detached()\n\n .unwrap();\n\n if tv_key_package != my_key_package {\n\n log::error!(\" KeyPackage encoding mismatch\");\n\n log::debug!(\" Encoded: {:x?}\", my_key_package);\n\n log::debug!(\" Expected: {:x?}\", tv_key_package);\n\n if cfg!(test) {\n\n panic!(\"KeyPackage encoding mismatch\");\n\n }\n\n 
return Err(MessagesTestVectorError::KeyPackageEncodingMismatch);\n\n }\n\n\n\n // Capabilities\n\n log::debug!(\"Capabilities tv: {}\", tv.capabilities);\n", "file_path": "openmls/src/group/tests/kat_messages.rs", "rank": 37, "score": 99589.88993686621 }, { "content": "fn default_managed_group_config() -> ManagedGroupConfig {\n\n let handshake_message_format = HandshakeMessageFormat::Plaintext;\n\n let update_policy = UpdatePolicy::default();\n\n let callbacks = ManagedGroupCallbacks::default();\n\n ManagedGroupConfig::new(\n\n handshake_message_format,\n\n update_policy,\n\n 10,\n\n 0,\n\n false, // use_ratchet_tree_extension\n\n callbacks,\n\n )\n\n}\n\n\n\n// # 1:1 join\n\n// A: Create group\n\n// B->A: KeyPackage\n\n// A->B: Welcome\n\n// ***: Verify group state\n\nctest_ciphersuites!(one_to_one_join, test(ciphersuite_name: CiphersuiteName) {\n", "file_path": "openmls/tests/test_interop_scenarios.rs", "rank": 38, "score": 98768.64783304345 }, { "content": "pub fn run_test_vector(test_vector: TranscriptTestVector) -> Result<(), TranscriptTestVectorError> {\n\n let ciphersuite =\n\n CiphersuiteName::try_from(test_vector.cipher_suite).expect(\"Invalid ciphersuite\");\n\n let ciphersuite = match Config::ciphersuite(ciphersuite) {\n\n Ok(cs) => cs,\n\n Err(_) => {\n\n log::info!(\n\n \"Unsupported ciphersuite {} in test vector. 
Skipping ...\",\n\n ciphersuite\n\n );\n\n return Ok(());\n\n }\n\n };\n\n log::debug!(\"Testing test vector for ciphersuite {:?}\", ciphersuite);\n\n log::trace!(\" {:?}\", test_vector);\n\n\n\n // Read input values.\n\n let group_id = GroupId {\n\n value: hex_to_bytes(&test_vector.group_id).into(),\n\n };\n", "file_path": "openmls/src/group/tests/kat_transcripts.rs", "rank": 39, "score": 98349.73610394904 }, { "content": "#[cfg(any(feature = \"test-utils\", test))]\n\npub fn run_test_vector(test_vector: KeyScheduleTestVector) -> Result<(), KsTestVectorError> {\n\n use tls_codec::Serialize;\n\n\n\n let ciphersuite =\n\n CiphersuiteName::try_from(test_vector.cipher_suite).expect(\"Invalid ciphersuite\");\n\n let ciphersuite = match Config::ciphersuite(ciphersuite) {\n\n Ok(cs) => cs,\n\n Err(_) => {\n\n log::info!(\n\n \"Unsupported ciphersuite {} in test vector. Skipping ...\",\n\n ciphersuite\n\n );\n\n return Ok(());\n\n }\n\n };\n\n log::debug!(\"Testing test vector for ciphersuite {:?}\", ciphersuite);\n\n log::trace!(\" {:?}\", test_vector);\n\n\n\n let group_id = hex_to_bytes(&test_vector.group_id);\n\n let init_secret = hex_to_bytes(&test_vector.initial_init_secret);\n", "file_path": "openmls/src/schedule/kat_key_schedule.rs", "rank": 40, "score": 97154.23738879776 }, { "content": "// This function is only safe to use if index <= size.\n\n// If this is not checked before calling the function, `parent` should be used.\n\nfn unsafe_parent(index: NodeIndex, size: LeafIndex) -> Result<NodeIndex, TreeMathError> {\n\n let x = index.as_usize();\n\n let n = size.as_usize();\n\n if index == root(size) {\n\n return Err(TreeMathError::RootHasNoParent);\n\n }\n\n let mut p = parent_step(x);\n\n while p >= node_width(n) {\n\n let new_p = parent_step(p);\n\n if new_p == p {\n\n return Err(TreeMathError::InvalidInput);\n\n }\n\n p = new_p;\n\n }\n\n Ok(NodeIndex::from(p))\n\n}\n\n\n\npub(crate) fn sibling(index: NodeIndex, size: LeafIndex) -> Result<NodeIndex, 
TreeMathError> {\n\n node_in_tree(index, size)?;\n\n let p = unsafe_parent(index, size)?;\n\n match index.cmp(&p) {\n\n Ordering::Less => right(p, size),\n\n Ordering::Greater => left(p),\n\n Ordering::Equal => left(p),\n\n }\n\n}\n\n\n", "file_path": "openmls/src/tree/treemath.rs", "rank": 41, "score": 96893.49115128207 }, { "content": "#[cfg(any(feature = \"test-utils\", test))]\n\npub fn run_test_vector(test_vector: EncryptionTestVector) -> Result<(), EncTestVectorError> {\n\n use tls_codec::{Deserialize, Serialize};\n\n\n\n let n_leaves = test_vector.n_leaves;\n\n if n_leaves != test_vector.leaves.len() as u32 {\n\n return Err(EncTestVectorError::LeafNumberMismatch);\n\n }\n\n let ciphersuite =\n\n CiphersuiteName::try_from(test_vector.cipher_suite).expect(\"Invalid ciphersuite\");\n\n let ciphersuite = match Config::ciphersuite(ciphersuite) {\n\n Ok(cs) => cs,\n\n Err(_) => {\n\n println!(\n\n \"Unsupported ciphersuite {} in test vector. Skipping ...\",\n\n ciphersuite\n\n );\n\n return Ok(());\n\n }\n\n };\n\n log::debug!(\"Running test vector with {:?}\", ciphersuite.name());\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_encryption.rs", "rank": 42, "score": 96001.0245530747 }, { "content": "#[cfg(any(feature = \"test-utils\", test))]\n\nfn receiver_group(ciphersuite: &Ciphersuite, group_id: &GroupId) -> MlsGroup {\n\n let credential_bundle = CredentialBundle::new(\n\n \"Receiver\".into(),\n\n CredentialType::Basic,\n\n SignatureScheme::from(ciphersuite.name()),\n\n )\n\n .unwrap();\n\n let key_package_bundle =\n\n KeyPackageBundle::new(&[ciphersuite.name()], &credential_bundle, Vec::new()).unwrap();\n\n MlsGroup::new(\n\n group_id.as_slice(),\n\n ciphersuite.name(),\n\n key_package_bundle,\n\n MlsGroupConfig::default(),\n\n None, /* Initial PSK */\n\n ProtocolVersion::Mls10,\n\n )\n\n .unwrap()\n\n}\n\n\n\n// XXX: we could be more creative in generating these messages.\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_encryption.rs", 
"rank": 43, "score": 95514.52848390581 }, { "content": "#[cfg(any(feature = \"test-utils\", test))]\n\npub fn run_test_vector(test_vector: TreeMathTestVector) -> Result<(), TmTestVectorError> {\n\n let n_leaves = test_vector.n_leaves as usize;\n\n let n_nodes = node_width(n_leaves);\n\n let leaves = LeafIndex::from(n_leaves);\n\n if test_vector.n_nodes != node_width(leaves.as_usize()) as u32 {\n\n return Err(TmTestVectorError::TreeSizeMismatch);\n\n }\n\n for i in 0..n_leaves {\n\n if test_vector.root[i] != root(LeafIndex::from(i + 1)).as_u32() {\n\n return Err(TmTestVectorError::RootIndexMismatch);\n\n }\n\n }\n\n\n\n for i in 0..n_nodes {\n\n if test_vector.left[i] != convert!(left(NodeIndex::from(i))) {\n\n return Err(TmTestVectorError::LeftIndexMismatch);\n\n }\n\n if test_vector.right[i] != convert!(right(NodeIndex::from(i), leaves)) {\n\n return Err(TmTestVectorError::RightIndexMismatch);\n\n }\n\n if test_vector.parent[i] != convert!(parent(NodeIndex::from(i), leaves)) {\n\n return Err(TmTestVectorError::ParentIndexMismatch);\n\n }\n\n if test_vector.sibling[i] != convert!(sibling(NodeIndex::from(i), leaves)) {\n\n return Err(TmTestVectorError::SiblingIndexMismatch);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_treemath.rs", "rank": 44, "score": 94887.89305003615 }, { "content": "pub fn run_test_vector(test_vector: TreeKemTestVector) -> Result<(), TreeKemTestVectorError> {\n\n log::debug!(\"Running TreeKEM test vector\");\n\n log::trace!(\"{:?}\", test_vector);\n\n let ciphersuite =\n\n CiphersuiteName::try_from(test_vector.cipher_suite).expect(\"Invalid ciphersuite\");\n\n let ciphersuite = Config::ciphersuite(ciphersuite).expect(\"Invalid ciphersuite\");\n\n\n\n log::trace!(\"ratchet tree before: {}\", test_vector.ratchet_tree_before);\n\n let ratchet_tree_before_bytes = hex_to_bytes(&test_vector.ratchet_tree_before);\n\n let ratchet_tree_before =\n\n TlsVecU32::<Option<Node>>::tls_deserialize(&mut 
ratchet_tree_before_bytes.as_slice())\n\n .expect(\"Error decoding ratchet tree\");\n\n\n\n let my_leaf_secret = Secret::from_slice(\n\n &hex_to_bytes(&test_vector.my_leaf_secret),\n\n ProtocolVersion::default(),\n\n ciphersuite,\n\n );\n\n\n\n let my_key_package =\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_tree_kem.rs", "rank": 45, "score": 92773.796336084 }, { "content": "#[cfg(any(feature = \"test-utils\", test))]\n\npub fn generate_test_vector(n_leaves: u32, ciphersuite: &'static Ciphersuite) -> TreeKemTestVector {\n\n use crate::{extensions::RatchetTreeExtension, test_utils::test_framework::CodecUse};\n\n\n\n // The test really only makes sense with two or more leaves\n\n if n_leaves <= 1 {\n\n panic!(\"test vector can only be generated with two or more members\")\n\n }\n\n // Set up a group with `n_leaves` members.\n\n let handshake_message_format = HandshakeMessageFormat::Plaintext;\n\n let update_policy = UpdatePolicy::default();\n\n let callbacks = ManagedGroupCallbacks::default();\n\n let managed_group_config = ManagedGroupConfig::new(\n\n handshake_message_format,\n\n update_policy,\n\n 0,\n\n 0,\n\n false, // use_ratchet_tree_extension\n\n callbacks,\n\n );\n\n let setup = ManagedTestSetup::new(\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_tree_kem.rs", "rank": 46, "score": 91598.52690328051 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum State {\n\n Initial,\n\n Context,\n\n Done,\n\n}\n\n\n\npub(crate) struct KeySchedule {\n\n ciphersuite: &'static Ciphersuite,\n\n intermediate_secret: Option<IntermediateSecret>,\n\n epoch_secret: Option<EpochSecret>,\n\n state: State,\n\n}\n\n\n\nimpl KeySchedule {\n\n /// Initialize the key schedule and return it.\n\n pub(crate) fn init(\n\n ciphersuite: &'static Ciphersuite,\n\n joiner_secret: JoinerSecret,\n\n psk: impl Into<Option<PskSecret>>,\n\n ) -> Self {\n", "file_path": "openmls/src/schedule/mod.rs", "rank": 47, "score": 76455.1336145237 }, { "content": "#[derive(Clone, 
Debug, Serialize, Deserialize, PartialEq)]\n\nstruct Constants {\n\n /// The default lifetime of a key package in seconds.\n\n default_key_package_lifetime: u64, // in Seconds\n\n /// The amount of time (in seconds) the lifetime of a `KeyPackage` is\n\n /// extended into the past to allow for skewed clocks.\n\n key_package_lifetime_margin: u64, // in Seconds\n\n}\n\n\n\n/// The configuration we use for the library (`Config`) is not exactly the same\n\n/// as the one we persist.\n", "file_path": "openmls/src/config/mod.rs", "rank": 48, "score": 76095.8243074652 }, { "content": "#[derive(Clap)]\n\nstruct Opts {\n\n #[clap(short, long, default_value = \"[::1]\")]\n\n host: String,\n\n\n\n #[clap(short, long, default_value = \"50051\")]\n\n port: u16,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let opts = Opts::parse();\n\n pretty_env_logger::init();\n\n\n\n let addr = format!(\"{}:{}\", opts.host, opts.port).parse().unwrap();\n\n let mls_client_impl = MlsClientImpl::new();\n\n\n\n println!(\"Listening on {}\", addr);\n\n\n\n Server::builder()\n\n .add_service(MlsClientServer::new(mls_client_impl))\n\n .serve(addr)\n\n .await?;\n\n\n\n Ok(())\n\n}\n", "file_path": "interop_client/src/main.rs", "rank": 49, "score": 76090.71767315769 }, { "content": "/// The intermediate secret includes the optional PSK and is used to later\n\n/// derive the welcome secret and epoch secret\n\nstruct IntermediateSecret {\n\n secret: Secret,\n\n}\n\n\n\nimpl IntermediateSecret {\n\n /// Derive an `IntermediateSecret` from a `JoinerSecret` and an optional\n\n /// PSK.\n\n fn new(joiner_secret: &JoinerSecret, psk: Option<PskSecret>) -> Self {\n\n log_crypto!(trace, \"PSK input: {:x?}\", psk.as_ref().map(|p| p.secret()));\n\n let secret = joiner_secret\n\n .secret\n\n .hkdf_extract(psk.as_ref().map(|p| p.secret()));\n\n log_crypto!(trace, \"Intermediate secret: {:x?}\", secret);\n\n Self { secret }\n\n }\n\n}\n\n\n\npub(crate) struct 
WelcomeSecret {\n\n secret: Secret,\n\n}\n", "file_path": "openmls/src/schedule/mod.rs", "rank": 50, "score": 74962.97289194456 }, { "content": "#[derive(TlsSerialize, TlsSize)]\n\nstruct KdfLabel {\n\n length: u16,\n\n label: TlsByteVecU8,\n\n context: TlsByteVecU32,\n\n}\n\n\n\nimpl KdfLabel {\n\n /// Serialize this label.\n\n /// Returns the serialized label as byte vector or returns a [`CryptoError`]\n\n /// if the parameters are invalid.\n\n fn serialized_label(\n\n context: &[u8],\n\n label: String,\n\n length: usize,\n\n ) -> Result<Vec<u8>, CryptoError> {\n\n if length > u16::MAX.into() {\n\n debug_assert!(\n\n false,\n\n \"Library error: Trying to derive a key with a too large length field!\"\n\n );\n", "file_path": "openmls/src/ciphersuite/mod.rs", "rank": 51, "score": 74958.31901758391 }, { "content": "/// An intermediate secret in the key schedule, the `EpochSecret` is used to\n\n/// create an `EpochSecrets` object and is finally consumed when creating that\n\n/// epoch's `InitSecret`.\n\nstruct EpochSecret {\n\n secret: Secret,\n\n}\n\n\n\nimpl EpochSecret {\n\n /// Derive an `EpochSecret` from a `JoinerSecret`\n\n fn new(\n\n ciphersuite: &Ciphersuite,\n\n intermediate_secret: IntermediateSecret,\n\n group_context: &GroupContext,\n\n ) -> Self {\n\n // FIXME: remove unwraps\n\n let secret = intermediate_secret\n\n .secret\n\n .kdf_expand_label(\n\n \"epoch\",\n\n &group_context.tls_serialize_detached().unwrap(),\n\n ciphersuite.hash_length(),\n\n )\n\n .unwrap();\n", "file_path": "openmls/src/schedule/mod.rs", "rank": 52, "score": 74957.79530560362 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct PersistentConfig {\n\n protocol_versions: Vec<ProtocolVersion>,\n\n ciphersuites: Vec<Ciphersuite>,\n\n extensions: Vec<ExtensionType>,\n\n constants: Constants,\n\n}\n\n\n\n/// # OpenMLS Configuration\n\n///\n\n/// This is the global configuration for OpenMLS.\n\n#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Config {\n\n 
protocol_versions: Vec<ProtocolVersion>,\n\n ciphersuites: Vec<Ciphersuite>,\n\n extensions: Vec<ExtensionType>,\n\n constants: Constants,\n\n}\n\n\n\n// Convert a config that's being read from a file to the config we use.\n\nimpl From<PersistentConfig> for Config {\n", "file_path": "openmls/src/config/mod.rs", "rank": 53, "score": 74953.0238022684 }, { "content": "struct TestInput {\n\n test_name: Ident,\n\n // An array to iterate over.\n\n parameters: ExprArray,\n\n body: Block,\n\n}\n\n\n\nimpl Parse for TestInput {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n Ok(Self {\n\n test_name: input.parse()?,\n\n parameters: input.parse()?,\n\n body: input.parse()?,\n\n })\n\n }\n\n}\n\n\n", "file_path": "test_macros/src/lib.rs", "rank": 54, "score": 74953.0238022684 }, { "content": "// Common setup for tests.\n\nfn setup(ciphersuite: &Ciphersuite, len: usize) -> (KeyPackageBundle, LeafIndex, Vec<NodeIndex>) {\n\n let credential_bundle = CredentialBundle::new(\n\n \"username\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let key_package_bundle =\n\n KeyPackageBundle::new(&[ciphersuite.name()], &credential_bundle, vec![]).unwrap();\n\n let own_index = LeafIndex::from(0u32);\n\n let direct_path = generate_path_u8(len);\n\n\n\n (key_package_bundle, own_index, direct_path)\n\n}\n\n\n", "file_path": "openmls/src/tree/tests_and_kats/unit_tests/test_private_tree.rs", "rank": 55, "score": 74463.17801060555 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone, Default)]\n\nstruct Epoch {\n\n // Chosen by the generator\n\n tree_hash: String,\n\n commit_secret: String,\n\n // XXX: PSK is not supported in OpenMLS yet #141\n\n psk_secret: String,\n\n confirmed_transcript_hash: String,\n\n\n\n // Computed values\n\n group_context: String,\n\n joiner_secret: String,\n\n welcome_secret: String,\n\n init_secret: String,\n\n sender_data_secret: String,\n\n encryption_secret: String,\n\n exporter_secret: String,\n\n 
authentication_secret: String,\n\n external_secret: String,\n\n confirmation_key: String,\n\n membership_key: String,\n", "file_path": "openmls/src/schedule/kat_key_schedule.rs", "rank": 56, "score": 73880.56697551667 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone, Default)]\n\nstruct LeafSequence {\n\n generations: u32,\n\n handshake: Vec<RatchetStep>,\n\n application: Vec<RatchetStep>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct EncryptionTestVector {\n\n pub cipher_suite: u16,\n\n pub n_leaves: u32,\n\n encryption_secret: String,\n\n sender_data_secret: String,\n\n sender_data_info: SenderDataInfo,\n\n leaves: Vec<LeafSequence>,\n\n}\n\n\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_encryption.rs", "rank": 57, "score": 70923.53650306095 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone, Default)]\n\nstruct RatchetStep {\n\n key: String,\n\n nonce: String,\n\n plaintext: String,\n\n ciphertext: String,\n\n}\n\n\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_encryption.rs", "rank": 58, "score": 70923.53650306095 }, { "content": "#[derive(TlsSerialize, TlsSize)]\n\nstruct EncodedGroupSecrets<'a> {\n\n pub(crate) joiner_secret: &'a JoinerSecret,\n\n pub(crate) path_secret: Option<&'a PathSecret>,\n\n pub(crate) psks: &'a PreSharedKeys,\n\n}\n\n\n\nimpl GroupSecrets {\n\n /// Create new encoded group secrets.\n\n pub(crate) fn new_encoded<'a>(\n\n joiner_secret: &JoinerSecret,\n\n path_secret: Option<&'a PathSecret>,\n\n psks: &'a PreSharedKeys,\n\n ) -> Result<Vec<u8>, tls_codec::Error> {\n\n EncodedGroupSecrets {\n\n joiner_secret,\n\n path_secret,\n\n psks,\n\n }\n\n .tls_serialize_detached()\n\n }\n", "file_path": "openmls/src/messages/mod.rs", "rank": 59, "score": 70355.6725866627 }, { "content": "fn main() {\n\n pretty_env_logger::init();\n\n\n\n let stdout = stdout();\n\n let mut stdout = stdout.lock();\n\n let stdin = stdin();\n\n let mut stdin = stdin.lock();\n\n\n\n 
stdout\n\n .write_all(b\" >>> Welcome to the OpenMLS CLI :)\\nType help to get a list of commands\\n\\n\")\n\n .unwrap();\n\n let mut client = None;\n\n\n\n loop {\n\n stdout.flush().unwrap();\n\n let op = stdin.read_line().unwrap().unwrap();\n\n\n\n // Register a client.\n\n // There's no persistence. So once the client app stops you have to\n\n // register a new client.\n", "file_path": "cli/src/main.rs", "rank": 60, "score": 70347.03459809834 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone, Default)]\n\nstruct SenderDataInfo {\n\n ciphertext: String,\n\n key: String,\n\n nonce: String,\n\n}\n\n\n", "file_path": "openmls/src/tree/tests_and_kats/kats/kat_encryption.rs", "rank": 61, "score": 70024.4295606378 }, { "content": "#[test]\n\nfn basic_test() {\n\n // Reset the server before doing anything for testing.\n\n backend::Backend::default().reset_server();\n\n\n\n // Create one client\n\n let mut client_1 = user::User::new(\"Client1\".to_string());\n\n\n\n // Create another client\n\n let mut client_2 = user::User::new(\"Client2\".to_string());\n\n\n\n // Create another client\n\n let mut client_3 = user::User::new(\"Client3\".to_string());\n\n\n\n // Update the clients to know about the other clients.\n\n client_1.update(None).unwrap();\n\n client_2.update(None).unwrap();\n\n client_3.update(None).unwrap();\n\n\n\n // Client 1 creates a group.\n\n client_1.create_group(\"MLS Discussions\".to_string());\n", "file_path": "cli/src/main.rs", "rank": 62, "score": 69166.12195792679 }, { "content": "#[test]\n\nfn padding() {\n\n // Create a test config for a single client supporting all possible\n\n // ciphersuites.\n\n let alice_config = TestClientConfig {\n\n name: \"alice\",\n\n ciphersuites: Config::supported_ciphersuite_names().to_vec(),\n\n };\n\n\n\n let mut test_group_configs = Vec::new();\n\n\n\n // Create a group config for each ciphersuite.\n\n for ciphersuite_name in Config::supported_ciphersuite_names() {\n\n let test_group = TestGroupConfig 
{\n\n ciphersuite: *ciphersuite_name,\n\n config: MlsGroupConfig::default(),\n\n members: vec![alice_config.clone()],\n\n };\n\n test_group_configs.push(test_group);\n\n }\n\n\n", "file_path": "openmls/tests/test_framing.rs", "rank": 63, "score": 69166.12195792679 }, { "content": "#[test]\n\nfn codec() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n let credential_bundle = CredentialBundle::new(\n\n vec![7, 8, 9],\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let sender = Sender {\n\n sender_type: SenderType::Member,\n\n sender: LeafIndex::from(2u32),\n\n };\n\n let group_context = GroupContext::new(\n\n GroupId::random(ciphersuite),\n\n GroupEpoch(1),\n\n vec![],\n\n vec![],\n\n &[],\n\n )\n\n .unwrap();\n", "file_path": "openmls/src/framing/test_framing.rs", "rank": 64, "score": 68043.4632248107 }, { "content": "#[test]\n\nfn invalid_inputs() {\n\n assert_eq!(\n\n Err(TreeMathError::InvalidInput),\n\n unsafe_parent(1000u32.into(), 100u32.into())\n\n );\n\n}\n\n\n", "file_path": "openmls/src/tree/treemath.rs", "rank": 65, "score": 68043.4632248107 }, { "content": "#[test]\n\nfn lifetime() {\n\n // A freshly created extensions must be valid.\n\n let ext = LifetimeExtension::default();\n\n assert!(ext.is_valid());\n\n\n\n // An extension without lifetime is invalid (waiting for 1 second).\n\n let ext = LifetimeExtension::new(0);\n\n std::thread::sleep(std::time::Duration::from_secs(1));\n\n assert!(!ext.is_valid());\n\n\n\n // Test (de)serializing invalid extension\n\n let serialized = ext\n\n .tls_serialize_detached()\n\n .expect(\"error encoding life time extension\");\n\n let ext_deserialized = LifetimeExtension::tls_deserialize(&mut serialized.as_slice())\n\n .err()\n\n .expect(\"Didn't get an error deserializing invalid life time extension\");\n\n assert_eq!(\n\n ext_deserialized,\n\n tls_codec::Error::DecodingError(\"Invalid\".to_string()),\n", "file_path": "openmls/src/extensions/test_extensions.rs", 
"rank": 66, "score": 68043.4632248107 }, { "content": "#[test]\n\nfn capabilities() {\n\n // A capabilities extension with the default values for openmls.\n\n let extension_bytes = [\n\n 0u8, 1, 0, 0, 0, 17, 2, 1, 200, 6, 0, 1, 0, 2, 0, 3, 6, 0, 1, 0, 2, 0, 3,\n\n ];\n\n let mut extension_bytes_mut = &extension_bytes[..];\n\n\n\n let ext = Extension::Capabilities(CapabilitiesExtension::default());\n\n\n\n // Check that decoding works\n\n let capabilities_extension = Extension::tls_deserialize(&mut extension_bytes_mut).unwrap();\n\n assert_eq!(ext, capabilities_extension);\n\n\n\n // Encoding creates the expected bytes.\n\n assert_eq!(\n\n extension_bytes,\n\n &capabilities_extension.tls_serialize_detached().unwrap()[..]\n\n );\n\n\n\n // Test encoding and decoding\n\n let encoded = ext\n\n .tls_serialize_detached()\n\n .expect(\"error encoding capabilities extension\");\n\n let ext_decoded = Extension::tls_deserialize(&mut encoded.as_slice())\n\n .expect(\"error decoding capabilities extension\");\n\n\n\n assert_eq!(ext, ext_decoded);\n\n}\n\n\n", "file_path": "openmls/src/extensions/test_extensions.rs", "rank": 67, "score": 68043.4632248107 }, { "content": "#[test]\n\nfn protocol_version() {\n\n let mls10_version = ProtocolVersion::Mls10;\n\n let default_version = ProtocolVersion::default();\n\n\n\n // The encoding of the protocol version is the version as u8.\n\n let mls10_encoded = mls10_version.tls_serialize_detached().unwrap();\n\n assert_eq!(1, mls10_encoded.len());\n\n assert_eq!(mls10_encoded[0], mls10_version as u8);\n\n\n\n let default_encoded = default_version.tls_serialize_detached().unwrap();\n\n assert_eq!(1, default_encoded.len());\n\n assert_eq!(default_encoded[0], default_version as u8);\n\n\n\n // Default and MLS1.0 versions have to be 1.\n\n assert_eq!(1, mls10_encoded[0]);\n\n assert_eq!(1, default_encoded[0]);\n\n\n\n // Make sure the supported protocol versions are what we expect them to be.\n\n let supported_versions = 
Config::supported_versions();\n\n assert_eq!(\n\n vec![ProtocolVersion::Mls10, ProtocolVersion::Mls10Draft11],\n\n supported_versions\n\n );\n\n}\n\n\n", "file_path": "openmls/tests/test_config.rs", "rank": 68, "score": 68043.4632248107 }, { "content": "/// This test simulates various group operations like Add, Update, Remove in a\n\n/// small group\n\n/// - Alice creates a group\n\n/// - Alice adds Bob\n\n/// - Alice sends a message to Bob\n\n/// - Bob updates and commits\n\n/// - Alice updates and commits\n\n/// - Bob updates and Alice commits\n\n/// - Bob adds Charlie\n\n/// - Charlie sends a message to the group\n\n/// - Charlie updates and commits\n\n/// - Charlie removes Bob\n\nfn group_operations() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n let group_aad = b\"Alice's test group\";\n\n\n\n // Define credential bundles\n\n let alice_credential_bundle = CredentialBundle::new(\n\n \"Alice\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let bob_credential_bundle = CredentialBundle::new(\n\n \"Bob\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n\n\n // Mandatory extensions\n\n let capabilities_extension = Extension::Capabilities(CapabilitiesExtension::new(\n", "file_path": "openmls/tests/test_group.rs", "rank": 69, "score": 68043.4632248107 }, { "content": "#[test]\n\nfn test_xor() {\n\n let ciphersuite = Ciphersuite::default();\n\n let reuse_guard: ReuseGuard = ReuseGuard::from_random(Ciphersuite::default());\n\n let original_nonce = AeadNonce::random(ciphersuite);\n\n let mut nonce = original_nonce.clone();\n\n nonce.xor_with_reuse_guard(&reuse_guard);\n\n assert_ne!(\n\n original_nonce, nonce,\n\n \"xoring with reuse_guard did not change the nonce\"\n\n );\n\n nonce.xor_with_reuse_guard(&reuse_guard);\n\n assert_eq!(\n\n original_nonce, nonce,\n\n \"xoring twice changed the original value\"\n\n );\n\n}\n", "file_path": 
"openmls/src/ciphersuite/mod.rs", "rank": 70, "score": 68043.4632248107 }, { "content": "/// Validator function for RemoveProposals\n\n/// `(managed_group: &ManagedGroup, sender: &Credential, removed_member:\n\n/// &Credential) -> bool`\n\nfn validate_remove(\n\n _managed_group: &ManagedGroup,\n\n _sender: &Credential,\n\n _removed_member: &Credential,\n\n) -> bool {\n\n true\n\n}\n\n\n", "file_path": "openmls/tests/test_managed_group.rs", "rank": 71, "score": 66979.86597640275 }, { "content": "/// Validator function for AddProposals\n\n/// `(managed_group: &ManagedGroup, sender: &Credential, added_member:\n\n/// &Credential) -> bool`\n\nfn validate_add(\n\n _managed_group: &ManagedGroup,\n\n _sender: &Credential,\n\n _added_member: &Credential,\n\n) -> bool {\n\n true\n\n}\n", "file_path": "openmls/tests/test_managed_group.rs", "rank": 72, "score": 66979.86597640275 }, { "content": "#[test]\n\nfn test_commit_encoding() {\n\n let test_setup = create_encoding_test_setup();\n\n let test_clients = test_setup.clients.borrow();\n\n let alice = test_clients.get(\"alice\").unwrap().borrow();\n\n\n\n for group_state in alice.group_states.borrow_mut().values_mut() {\n\n let alice_credential_bundle = alice\n\n .credential_bundles\n\n .get(&group_state.ciphersuite().name())\n\n .unwrap();\n\n\n\n let capabilities_extension = Extension::Capabilities(CapabilitiesExtension::new(\n\n None,\n\n Some(&[group_state.ciphersuite().name()]),\n\n None,\n\n ));\n\n let lifetime_extension = Extension::LifeTime(LifetimeExtension::new(60));\n\n let mandatory_extensions: Vec<Extension> = vec![capabilities_extension, lifetime_extension];\n\n\n\n let alice_key_package_bundle = KeyPackageBundle::new(\n", "file_path": "openmls/tests/test_encoding.rs", "rank": 73, "score": 66974.8516962939 }, { "content": "fn impl_ciphersuite_tests(\n\n input: TestInput,\n\n test_attribute: proc_macro2::TokenStream,\n\n) -> TokenStream {\n\n let ast = input.body.clone();\n\n let test_name = 
input.test_name.clone();\n\n let num_parameters = input.parameters.elems.len();\n\n let params = input.parameters.clone();\n\n let tests = (0..num_parameters).map(|i| {\n\n let param_name = match &input.parameters.elems[i] {\n\n Expr::Field(f) => match &f.member {\n\n Member::Named(n) => n.to_string(),\n\n _ => panic!(\"Unsupported enum with unnamed members\"),\n\n },\n\n Expr::Path(p) => p.path.segments.last().unwrap().ident.to_string(),\n\n _ => panic!(\"Unexpected input\"),\n\n };\n\n let test_name = Ident::new(\n\n &format!(\"{}_{}\", test_name.to_string(), param_name),\n\n Span::call_site(),\n", "file_path": "test_macros/src/lib.rs", "rank": 74, "score": 66974.8516962939 }, { "content": "#[test]\n\nfn membership_tag() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n let credential_bundle = CredentialBundle::new(\n\n vec![7, 8, 9],\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let group_context = GroupContext::new(\n\n GroupId::random(ciphersuite),\n\n GroupEpoch(1),\n\n vec![],\n\n vec![],\n\n &[],\n\n )\n\n .unwrap();\n\n let membership_key =\n\n MembershipKey::from_secret(Secret::random(ciphersuite, None /* MLS version */));\n\n let mut mls_plaintext = MlsPlaintext::new_application(\n\n LeafIndex::from(2u32),\n", "file_path": "openmls/src/framing/test_framing.rs", "rank": 75, "score": 66974.8516962939 }, { "content": "#[test]\n\nfn test_protocol_version() {\n\n use crate::config::ProtocolVersion;\n\n let mls10_version = ProtocolVersion::Mls10;\n\n let default_version = ProtocolVersion::default();\n\n let mls10_e = mls10_version.tls_serialize_detached().unwrap();\n\n assert_eq!(mls10_e[0], mls10_version as u8);\n\n let default_e = default_version.tls_serialize_detached().unwrap();\n\n assert_eq!(default_e[0], default_version as u8);\n\n assert_eq!(mls10_e[0], 1);\n\n assert_eq!(default_e[0], 1);\n\n}\n\n\n\n/// This struct contains a credential and the corresponding private 
key.\n\n#[derive(Debug)]\n\n#[cfg_attr(test, derive(PartialEq))]\n\npub struct CredentialBundle {\n\n credential: Credential,\n\n signature_private_key: SignaturePrivateKey,\n\n}\n\n\n", "file_path": "openmls/src/credentials/mod.rs", "rank": 76, "score": 66974.8516962939 }, { "content": "#[test]\n\nfn test_node_in_tree() {\n\n let tests = [(0u32, 2u32), (1, 2), (2, 2), (5, 5), (8, 5)];\n\n for test in tests.iter() {\n\n node_in_tree(test.0.into(), test.1.into()).unwrap();\n\n }\n\n}\n\n\n", "file_path": "openmls/src/tree/treemath.rs", "rank": 77, "score": 66974.8516962939 }, { "content": "#[test]\n\nfn test_leaf_in_tree() {\n\n let tests = [(0u32, 2u32), (1, 2), (4, 5), (9, 10)];\n\n for test in tests.iter() {\n\n leaf_in_tree(test.0.into(), test.1.into()).unwrap();\n\n }\n\n}\n\n\n", "file_path": "openmls/src/tree/treemath.rs", "rank": 78, "score": 66974.8516962939 }, { "content": "fn generate(\n\n ciphersuite: &'static Ciphersuite,\n\n init_secret: &InitSecret,\n\n group_id: &[u8],\n\n epoch: u64,\n\n) -> (\n\n Vec<u8>,\n\n CommitSecret,\n\n JoinerSecret,\n\n PskSecret,\n\n WelcomeSecret,\n\n EpochSecrets,\n\n Vec<u8>,\n\n GroupContext,\n\n HpkeKeyPair,\n\n) {\n\n let tree_hash = ciphersuite.randombytes(ciphersuite.hash_length());\n\n let commit_secret = CommitSecret::random(ciphersuite);\n\n let psk_secret = PskSecret::random(ciphersuite);\n\n let joiner_secret = JoinerSecret::new(&commit_secret, init_secret);\n", "file_path": "openmls/src/schedule/kat_key_schedule.rs", "rank": 79, "score": 66974.8516962939 }, { "content": "#[test]\n\nfn test_node_not_in_tree() {\n\n let tests = [(3u32, 2u32), (13, 7)];\n\n for test in tests.iter() {\n\n assert_eq!(\n\n node_in_tree(test.0.into(), test.1.into()),\n\n Err(TreeMathError::NodeNotInTree)\n\n );\n\n }\n\n}\n\n\n", "file_path": "openmls/src/tree/treemath.rs", "rank": 80, "score": 66974.8516962939 }, { "content": "#[test]\n\nfn unknown_sender() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n let 
group_aad = b\"Alice's test group\";\n\n\n\n // Define credential bundles\n\n let alice_credential_bundle = CredentialBundle::new(\n\n \"Alice\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let bob_credential_bundle = CredentialBundle::new(\n\n \"Bob\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let charlie_credential_bundle = CredentialBundle::new(\n\n \"Charlie\".into(),\n\n CredentialType::Basic,\n", "file_path": "openmls/src/framing/test_framing.rs", "rank": 81, "score": 66974.8516962939 }, { "content": "#[test]\n\nfn test_leaf_not_in_tree() {\n\n let tests = [(2u32, 2u32), (7, 7)];\n\n for test in tests.iter() {\n\n assert_eq!(\n\n leaf_in_tree(test.0.into(), test.1.into()),\n\n Err(TreeMathError::LeafNotInTree)\n\n );\n\n }\n\n}\n", "file_path": "openmls/src/tree/treemath.rs", "rank": 82, "score": 66974.8516962939 }, { "content": "#[test]\n\nfn basic_group_setup() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n let group_aad = b\"Alice's test group\";\n\n\n\n // Define credential bundles\n\n let alice_credential_bundle = CredentialBundle::new(\n\n \"Alice\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let bob_credential_bundle = CredentialBundle::new(\n\n \"Bob\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n\n\n // Generate KeyPackages\n\n let bob_key_package_bundle =\n", "file_path": "openmls/tests/test_group.rs", "rank": 83, "score": 66974.8516962939 }, { "content": "#[test]\n\nfn supported_ciphersuites() {\n\n const SUPPORTED_CIPHERSUITE_NAMES: &[CiphersuiteName] = &[\n\n CiphersuiteName::MLS10_128_DHKEMX25519_AES128GCM_SHA256_Ed25519,\n\n CiphersuiteName::MLS10_128_DHKEMX25519_CHACHA20POLY1305_SHA256_Ed25519,\n\n CiphersuiteName::MLS10_128_DHKEMP256_AES128GCM_SHA256_P256,\n\n ];\n\n\n\n const UNSUPPORTED_CIPHERSUITE_NAMES: 
&[CiphersuiteName] = &[\n\n CiphersuiteName::MLS10_256_DHKEMX448_AES256GCM_SHA512_Ed448,\n\n CiphersuiteName::MLS10_256_DHKEMP521_AES256GCM_SHA512_P521,\n\n CiphersuiteName::MLS10_256_DHKEMX448_CHACHA20POLY1305_SHA512_Ed448,\n\n ];\n\n\n\n for ciphersuite_name in SUPPORTED_CIPHERSUITE_NAMES {\n\n // Instantiate ciphersuite\n\n let ciphersuite = Ciphersuite::new(*ciphersuite_name)\n\n .expect(\"Could not instantiate a Ciphersuite object.\");\n\n // Create signature keypair\n\n let _signature_keypair = SignatureKeypair::new(ciphersuite.signature_scheme())\n\n .expect(\"Could not create signature keypair.\");\n", "file_path": "openmls/src/ciphersuite/tests/test_ciphersuite.rs", "rank": 84, "score": 65956.47618875418 }, { "content": "#[test]\n\nfn create_commit_optional_path() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n let group_aad = b\"Alice's test group\";\n\n\n\n // Define identities\n\n let alice_credential_bundle = CredentialBundle::new(\n\n \"Alice\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let bob_credential_bundle = CredentialBundle::new(\n\n \"Bob\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n\n\n // Mandatory extensions, will be fixed in #164\n\n let lifetime_extension = Extension::LifeTime(LifetimeExtension::new(60));\n", "file_path": "openmls/tests/test_group.rs", "rank": 85, "score": 65956.47618875418 }, { "content": "#[test]\n\nfn test_signatures() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n // Test that valid signatures are properly verified.\n\n let payload = vec![0u8];\n\n let signature_scheme =\n\n SignatureScheme::try_from(ciphersuite.name()).expect(\"error deriving signature scheme\");\n\n let keypair =\n\n SignatureKeypair::new(signature_scheme).expect(\"error generating signature keypair\");\n\n let mut signature = keypair.sign(&payload).expect(\"error creating signature\");\n\n println!(\"Done signing 
payload\\n\");\n\n keypair\n\n .verify(&signature, &payload)\n\n .expect(\"error verifying signature\");\n\n println!(\"Done verifying payload\\n\");\n\n\n\n // Tamper with signature such that verification fails. We choose a byte\n\n // somewhere in the middle to make the verification fail, not the DER\n\n // decoding (in the case of ECDSA signatures).\n\n let mut modified_signature = signature.value.as_slice().to_vec();\n\n modified_signature[20] ^= 0xFF;\n", "file_path": "openmls/src/ciphersuite/tests/test_ciphersuite.rs", "rank": 86, "score": 65956.47618875418 }, { "content": "#[test]\n\nfn test_random() {\n\n random_usize();\n\n randombytes(0);\n\n}\n\n\n", "file_path": "openmls/tests/utils/mls_utils/mod.rs", "rank": 87, "score": 65956.47618875418 }, { "content": "#[test]\n\nfn test_pgs() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n let group_aad = b\"Alice's test group\";\n\n\n\n // Define credential bundles\n\n let alice_credential_bundle = CredentialBundle::new(\n\n \"Alice\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let bob_credential_bundle = CredentialBundle::new(\n\n \"Bob\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n\n\n // Generate KeyPackages\n\n let bob_key_package_bundle =\n", "file_path": "openmls/src/messages/tests/test_pgs.rs", "rank": 88, "score": 65956.47618875418 }, { "content": "#[test]\n\nfn proposals_codec() {\n\n let ciphersuite =\n\n &Ciphersuite::new(CiphersuiteName::MLS10_128_DHKEMX25519_AES128GCM_SHA256_Ed25519).unwrap();\n\n\n\n // Proposal\n\n\n\n let remove_proposal = RemoveProposal { removed: 123 };\n\n let proposal = Proposal::Remove(remove_proposal);\n\n let proposal_or_ref = ProposalOrRef::Proposal(proposal.clone());\n\n let encoded = proposal_or_ref.tls_serialize_detached().unwrap();\n\n let decoded = ProposalOrRef::tls_deserialize(&mut encoded.as_slice()).unwrap();\n\n\n\n assert_eq!(proposal_or_ref, 
decoded);\n\n\n\n // Reference\n\n\n\n let reference = ProposalReference::from_proposal(ciphersuite, &proposal).unwrap();\n\n let proposal_or_ref = ProposalOrRef::Reference(reference);\n\n let encoded = proposal_or_ref.tls_serialize_detached().unwrap();\n\n let decoded = ProposalOrRef::tls_deserialize(&mut encoded.as_slice()).unwrap();\n\n\n\n assert_eq!(proposal_or_ref, decoded);\n\n}\n", "file_path": "openmls/src/messages/tests/test_proposals.rs", "rank": 89, "score": 65956.47618875418 }, { "content": "/// This test tests encoding and decoding of remove proposals.\n\nfn test_remove_proposal_encoding() {\n\n let test_setup = create_encoding_test_setup();\n\n let test_clients = test_setup.clients.borrow();\n\n let alice = test_clients.get(\"alice\").unwrap().borrow();\n\n\n\n for group_state in alice.group_states.borrow_mut().values_mut() {\n\n let credential_bundle = alice\n\n .credential_bundles\n\n .get(&group_state.ciphersuite().name())\n\n .unwrap();\n\n\n\n let remove = group_state\n\n .create_remove_proposal(&[], credential_bundle, LeafIndex::from(1u32))\n\n .expect(\"Could not create proposal.\");\n\n let remove_encoded = remove\n\n .tls_serialize_detached()\n\n .expect(\"Could not encode proposal.\");\n\n let remove_decoded =\n\n match VerifiableMlsPlaintext::tls_deserialize(&mut remove_encoded.as_slice()) {\n\n Ok(a) => a,\n", "file_path": "openmls/tests/test_encoding.rs", "rank": 90, "score": 65956.47618875418 }, { "content": "#[test]\n\nfn secret_init() {\n\n let csuite = Ciphersuite::default();\n\n\n\n // These two secrets must be incompatible\n\n let default_secret = Secret::random(csuite, None);\n\n let draft_secret = Secret::random(csuite, ProtocolVersion::Mls10Draft11);\n\n\n\n let derived_default_secret = default_secret.derive_secret(\"my_test_label\");\n\n let derived_draft_secret = draft_secret.derive_secret(\"my_test_label\");\n\n assert_ne!(derived_default_secret, derived_draft_secret);\n\n}\n\n\n", "file_path": 
"openmls/src/ciphersuite/tests/test_secrets.rs", "rank": 91, "score": 65956.47618875418 }, { "content": "/// This test tests encoding and decoding of application messages.\n\nfn test_application_message_encoding() {\n\n let test_setup = create_encoding_test_setup();\n\n let test_clients = test_setup.clients.borrow();\n\n let alice = test_clients.get(\"alice\").unwrap().borrow();\n\n\n\n // Create a message in each group and test the padding.\n\n for group_state in alice.group_states.borrow_mut().values_mut() {\n\n let credential_bundle = alice\n\n .credential_bundles\n\n .get(&group_state.ciphersuite().name())\n\n .unwrap();\n\n for _ in 0..100 {\n\n // Test encoding/decoding of Application messages.\n\n let message = randombytes(random_usize() % 1000);\n\n let aad = randombytes(random_usize() % 1000);\n\n let encrypted_message = group_state\n\n .create_application_message(&aad, &message, credential_bundle, 0)\n\n .unwrap();\n\n let encrypted_message_bytes = encrypted_message.tls_serialize_detached().unwrap();\n\n let encrypted_message_decoded =\n\n match MlsCiphertext::tls_deserialize(&mut encrypted_message_bytes.as_slice()) {\n\n Ok(a) => a,\n\n Err(err) => panic!(\"Error decoding MlsCiphertext: {:?}\", err),\n\n };\n\n assert_eq!(encrypted_message, encrypted_message_decoded);\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "openmls/tests/test_encoding.rs", "rank": 92, "score": 65956.47618875418 }, { "content": "#[test]\n\nfn test_managed_api() {\n\n // Some basic setup functions for the managed group.\n\n let handshake_message_format = HandshakeMessageFormat::Plaintext;\n\n let update_policy = UpdatePolicy::default();\n\n let callbacks = ManagedGroupCallbacks::default();\n\n let managed_group_config = ManagedGroupConfig::new(\n\n handshake_message_format,\n\n update_policy,\n\n 0,\n\n 0,\n\n false, // use_ratchet_tree_extension\n\n callbacks,\n\n );\n\n let number_of_clients = 20;\n\n let setup = ManagedTestSetup::new(\n\n managed_group_config,\n\n 
number_of_clients,\n\n CodecUse::SerializedMessages,\n\n );\n\n\n", "file_path": "openmls/tests/test_managed_api.rs", "rank": 93, "score": 65956.47618875418 }, { "content": "/// This test tests encoding and decoding of update proposals.\n\nfn test_update_proposal_encoding() {\n\n let test_setup = create_encoding_test_setup();\n\n let test_clients = test_setup.clients.borrow();\n\n let alice = test_clients.get(\"alice\").unwrap().borrow();\n\n\n\n for group_state in alice.group_states.borrow_mut().values_mut() {\n\n let credential_bundle = alice\n\n .credential_bundles\n\n .get(&group_state.ciphersuite().name())\n\n .unwrap();\n\n\n\n let capabilities_extension = Extension::Capabilities(CapabilitiesExtension::new(\n\n None,\n\n Some(&[group_state.ciphersuite().name()]),\n\n None,\n\n ));\n\n let lifetime_extension = Extension::LifeTime(LifetimeExtension::new(60));\n\n let mandatory_extensions: Vec<Extension> = vec![capabilities_extension, lifetime_extension];\n\n\n\n let key_package_bundle = KeyPackageBundle::new(\n", "file_path": "openmls/tests/test_encoding.rs", "rank": 94, "score": 65956.47618875418 }, { "content": "fn psk_output(\n\n ciphersuite: &'static Ciphersuite,\n\n psk_fetcher_option: Option<PskFetcher>,\n\n presharedkeys: &PreSharedKeys,\n\n) -> Result<Option<PskSecret>, PskError> {\n\n if !presharedkeys.psks.is_empty() {\n\n // Check if a PSK fetcher function was provided\n\n match psk_fetcher_option {\n\n Some(psk_fetcher) => {\n\n // Try to fetch the PSKs with the IDs\n\n match psk_fetcher(presharedkeys, ciphersuite) {\n\n Some(psks) => {\n\n // Combine the PSKs in to a PskSecret\n\n let psk_secret =\n\n PskSecret::new(ciphersuite, presharedkeys.psks.as_slice(), &psks)?;\n\n Ok(Some(psk_secret))\n\n }\n\n None => Err(PskError::PskIdNotFound),\n\n }\n\n }\n\n None => Err(PskError::NoPskFetcherProvided),\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n}\n", "file_path": "openmls/src/group/mls_group/mod.rs", "rank": 95, "score": 65956.47618875418 }, { 
"content": "/// This test tests encoding and decoding of add proposals.\n\nfn test_add_proposal_encoding() {\n\n let test_setup = create_encoding_test_setup();\n\n let test_clients = test_setup.clients.borrow();\n\n let alice = test_clients.get(\"alice\").unwrap().borrow();\n\n\n\n for group_state in alice.group_states.borrow_mut().values_mut() {\n\n let credential_bundle = alice\n\n .credential_bundles\n\n .get(&group_state.ciphersuite().name())\n\n .unwrap();\n\n\n\n let capabilities_extension = Extension::Capabilities(CapabilitiesExtension::new(\n\n None,\n\n Some(&[group_state.ciphersuite().name()]),\n\n None,\n\n ));\n\n let lifetime_extension = Extension::LifeTime(LifetimeExtension::new(60));\n\n let mandatory_extensions: Vec<Extension> = vec![capabilities_extension, lifetime_extension];\n\n\n\n let key_package_bundle = KeyPackageBundle::new(\n", "file_path": "openmls/tests/test_encoding.rs", "rank": 96, "score": 65956.47618875418 }, { "content": "#[test]\n\n#[should_panic]\n\nfn secret_incompatible() {\n\n let csuite = Ciphersuite::default();\n\n\n\n // These two secrets must be incompatible\n\n let default_secret = Secret::random(csuite, None);\n\n let draft_secret = Secret::random(csuite, ProtocolVersion::Mls10Draft11);\n\n\n\n // This must panic because the two secrets have incompatible MLS versions.\n\n let _default_extracted = default_secret.hkdf_extract(&draft_secret);\n\n}\n", "file_path": "openmls/src/ciphersuite/tests/test_secrets.rs", "rank": 97, "score": 65956.47618875418 }, { "content": "#[test]\n\nfn managed_group_operations() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n for handshake_message_format in vec![\n\n HandshakeMessageFormat::Plaintext,\n\n HandshakeMessageFormat::Ciphertext,\n\n ]\n\n .into_iter()\n\n {\n\n let group_id = GroupId::from_slice(b\"Test Group\");\n\n\n\n let key_store = KeyStore::default();\n\n\n\n // Generate credential bundles\n\n let alice_credential = key_store\n\n .generate_credential_bundle(\n\n 
\"Alice\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n", "file_path": "openmls/tests/test_managed_group.rs", "rank": 98, "score": 65956.47618875418 }, { "content": "#[test]\n\nfn confirmation_tag_presence() {\n\n for ciphersuite in Config::supported_ciphersuites() {\n\n let group_aad = b\"Alice's test group\";\n\n\n\n // Define credential bundles\n\n let alice_credential_bundle = CredentialBundle::new(\n\n \"Alice\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n let bob_credential_bundle = CredentialBundle::new(\n\n \"Bob\".into(),\n\n CredentialType::Basic,\n\n ciphersuite.signature_scheme(),\n\n )\n\n .unwrap();\n\n\n\n // Generate KeyPackages\n\n let bob_key_package_bundle =\n", "file_path": "openmls/src/framing/test_framing.rs", "rank": 99, "score": 65956.47618875418 } ]
Rust
compiler/crates/graphql-ir/src/transform.rs
nathalia234/relay
e1435c834383927947d2f38d32948305ed3baed9
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

use crate::ir::*;
use common::Spanned;
use std::sync::Arc;

/// A copy-on-write rewriter over the GraphQL IR.
///
/// Every `transform_*` method returns a [`Transformed`] verdict:
/// `Keep` (leave the original node untouched), `Delete` (drop the node),
/// or `Replace` (substitute a new node). The `default_transform_*`
/// methods implement the structural recursion; implementors override the
/// non-default hooks and delegate back to the defaults for the parts they
/// don't change.
///
/// List-returning helpers (`transform_selections`, `transform_directives`,
/// `transform_arguments`, `transform_list`) use `Option<Vec<T>>`, where
/// `None` means "nothing changed" so callers can avoid cloning entirely
/// untouched subtrees.
pub trait Transformer {
    /// Name of the concrete transform (used by implementors to identify it).
    const NAME: &'static str;
    /// When `false`, `transform_arguments` short-circuits and never visits
    /// argument lists, skipping that part of the traversal entirely.
    const VISIT_ARGUMENTS: bool;
    /// When `false`, `transform_directives` short-circuits and never visits
    /// directive lists.
    const VISIT_DIRECTIVES: bool;

    /// Hook for fragment definitions; defaults to the structural recursion.
    fn transform_fragment(
        &mut self,
        fragment: &FragmentDefinition,
    ) -> Transformed<FragmentDefinition> {
        self.default_transform_fragment(fragment)
    }

    /// Recurses into a fragment's selections and directives. A fragment whose
    /// selections all get deleted is itself deleted.
    fn default_transform_fragment(
        &mut self,
        fragment: &FragmentDefinition,
    ) -> Transformed<FragmentDefinition> {
        let selections = self.transform_selections(&fragment.selections);
        // Selections changed AND became empty -> the fragment is now invalid,
        // so delete it rather than produce an empty selection set.
        if let Some(selections) = &selections {
            if selections.is_empty() {
                return Transformed::Delete;
            }
        }
        let directives = self.transform_directives(&fragment.directives);
        // Neither child list changed (`None` == unchanged) -> keep original.
        if selections.is_none() && directives.is_none() {
            return Transformed::Keep;
        }
        Transformed::Replace(FragmentDefinition {
            directives: directives.unwrap_or_else(|| fragment.directives.clone()),
            selections: selections.unwrap_or_else(|| fragment.selections.clone()),
            ..fragment.clone()
        })
    }

    /// Hook for operation definitions; defaults to the structural recursion.
    fn transform_operation(
        &mut self,
        operation: &OperationDefinition,
    ) -> Transformed<OperationDefinition> {
        self.default_transform_operation(operation)
    }

    /// Recurses into an operation's selections and directives; same
    /// empty-selections-means-delete rule as fragments.
    fn default_transform_operation(
        &mut self,
        operation: &OperationDefinition,
    ) -> Transformed<OperationDefinition> {
        let selections = self.transform_selections(&operation.selections);
        if let Some(selections) = &selections {
            if selections.is_empty() {
                return Transformed::Delete;
            }
        }
        let directives = self.transform_directives(&operation.directives);
        if selections.is_none() && directives.is_none() {
            return Transformed::Keep;
        }
        Transformed::Replace(OperationDefinition {
            directives: directives.unwrap_or_else(|| operation.directives.clone()),
            selections: selections.unwrap_or_else(|| operation.selections.clone()),
            ..operation.clone()
        })
    }

    /// Transforms a selection list; `None` means no selection changed.
    fn transform_selections(&mut self, selections: &[Selection]) -> Option<Vec<Selection>> {
        self.transform_list(selections, Self::transform_selection)
    }

    /// Hook for a single selection; defaults to per-variant dispatch.
    fn transform_selection(&mut self, selection: &Selection) -> Transformed<Selection> {
        self.default_transform_selection(selection)
    }

    /// Dispatches on the selection variant and re-wraps the result in the
    /// matching `Selection` constructor.
    fn default_transform_selection(&mut self, selection: &Selection) -> Transformed<Selection> {
        match selection {
            Selection::FragmentSpread(selection) => self
                .transform_fragment_spread(selection)
                .map(Selection::FragmentSpread),
            Selection::InlineFragment(selection) => self
                .transform_inline_fragment(selection)
                .map(Selection::InlineFragment),
            Selection::LinkedField(selection) => self
                .transform_linked_field(selection)
                .map(Selection::LinkedField),
            Selection::ScalarField(selection) => self
                .transform_scalar_field(selection)
                .map(Selection::ScalarField),
        }
    }

    /// Hook for scalar fields; defaults to visiting arguments + directives.
    fn transform_scalar_field(&mut self, field: &ScalarField) -> Transformed<Arc<ScalarField>> {
        self.default_transform_scalar_field(field)
    }

    /// Visits a scalar field's arguments and directives; rebuilds the field
    /// (in a fresh `Arc`) only if either list changed.
    fn default_transform_scalar_field(
        &mut self,
        field: &ScalarField,
    ) -> Transformed<Arc<ScalarField>> {
        let arguments = self.transform_arguments(&field.arguments);
        let directives = self.transform_directives(&field.directives);
        if arguments.is_none() && directives.is_none() {
            return Transformed::Keep;
        }
        Transformed::Replace(Arc::new(ScalarField {
            arguments: arguments.unwrap_or_else(|| field.arguments.clone()),
            directives: directives.unwrap_or_else(|| field.directives.clone()),
            ..field.clone()
        }))
    }

    /// Hook for linked fields; defaults to the structural recursion.
    fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed<Arc<LinkedField>> {
        self.default_transform_linked_field(field)
    }

    /// Visits a linked field's selections, arguments, and directives.
    /// Deletes the field if its (changed) selection set became empty.
    fn default_transform_linked_field(
        &mut self,
        field: &LinkedField,
    ) -> Transformed<Arc<LinkedField>> {
        let selections = self.transform_selections(&field.selections);
        if let Some(selections) = &selections {
            if selections.is_empty() {
                return Transformed::Delete;
            }
        }
        let arguments = self.transform_arguments(&field.arguments);
        let directives = self.transform_directives(&field.directives);
        if selections.is_none() && arguments.is_none() && directives.is_none() {
            return Transformed::Keep;
        }
        Transformed::Replace(Arc::new(LinkedField {
            arguments: arguments.unwrap_or_else(|| field.arguments.clone()),
            directives: directives.unwrap_or_else(|| field.directives.clone()),
            selections: selections.unwrap_or_else(|| field.selections.clone()),
            ..field.clone()
        }))
    }

    /// Hook for inline fragments; defaults to the structural recursion.
    fn transform_inline_fragment(
        &mut self,
        fragment: &InlineFragment,
    ) -> Transformed<Arc<InlineFragment>> {
        self.default_transform_inline_fragment(fragment)
    }

    /// Visits an inline fragment's selections and directives; deletes the
    /// fragment if its (changed) selection set became empty.
    fn default_transform_inline_fragment(
        &mut self,
        fragment: &InlineFragment,
    ) -> Transformed<Arc<InlineFragment>> {
        let selections = self.transform_selections(&fragment.selections);
        if let Some(selections) = &selections {
            if selections.is_empty() {
                return Transformed::Delete;
            }
        }
        let directives = self.transform_directives(&fragment.directives);
        if selections.is_none() && directives.is_none() {
            return Transformed::Keep;
        }
        Transformed::Replace(Arc::new(InlineFragment {
            directives: directives.unwrap_or_else(|| fragment.directives.clone()),
            selections: selections.unwrap_or_else(|| fragment.selections.clone()),
            ..fragment.clone()
        }))
    }

    /// Hook for fragment spreads; defaults to visiting arguments + directives.
    fn transform_fragment_spread(
        &mut self,
        spread: &FragmentSpread,
    ) -> Transformed<Arc<FragmentSpread>> {
        self.default_transform_fragment_spread(spread)
    }

    /// Visits a spread's arguments and directives; rebuilds only on change.
    fn default_transform_fragment_spread(
        &mut self,
        spread: &FragmentSpread,
    ) -> Transformed<Arc<FragmentSpread>> {
        let arguments = self.transform_arguments(&spread.arguments);
        let directives = self.transform_directives(&spread.directives);
        if arguments.is_none() && directives.is_none() {
            return Transformed::Keep;
        }
        Transformed::Replace(Arc::new(FragmentSpread {
            arguments: arguments.unwrap_or_else(|| spread.arguments.clone()),
            directives: directives.unwrap_or_else(|| spread.directives.clone()),
            ..spread.clone()
        }))
    }

    /// Transforms a directive list, but only if `VISIT_DIRECTIVES` is set;
    /// otherwise reports "unchanged" without visiting.
    fn transform_directives(&mut self, directives: &[Directive]) -> Option<Vec<Directive>> {
        if Self::VISIT_DIRECTIVES {
            self.transform_list(directives, Self::transform_directive)
        } else {
            None
        }
    }

    /// Hook for a single directive; defaults to visiting its arguments.
    fn transform_directive(&mut self, directive: &Directive) -> Transformed<Directive> {
        self.default_transform_directive(directive)
    }

    /// Visits a directive's arguments; rebuilds the directive only if they
    /// changed.
    fn default_transform_directive(&mut self, directive: &Directive) -> Transformed<Directive> {
        let arguments = self.transform_arguments(&directive.arguments);
        match arguments {
            None => Transformed::Keep,
            Some(replacement) => Transformed::Replace(Directive {
                arguments: replacement,
                ..directive.clone()
            }),
        }
    }

    /// Transforms an argument list, but only if `VISIT_ARGUMENTS` is set;
    /// otherwise reports "unchanged" without visiting.
    fn transform_arguments(&mut self, arguments: &[Argument]) -> Option<Vec<Argument>> {
        if Self::VISIT_ARGUMENTS {
            self.transform_list(arguments, Self::transform_argument)
        } else {
            None
        }
    }

    /// Hook for a single argument; defaults to visiting its value.
    fn transform_argument(&mut self, argument: &Argument) -> Transformed<Argument> {
        self.default_transform_argument(argument)
    }

    /// Visits the argument's value; on replacement, the new value is re-wrapped
    /// with the original span so source locations are preserved.
    fn default_transform_argument(&mut self, argument: &Argument) -> Transformed<Argument> {
        match self.transform_value(&argument.value.item) {
            Transformed::Delete => Transformed::Delete,
            Transformed::Keep => Transformed::Keep,
            Transformed::Replace(replacement) => Transformed::Replace(Argument {
                value: Spanned::new(argument.value.span, replacement),
                ..argument.clone()
            }),
        }
    }

    /// Hook for a value; defaults to per-variant dispatch.
    fn transform_value(&mut self, value: &Value) -> Transformed<Value> {
        self.default_transform_value(value)
    }

    /// Recurses into composite values (lists, objects) and variables;
    /// constants are always kept as-is.
    fn default_transform_value(&mut self, value: &Value) -> Transformed<Value> {
        match value {
            Value::Variable(variable) => self.transform_variable(variable).map(Value::Variable),
            Value::Constant(_) => Transformed::Keep,
            Value::List(items) => match self.transform_list(items, Self::transform_value) {
                None => Transformed::Keep,
                Some(replacement) => Transformed::Replace(Value::List(replacement)),
            },
            Value::Object(arguments) => match self.transform_arguments(arguments) {
                None => Transformed::Keep,
                Some(replacement) => Transformed::Replace(Value::Object(replacement)),
            },
        }
    }

    /// Hook for variables; the default never changes them.
    fn transform_variable(&mut self, value: &Variable) -> Transformed<Variable> {
        // Consume the parameter to avoid an unused-variable warning while
        // keeping the name meaningful for overriders.
        let _ = value;
        Transformed::Keep
    }

    /// Copy-on-write list transform: applies `f` to every item and returns
    /// `None` if nothing changed, otherwise the rebuilt list.
    ///
    /// Items before the first change are never cloned until a change is
    /// detected; at that point the already-visited prefix is cloned once into
    /// `result` and subsequent items are appended (kept items cloned, replaced
    /// items moved, deleted items skipped).
    fn transform_list<F, T>(&mut self, list: &[T], f: F) -> Option<Vec<T>>
    where
        F: Fn(&mut Self, &T) -> Transformed<T>,
        T: Clone,
    {
        if list.is_empty() {
            return None;
        }
        let mut result = Vec::new();
        let mut has_changes = false;
        for (index, prev_item) in list.iter().enumerate() {
            let next_item = f(self, prev_item);
            match next_item {
                Transformed::Keep => {
                    // Only materialize kept items once a change has occurred;
                    // before that, the untouched prefix lives in `list` itself.
                    if has_changes {
                        result.push(prev_item.clone());
                    }
                }
                Transformed::Delete => {
                    if !has_changes {
                        // First change: back-fill the unchanged prefix.
                        // `result` must still be the untouched empty Vec here.
                        debug_assert!(result.capacity() == 0);
                        result.reserve(list.len());
                        result.extend(list.iter().take(index).cloned());
                    }
                    // Deletion == simply not pushing the item.
                    has_changes = true;
                }
                Transformed::Replace(next_item) => {
                    if !has_changes {
                        // First change: back-fill the unchanged prefix.
                        debug_assert!(result.capacity() == 0);
                        result.reserve(list.len());
                        result.extend(list.iter().take(index).cloned());
                    }
                    result.push(next_item);
                    has_changes = true;
                }
            }
        }
        if has_changes {
            Some(result)
        } else {
            None
        }
    }
}

/// Verdict returned by every `Transformer` method: drop the node, keep the
/// original unchanged, or substitute a replacement.
#[derive(Clone)]
pub enum Transformed<T> {
    Delete,
    Keep,
    Replace(T),
}

impl<T> Transformed<T> {
    /// Applies `f` to the replacement value (if any), preserving the
    /// `Delete`/`Keep` verdicts untouched. Used to re-wrap variant payloads
    /// after dispatch (e.g. back into a `Selection` constructor).
    pub fn map<F, U>(self, f: F) -> Transformed<U>
    where
        F: FnOnce(T) -> U,
    {
        match self {
            Transformed::Delete => Transformed::Delete,
            Transformed::Keep => Transformed::Keep,
            Transformed::Replace(replacement) => Transformed::Replace(f(replacement)),
        }
    }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use crate::ir::*; use common::Spanned; use std::sync::Arc; pub trait Transformer { const NAME: &'static str; const VISIT_ARGUMENTS: bool; const VISIT_DIRECTIVES: bool; fn transform_fragment( &mut self, fragment: &FragmentDefinition, ) -> Transformed<FragmentDefinition> { self.default_transform_fragment(fragment) } fn default_transform_fragment( &mut self, fragment: &FragmentDefinition, ) -> Transformed<FragmentDefinition> { let selections = self.transform_selections(&fragment.selections); if let Some(selections) = &selections { if selections.is_empty() { return Transformed::Delete; } } let directives = self.transform_directives(&fragment.directives); if selections.is_none() && directives.is_none() { return Transformed::Keep; } Transformed::Replace(FragmentDefinition {
fn transform_operation( &mut self, operation: &OperationDefinition, ) -> Transformed<OperationDefinition> { self.default_transform_operation(operation) } fn default_transform_operation( &mut self, operation: &OperationDefinition, ) -> Transformed<OperationDefinition> { let selections = self.transform_selections(&operation.selections); if let Some(selections) = &selections { if selections.is_empty() { return Transformed::Delete; } } let directives = self.transform_directives(&operation.directives); if selections.is_none() && directives.is_none() { return Transformed::Keep; } Transformed::Replace(OperationDefinition { directives: directives.unwrap_or_else(|| operation.directives.clone()), selections: selections.unwrap_or_else(|| operation.selections.clone()), ..operation.clone() }) } fn transform_selections(&mut self, selections: &[Selection]) -> Option<Vec<Selection>> { self.transform_list(selections, Self::transform_selection) } fn transform_selection(&mut self, selection: &Selection) -> Transformed<Selection> { self.default_transform_selection(selection) } fn default_transform_selection(&mut self, selection: &Selection) -> Transformed<Selection> { match selection { Selection::FragmentSpread(selection) => self .transform_fragment_spread(selection) .map(Selection::FragmentSpread), Selection::InlineFragment(selection) => self .transform_inline_fragment(selection) .map(Selection::InlineFragment), Selection::LinkedField(selection) => self .transform_linked_field(selection) .map(Selection::LinkedField), Selection::ScalarField(selection) => self .transform_scalar_field(selection) .map(Selection::ScalarField), } } fn transform_scalar_field(&mut self, field: &ScalarField) -> Transformed<Arc<ScalarField>> { self.default_transform_scalar_field(field) } fn default_transform_scalar_field( &mut self, field: &ScalarField, ) -> Transformed<Arc<ScalarField>> { let arguments = self.transform_arguments(&field.arguments); let directives = self.transform_directives(&field.directives); 
if arguments.is_none() && directives.is_none() { return Transformed::Keep; } Transformed::Replace(Arc::new(ScalarField { arguments: arguments.unwrap_or_else(|| field.arguments.clone()), directives: directives.unwrap_or_else(|| field.directives.clone()), ..field.clone() })) } fn transform_linked_field(&mut self, field: &LinkedField) -> Transformed<Arc<LinkedField>> { self.default_transform_linked_field(field) } fn default_transform_linked_field( &mut self, field: &LinkedField, ) -> Transformed<Arc<LinkedField>> { let selections = self.transform_selections(&field.selections); if let Some(selections) = &selections { if selections.is_empty() { return Transformed::Delete; } } let arguments = self.transform_arguments(&field.arguments); let directives = self.transform_directives(&field.directives); if selections.is_none() && arguments.is_none() && directives.is_none() { return Transformed::Keep; } Transformed::Replace(Arc::new(LinkedField { arguments: arguments.unwrap_or_else(|| field.arguments.clone()), directives: directives.unwrap_or_else(|| field.directives.clone()), selections: selections.unwrap_or_else(|| field.selections.clone()), ..field.clone() })) } fn transform_inline_fragment( &mut self, fragment: &InlineFragment, ) -> Transformed<Arc<InlineFragment>> { self.default_transform_inline_fragment(fragment) } fn default_transform_inline_fragment( &mut self, fragment: &InlineFragment, ) -> Transformed<Arc<InlineFragment>> { let selections = self.transform_selections(&fragment.selections); if let Some(selections) = &selections { if selections.is_empty() { return Transformed::Delete; } } let directives = self.transform_directives(&fragment.directives); if selections.is_none() && directives.is_none() { return Transformed::Keep; } Transformed::Replace(Arc::new(InlineFragment { directives: directives.unwrap_or_else(|| fragment.directives.clone()), selections: selections.unwrap_or_else(|| fragment.selections.clone()), ..fragment.clone() })) } fn transform_fragment_spread( 
&mut self, spread: &FragmentSpread, ) -> Transformed<Arc<FragmentSpread>> { self.default_transform_fragment_spread(spread) } fn default_transform_fragment_spread( &mut self, spread: &FragmentSpread, ) -> Transformed<Arc<FragmentSpread>> { let arguments = self.transform_arguments(&spread.arguments); let directives = self.transform_directives(&spread.directives); if arguments.is_none() && directives.is_none() { return Transformed::Keep; } Transformed::Replace(Arc::new(FragmentSpread { arguments: arguments.unwrap_or_else(|| spread.arguments.clone()), directives: directives.unwrap_or_else(|| spread.directives.clone()), ..spread.clone() })) } fn transform_directives(&mut self, directives: &[Directive]) -> Option<Vec<Directive>> { if Self::VISIT_DIRECTIVES { self.transform_list(directives, Self::transform_directive) } else { None } } fn transform_directive(&mut self, directive: &Directive) -> Transformed<Directive> { self.default_transform_directive(directive) } fn default_transform_directive(&mut self, directive: &Directive) -> Transformed<Directive> { let arguments = self.transform_arguments(&directive.arguments); match arguments { None => Transformed::Keep, Some(replacement) => Transformed::Replace(Directive { arguments: replacement, ..directive.clone() }), } } fn transform_arguments(&mut self, arguments: &[Argument]) -> Option<Vec<Argument>> { if Self::VISIT_ARGUMENTS { self.transform_list(arguments, Self::transform_argument) } else { None } } fn transform_argument(&mut self, argument: &Argument) -> Transformed<Argument> { self.default_transform_argument(argument) } fn default_transform_argument(&mut self, argument: &Argument) -> Transformed<Argument> { match self.transform_value(&argument.value.item) { Transformed::Delete => Transformed::Delete, Transformed::Keep => Transformed::Keep, Transformed::Replace(replacement) => Transformed::Replace(Argument { value: Spanned::new(argument.value.span, replacement), ..argument.clone() }), } } fn transform_value(&mut self, 
value: &Value) -> Transformed<Value> { self.default_transform_value(value) } fn default_transform_value(&mut self, value: &Value) -> Transformed<Value> { match value { Value::Variable(variable) => self.transform_variable(variable).map(Value::Variable), Value::Constant(_) => Transformed::Keep, Value::List(items) => match self.transform_list(items, Self::transform_value) { None => Transformed::Keep, Some(replacement) => Transformed::Replace(Value::List(replacement)), }, Value::Object(arguments) => match self.transform_arguments(arguments) { None => Transformed::Keep, Some(replacement) => Transformed::Replace(Value::Object(replacement)), }, } } fn transform_variable(&mut self, value: &Variable) -> Transformed<Variable> { let _ = value; Transformed::Keep } fn transform_list<F, T>(&mut self, list: &[T], f: F) -> Option<Vec<T>> where F: Fn(&mut Self, &T) -> Transformed<T>, T: Clone, { if list.is_empty() { return None; } let mut result = Vec::new(); let mut has_changes = false; for (index, prev_item) in list.iter().enumerate() { let next_item = f(self, prev_item); match next_item { Transformed::Keep => { if has_changes { result.push(prev_item.clone()); } } Transformed::Delete => { if !has_changes { debug_assert!(result.capacity() == 0); result.reserve(list.len()); result.extend(list.iter().take(index).cloned()); } has_changes = true; } Transformed::Replace(next_item) => { if !has_changes { debug_assert!(result.capacity() == 0); result.reserve(list.len()); result.extend(list.iter().take(index).cloned()); } result.push(next_item); has_changes = true; } } } if has_changes { Some(result) } else { None } } } #[derive(Clone)] pub enum Transformed<T> { Delete, Keep, Replace(T), } impl<T> Transformed<T> { pub fn map<F, U>(self, f: F) -> Transformed<U> where F: FnOnce(T) -> U, { match self { Transformed::Delete => Transformed::Delete, Transformed::Keep => Transformed::Keep, Transformed::Replace(replacement) => Transformed::Replace(f(replacement)), } } }
directives: directives.unwrap_or_else(|| fragment.directives.clone()), selections: selections.unwrap_or_else(|| fragment.selections.clone()), ..fragment.clone() }) }
function_block-function_prefix_line
[ { "content": "pub fn parse(source: &str, file: &str) -> SyntaxResult<Document> {\n\n let parser = Parser::new(source, file);\n\n parser.parse_document()\n\n}\n\n\n", "file_path": "compiler/crates/graphql-syntax/src/lib.rs", "rank": 0, "score": 324088.214504415 }, { "content": "pub fn parse_type(source: &str, file: &str) -> SyntaxResult<TypeAnnotation> {\n\n let parser = Parser::new(source, file);\n\n parser.parse_type()\n\n}\n", "file_path": "compiler/crates/graphql-syntax/src/lib.rs", "rank": 1, "score": 317501.4701580747 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let mut path = env::current_dir().unwrap();\n\n path.push(\"benches\");\n\n path.push(\"fixtures\");\n\n\n\n for entry in fs::read_dir(&path).unwrap() {\n\n let file_path = entry.unwrap().path();\n\n if file_path.is_dir() {\n\n continue;\n\n }\n\n let file_name = file_path.file_stem().unwrap().to_str().unwrap();\n\n\n\n let file_data = fs::read_to_string(&file_path).unwrap();\n\n let ast = parse(&file_data, file_name)\n\n .unwrap_or_else(|error| panic!(\"failed to parse: {}: {:?}\", file_name, error));\n\n let ir = build(&TEST_SCHEMA, ast.definitions)\n\n .unwrap_or_else(|error| panic!(\"failed to build ir: {}: {:?}\", file_name, error));\n\n\n\n let context = CompilerContext::from_definitions(&TEST_SCHEMA, ir);\n\n\n", "file_path": "compiler/crates/graphql-transforms/benches/transforms.rs", "rank": 2, "score": 293742.3668157394 }, { "content": "/// Checks whether a file is signed *without* verifying the signature.\n\npub fn is_signed(data: &str) -> bool {\n\n RE.is_match(data)\n\n}\n\n\n", "file_path": "compiler/crates/signedsource/src/lib.rs", "rank": 3, "score": 285246.0163416808 }, { "content": "/// Verifies the signature in a signed file.\n\npub fn is_valid_signature(data: &str) -> bool {\n\n if let Some(mat) = RE.find(data) {\n\n let actual = &data[mat.start() + 25..mat.end() - 2];\n\n let unsigned = RE.replace(data, SIGNING_TOKEN);\n\n return hash(&unsigned) == 
actual;\n\n }\n\n false\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "compiler/crates/signedsource/src/lib.rs", "rank": 4, "score": 282030.07247104455 }, { "content": "/// Signs a source file which contains a signing token. Signing modifies only\n\n/// the signing token, so the token should be placed inside a comment in order\n\n/// for signing to not change code semantics.\n\npub fn sign_file(data: &str) -> String {\n\n assert!(\n\n data.contains(NEWTOKEN),\n\n \"sign_file(...): Cannot sign file without token {}\",\n\n NEWTOKEN\n\n );\n\n sign(data)\n\n}\n\n\n", "file_path": "compiler/crates/signedsource/src/lib.rs", "rank": 5, "score": 264640.93981351354 }, { "content": "fn transform_fragment(\n\n ctx: &CompilerContext<'_>,\n\n fragment: &FragmentDefinition,\n\n) -> FragmentDefinition {\n\n FragmentDefinition {\n\n name: fragment.name,\n\n type_condition: fragment.type_condition,\n\n directives: fragment.directives.clone(),\n\n variable_definitions: fragment.variable_definitions.clone(),\n\n used_global_variables: fragment.used_global_variables.clone(),\n\n selections: transform_selections(ctx, &mut fragment.selections.clone()),\n\n }\n\n}\n\n\n", "file_path": "compiler/crates/graphql-transforms/src/sort_selections.rs", "rank": 6, "score": 263467.7938023841 }, { "content": "fn transfrom_inline_fragment(\n\n ctx: &CompilerContext<'_>,\n\n node: &InlineFragment,\n\n) -> Arc<InlineFragment> {\n\n Arc::new(InlineFragment {\n\n type_condition: node.type_condition,\n\n directives: node.directives.clone(),\n\n selections: transform_selections(ctx, &mut node.selections.clone()),\n\n })\n\n}\n\n\n", "file_path": "compiler/crates/graphql-transforms/src/sort_selections.rs", "rank": 7, "score": 248237.51634036173 }, { "content": "fn has_typename_field(schema: &Schema, selections: &[Selection]) -> bool {\n\n let typename_field = schema.typename_field();\n\n selections.iter().any(|x| match x {\n\n Selection::ScalarField(child) => {\n\n child.alias.is_none() && 
child.definition.item == typename_field\n\n }\n\n _ => false,\n\n })\n\n}\n\n\n\n// A wrapper type that allows comparing pointer equality of references. Two\n\n// `PointerAddress` values are equal if they point to the same memory location.\n\n//\n\n// This type is _sound_, but misuse can easily lead to logical bugs if the memory\n\n// of one PointerAddress could not have been freed and reused for a subsequent\n\n// PointerAddress.\n", "file_path": "compiler/crates/graphql-transforms/src/generate_typename.rs", "rank": 8, "score": 246957.73454966163 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n let ast = parse(fixture.content, fixture.file_name).unwrap();\n\n let ir = build(&TEST_SCHEMA, ast.definitions).unwrap();\n\n let context = CompilerContext::from_definitions(&TEST_SCHEMA, ir);\n\n let next_context = sort_selections(&context);\n\n\n\n assert_eq!(\n\n next_context.fragments().count(),\n\n context.fragments().count()\n\n );\n\n let mut printed = next_context\n\n .fragments()\n\n .map(|def| print_fragment(&TEST_SCHEMA, def))\n\n .collect::<Vec<_>>();\n\n printed.sort();\n\n Ok(printed.join(\"\\n\\n\"))\n\n}\n", "file_path": "compiler/crates/graphql-transforms/tests/sort_selections/mod.rs", "rank": 9, "score": 238402.6723685661 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n let ast = parse(fixture.content, fixture.file_name).unwrap();\n\n let ir = build(&TEST_SCHEMA, ast.definitions).unwrap();\n\n let context = CompilerContext::from_definitions(&TEST_SCHEMA, ir);\n\n\n\n let next_context = inline_fragments(&context);\n\n\n\n assert_eq!(next_context.fragments().count(), 0);\n\n assert_eq!(\n\n next_context.operations().count(),\n\n context.operations().count()\n\n );\n\n\n\n let mut printed = next_context\n\n .operations()\n\n .map(|def| print_operation(&TEST_SCHEMA, def))\n\n .collect::<Vec<_>>();\n\n printed.sort();\n\n Ok(printed.join(\"\\n\\n\"))\n\n}\n", "file_path": 
"compiler/crates/graphql-transforms/tests/inline_fragments/mod.rs", "rank": 10, "score": 237887.3462457674 }, { "content": "///\n\n/// Sorts selections in the fragments and queries (and their selections)\n\n///\n\npub fn sort_selections<'s>(ctx: &'s CompilerContext<'s>) -> CompilerContext<'s> {\n\n let mut next_context = CompilerContext::new(ctx.schema());\n\n for fragment in ctx.fragments() {\n\n next_context.insert_fragment(transform_fragment(ctx, fragment));\n\n }\n\n for operation in ctx.operations() {\n\n next_context.insert_operation(transform_operation(ctx, operation));\n\n }\n\n next_context\n\n}\n\n\n", "file_path": "compiler/crates/graphql-transforms/src/sort_selections.rs", "rank": 11, "score": 236664.84559566196 }, { "content": "pub fn inline_fragments<'s>(ctx: &'s CompilerContext<'s>) -> CompilerContext<'s> {\n\n let mut next_context = CompilerContext::new(ctx.schema());\n\n let mut transformer = InlineFragmentsTransform::new(ctx);\n\n for operation in ctx.operations() {\n\n match transformer.transform_operation(operation) {\n\n Transformed::Delete => {}\n\n Transformed::Keep => next_context.insert_operation(operation.clone()),\n\n Transformed::Replace(replacement) => next_context.insert_operation(replacement),\n\n }\n\n }\n\n next_context\n\n}\n\n\n", "file_path": "compiler/crates/graphql-transforms/src/inline_fragments.rs", "rank": 12, "score": 235993.4488102606 }, { "content": "fn transform_selections(\n\n ctx: &CompilerContext<'_>,\n\n selections: &mut Vec<Selection>,\n\n) -> Vec<Selection> {\n\n selections.sort_unstable();\n\n\n\n selections\n\n .iter_mut()\n\n .map(|selection| match selection {\n\n Selection::ScalarField(_) => selection.clone(),\n\n Selection::FragmentSpread(_) => selection.clone(),\n\n Selection::LinkedField(node) => {\n\n Selection::LinkedField(transfrom_linked_field(ctx, node))\n\n }\n\n Selection::InlineFragment(node) => {\n\n Selection::InlineFragment(transfrom_inline_fragment(ctx, node))\n\n }\n\n })\n\n .collect()\n\n}\n", 
"file_path": "compiler/crates/graphql-transforms/src/sort_selections.rs", "rank": 14, "score": 221225.87471938244 }, { "content": "#[test]\n\nfn sort_selections_transform() {\n\n let input = include_str!(\"sort_selections/fixtures/sort-selections-transform.graphql\");\n\n let expected = include_str!(\"sort_selections/fixtures/sort-selections-transform.expected\");\n\n test_fixture(transform_fixture, \"sort-selections-transform.graphql\", \"sort_selections/fixtures/sort-selections-transform.expected\", input, expected);\n\n}\n", "file_path": "compiler/crates/graphql-transforms/tests/sort_selections_test.rs", "rank": 15, "score": 217152.33361009264 }, { "content": "// An implmentation of murmurHash.js of relay-compiler that produces the same output excepet for non-alphanumeric strings\n\n// It does the 32bit murmurhash3 with seed 0, and applies a base62 to get the final string hash\n\npub fn murmurhash(data: &str) -> String {\n\n let bytes = data.as_bytes();\n\n let nbytes = bytes.len();\n\n let mut hash = 0; // Hardcoded seed 0\n\n let mut i = 0;\n\n\n\n let iterations = nbytes / 4;\n\n for _ in 0..iterations {\n\n hash = (hash ^ calculate_k(read_u32(&bytes[i..i + 4])))\n\n .rotate_left(R2)\n\n .wrapping_mul(M)\n\n .wrapping_add(N);\n\n i += 4;\n\n }\n\n\n\n match nbytes - i {\n\n 1 => {\n\n hash ^= calculate_k(bytes[i] as u32);\n\n }\n\n 2 => {\n", "file_path": "compiler/crates/common/src/murmurhash.rs", "rank": 16, "score": 213816.34554225294 }, { "content": "/// Extract graphql`text` literals from JS-like code. 
This should work for Flow\n\n/// or TypeScript alike.\n\npub fn parse_chunks(input: &str) -> Result<Vec<&str>, String> {\n\n if !input.contains(\"graphql`\") {\n\n return Ok(vec![]);\n\n }\n\n let mut res = vec![];\n\n let mut it = input.char_indices().peekable();\n\n 'code: while let Some((i, c)) = it.next() {\n\n match c {\n\n 'g' => {\n\n for expected in ['r', 'a', 'p', 'h', 'q', 'l', '`'].iter() {\n\n if let Some((_, c)) = it.next() {\n\n if c != *expected {\n\n consume_identifier(&mut it);\n\n continue 'code;\n\n }\n\n }\n\n }\n\n let start = i;\n\n while let Some((i, c)) = it.next() {\n\n match c {\n", "file_path": "compiler/crates/extract-graphql/src/lib.rs", "rank": 17, "score": 211082.3447172494 }, { "content": "pub fn print_fragment(schema: &Schema, fragment: &FragmentDefinition) -> String {\n\n let mut result = String::new();\n\n let printer = Printer::new(&schema, &mut result);\n\n printer.print_fragment(fragment).unwrap();\n\n result\n\n}\n\n\n", "file_path": "compiler/crates/graphql-printer/src/printer.rs", "rank": 18, "score": 210774.1819081385 }, { "content": "fn transform_operation(\n\n ctx: &CompilerContext<'_>,\n\n operation: &OperationDefinition,\n\n) -> OperationDefinition {\n\n OperationDefinition {\n\n kind: operation.kind.clone(),\n\n name: operation.name,\n\n type_: operation.type_,\n\n directives: operation.directives.clone(),\n\n variable_definitions: operation.variable_definitions.clone(),\n\n selections: transform_selections(ctx, &mut operation.selections.clone()),\n\n }\n\n}\n\n\n", "file_path": "compiler/crates/graphql-transforms/src/sort_selections.rs", "rank": 19, "score": 209155.04739212844 }, { "content": "pub fn build_schema_with_extensions(sdl: &str, extensions_sdl: &str) -> Result<Schema> {\n\n let builtins = parse_definitions(BUILTINS)?;\n\n let definitions = parse_definitions(sdl)?;\n\n let extensions = parse_definitions(extensions_sdl)?;\n\n Schema::build(builtins, definitions, extensions)\n\n}\n\n\n", "file_path": 
"compiler/crates/schema/src/lib.rs", "rank": 20, "score": 209130.354387439 }, { "content": "pub fn test_schema_with_extensions(extensions_sdl: &str) -> Schema {\n\n build_schema_with_extensions(\n\n TEST_SCHEMA_DATA,\n\n &format!(\"{}{}\", extensions_sdl, RELAY_EXTENSIONS),\n\n )\n\n .expect(\"Expected test schema (and extensions) to be valid\")\n\n}\n", "file_path": "compiler/crates/test-schema/src/lib.rs", "rank": 21, "score": 204584.919837606 }, { "content": "#[test]\n\nfn inlines_nested_fragments() {\n\n let input = include_str!(\"inline_fragments/fixtures/inlines-nested-fragments.graphql\");\n\n let expected = include_str!(\"inline_fragments/fixtures/inlines-nested-fragments.expected\");\n\n test_fixture(transform_fixture, \"inlines-nested-fragments.graphql\", \"inline_fragments/fixtures/inlines-nested-fragments.expected\", input, expected);\n\n}\n", "file_path": "compiler/crates/graphql-transforms/tests/inline_fragments_test.rs", "rank": 22, "score": 204135.13154687994 }, { "content": "pub fn build_schema(sdl: &str) -> Result<Schema> {\n\n let builtins = parse_definitions(BUILTINS)?;\n\n let definitions = parse_definitions(sdl)?;\n\n Schema::build(builtins, definitions, Vec::new())\n\n}\n\n\n", "file_path": "compiler/crates/schema/src/lib.rs", "rank": 23, "score": 203463.075891103 }, { "content": "pub fn print_directives(schema: &Schema, directives: &[Directive]) -> String {\n\n let mut result = String::new();\n\n let mut printer = Printer::new(&schema, &mut result);\n\n printer.print_directives(directives).unwrap();\n\n result\n\n}\n\n\n", "file_path": "compiler/crates/graphql-printer/src/printer.rs", "rank": 24, "score": 199404.93454298237 }, { "content": "fn read_name(input: &str) -> ReaderResult<'_> {\n\n let end = input\n\n .chars()\n\n .skip(1)\n\n .position(|c| match c {\n\n '_' | '0'..='9' | 'a'..='z' | 'A'..='Z' => false,\n\n _ => true,\n\n })\n\n .map(|len| len + 1)\n\n .unwrap_or_else(|| input.len());\n\n (&input[end..], 
TokenKind::Name(&input[0..end]))\n\n}\n\n\n", "file_path": "compiler/crates/schema/src/lexer.rs", "rank": 25, "score": 194070.6902702786 }, { "content": "#[test]\n\nfn directive_match_on_fragment_invalid() {\n\n let input = include_str!(\"parse/fixtures/directive-match-on-fragment.invalid.graphql\");\n\n let expected = include_str!(\"parse/fixtures/directive-match-on-fragment.invalid.expected\");\n\n test_fixture(transform_fixture, \"directive-match-on-fragment.invalid.graphql\", \"parse/fixtures/directive-match-on-fragment.invalid.expected\", input, expected);\n\n}\n\n\n", "file_path": "compiler/crates/graphql-ir/tests/parse_test.rs", "rank": 26, "score": 192372.48984159814 }, { "content": "#[test]\n\nfn type_name_does_not_exist() {\n\n let input = include_str!(\"generate_typename/fixtures/type-name-does-not-exist.graphql\");\n\n let expected = include_str!(\"generate_typename/fixtures/type-name-does-not-exist.expected\");\n\n test_fixture(transform_fixture, \"type-name-does-not-exist.graphql\", \"generate_typename/fixtures/type-name-does-not-exist.expected\", input, expected);\n\n}\n\n\n", "file_path": "compiler/crates/graphql-transforms/tests/generate_typename_test.rs", "rank": 27, "score": 192310.15492601646 }, { "content": "#[test]\n\nfn type_name_exists() {\n\n let input = include_str!(\"generate_typename/fixtures/type-name-exists.graphql\");\n\n let expected = include_str!(\"generate_typename/fixtures/type-name-exists.expected\");\n\n test_fixture(transform_fixture, \"type-name-exists.graphql\", \"generate_typename/fixtures/type-name-exists.expected\", input, expected);\n\n}\n", "file_path": "compiler/crates/graphql-transforms/tests/generate_typename_test.rs", "rank": 28, "score": 192310.15492601646 }, { "content": "pub fn parse_definitions(input: &str) -> Result<Vec<ast::Definition>> {\n\n let lexer = Lexer::new(input);\n\n let parser = Parser::new(lexer);\n\n parser.parse_schema_document()\n\n}\n", "file_path": "compiler/crates/schema/src/lib.rs", "rank": 
29, "score": 190765.73079523287 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n let ast = parse(fixture.content, fixture.file_name).unwrap();\n\n let ir = build(&TEST_SCHEMA, ast.definitions).unwrap();\n\n let context = CompilerContext::from_definitions(&TEST_SCHEMA, ir);\n\n\n\n let next_context = generate_typename(&context);\n\n\n\n assert_eq!(next_context.document_count(), context.document_count());\n\n\n\n let mut printed = next_context\n\n .operations()\n\n .map(|def| print_operation(&TEST_SCHEMA, def))\n\n .chain(\n\n next_context\n\n .fragments()\n\n .map(|def| print_fragment(&TEST_SCHEMA, def)),\n\n )\n\n .collect::<Vec<_>>();\n\n printed.sort();\n\n Ok(printed.join(\"\\n\\n\"))\n\n}\n", "file_path": "compiler/crates/graphql-transforms/tests/generate_typename/mod.rs", "rank": 30, "score": 188841.18696094447 }, { "content": "function useStaticFragmentNodeWarning(\n\n fragmentNode: ReaderFragment,\n\n warningContext: string,\n\n): void {\n\n if (__DEV__) {\n\n // This is calling `useRef` conditionally, but based on the environment\n\n // __DEV__ setting which shouldn't change. This allows us to only pay the\n\n // cost of `useRef` in development mode to produce the warning.\n\n // eslint-disable-next-line react-hooks/rules-of-hooks\n\n const initialPropRef = useRef(fragmentNode.name);\n\n warning(\n\n initialPropRef.current === fragmentNode.name,\n\n 'Relay: The %s has to remain the same over the lifetime of a component. 
' +\n\n 'Changing it is not supported and will result in unexpected behavior.',\n\n warningContext,\n\n );\n\n }\n", "file_path": "packages/relay-experimental/useStaticFragmentNodeWarning.js", "rank": 31, "score": 187586.5515115303 }, { "content": "const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning');\n", "file_path": "packages/relay-experimental/useFragment.js", "rank": 32, "score": 179826.48800902488 }, { "content": "const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning');\n", "file_path": "packages/relay-experimental/usePaginationFragment.js", "rank": 33, "score": 177944.39944041157 }, { "content": "const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning');\n", "file_path": "packages/relay-experimental/useRefetchableFragment.js", "rank": 34, "score": 177944.39944041157 }, { "content": "// TODO: Test without using snapshot tests\n\npub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n let parts: Vec<&str> = fixture.content.split(\"%definitions%\").collect();\n\n let first_line: &str = fixture.content.lines().next().unwrap();\n\n\n\n let changed_names = first_line[1..]\n\n .trim()\n\n .split(\",\")\n\n .map(|name| name.trim())\n\n .filter(|name| !name.is_empty())\n\n .map(|name| name.intern())\n\n .collect();\n\n\n\n let mut asts = parse(parts[0], fixture.file_name).unwrap().definitions;\n\n let mut base_names = FnvHashSet::default();\n\n for part in parts.iter().skip(1) {\n\n let defs = parse(part, fixture.file_name).unwrap().definitions;\n\n for def in defs {\n\n base_names.insert(match &def {\n\n graphql_syntax::ExecutableDefinition::Operation(node) => {\n\n node.name.clone().unwrap().value\n", "file_path": "compiler/crates/dependency-analyzer/tests/ir/mod.rs", "rank": 35, "score": 177797.53994354827 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n let ast = parse(fixture.content, fixture.file_name).unwrap();\n\n 
build(&TEST_SCHEMA, ast.definitions)\n\n .map(|definitions| print_ir(&TEST_SCHEMA, &definitions).join(\"\\n\\n\"))\n\n .map_err(|errors| {\n\n errors\n\n .errors()\n\n .into_iter()\n\n .map(|error| format!(\"{:?}\", error))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\\n\")\n\n })\n\n}\n", "file_path": "compiler/crates/graphql-printer/tests/print/mod.rs", "rank": 36, "score": 177792.1326528463 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n let parts: Vec<_> = fixture.content.split(\"%extensions%\").collect();\n\n let result = match parts.as_slice() {\n\n [base] => build_schema(base),\n\n [base, extensions] => build_schema_with_extensions(base, extensions),\n\n _ => panic!(\"Expected a single extension block\"),\n\n };\n\n\n\n result\n\n .map(|schema| schema.snapshot_print())\n\n .map_err(|err| format!(\"{}\", err))\n\n}\n", "file_path": "compiler/crates/schema/tests/build_schema/mod.rs", "rank": 37, "score": 177792.1326528463 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n parse(fixture.content, fixture.file_name)\n\n .map(|x| format!(\"{:#?}\", x))\n\n .map_err(|errors| {\n\n errors\n\n .into_iter()\n\n .map(|error| error.print(fixture.content))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\\n\")\n\n })\n\n}\n", "file_path": "compiler/crates/graphql-syntax/tests/parse/mod.rs", "rank": 38, "score": 177792.1326528463 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n let ast = parse(fixture.content, fixture.file_name).unwrap();\n\n let mut sources = FnvHashMap::default();\n\n sources.insert(FileKey::new(fixture.file_name), fixture.content);\n\n\n\n build(&TEST_SCHEMA, ast.definitions)\n\n .map(|x| format!(\"{:#?}\", x))\n\n .map_err(|errors| {\n\n errors\n\n .errors()\n\n .into_iter()\n\n .map(|error| error.print(&sources))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\\n\")\n\n })\n\n}\n", "file_path": 
"compiler/crates/graphql-ir/tests/parse/mod.rs", "rank": 39, "score": 177792.1326528463 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n let parts: Vec<&str> = fixture.content.split(\"%definitions%\").collect();\n\n\n\n let definitions = parse(parts[0], fixture.file_name).unwrap();\n\n let base_definitions = parts\n\n .iter()\n\n .skip(1)\n\n .map(|part| parse(part, fixture.file_name).unwrap().definitions)\n\n .collect();\n\n let (result, base_definitions) = get_reachable_ast(definitions.definitions, base_definitions)?;\n\n\n\n let mut texts = result\n\n .into_iter()\n\n .map(|x| format_definition(x))\n\n .collect::<Vec<_>>();\n\n texts.sort_unstable();\n\n texts.push(\"========== Base definitions ==========\".to_string());\n\n let mut defs = base_definitions\n\n .iter()\n\n .map(|key| key.lookup())\n\n .collect::<Vec<_>>();\n\n defs.sort_unstable();\n\n texts.push(defs.join(\", \"));\n\n Ok(texts.join(\"\\n\\n\"))\n\n}\n", "file_path": "compiler/crates/dependency-analyzer/tests/ast/mod.rs", "rank": 40, "score": 177792.13265284634 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n Ok(fixture.content.to_uppercase())\n\n}\n", "file_path": "compiler/crates/fixture-tests/tests/uppercase/mod.rs", "rank": 41, "score": 177792.1326528463 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n parse_chunks(fixture.content).map(|chunks| format!(\"{:#?}\", chunks))\n\n}\n", "file_path": "compiler/crates/extract-graphql/tests/extract/mod.rs", "rank": 42, "score": 177792.1326528463 }, { "content": "const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning');\n", "file_path": "packages/relay-experimental/useBlockingPaginationFragment.js", "rank": 43, "score": 176103.30796974144 }, { "content": "pub fn transform_fixture(fixture: &Fixture) -> Result<String, String> {\n\n let mut sources = FnvHashMap::default();\n\n 
sources.insert(FileKey::new(fixture.file_name), fixture.content);\n\n\n\n let parts: Vec<_> = fixture.content.split(\"%extensions%\").collect();\n\n if let [base, extensions] = parts.as_slice() {\n\n let ast = parse(base, fixture.file_name).unwrap();\n\n let schema = test_schema_with_extensions(extensions);\n\n build(&schema, ast.definitions)\n\n .map(|x| format!(\"{:#?}\", x))\n\n .map_err(|errors| {\n\n errors\n\n .errors()\n\n .into_iter()\n\n .map(|error| error.print(&sources))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\\n\")\n\n })\n\n } else {\n\n panic!(\"Expected exactly one %extensions% section marker.\")\n\n }\n\n}\n", "file_path": "compiler/crates/graphql-ir/tests/parse_with_extensions/mod.rs", "rank": 44, "score": 175714.06818296603 }, { "content": "/// Transform to add the `__typename` field to any LinkedField that both a) returns an\n\n/// abstract type and b) does not already directly query `__typename`.\n\npub fn generate_typename<'s>(ctx: &'s CompilerContext<'s>) -> CompilerContext<'s> {\n\n let mut next_context = CompilerContext::new(ctx.schema());\n\n let mut transform = GenerateTypenameTransform::new(ctx);\n\n for operation in ctx.operations() {\n\n match transform.transform_operation(operation) {\n\n Transformed::Delete => {}\n\n Transformed::Keep => next_context.insert_operation(operation.clone()),\n\n Transformed::Replace(replacement) => next_context.insert_operation(replacement),\n\n }\n\n }\n\n for fragment in ctx.fragments() {\n\n match transform.transform_fragment(fragment) {\n\n Transformed::Delete => {}\n\n Transformed::Keep => next_context.insert_fragment(fragment.clone()),\n\n Transformed::Replace(replacement) => next_context.insert_fragment(replacement),\n\n }\n\n }\n\n next_context\n\n}\n\n\n", "file_path": "compiler/crates/graphql-transforms/src/generate_typename.rs", "rank": 45, "score": 173273.5056926957 }, { "content": "function transformSelections(\n\n context: CompilerContext,\n\n fragments: Map<string, PendingFragment>,\n\n 
scope: Scope,\n\n selections: $ReadOnlyArray<Selection>,\n\n errorContext: $ReadOnlyArray<IR>,\n\n): ?$ReadOnlyArray<Selection> {\n\n let nextSelections = null;\n\n selections.forEach(selection => {\n\n let nextSelection;\n\n if (\n\n selection.kind === 'ClientExtension' ||\n\n selection.kind === 'InlineDataFragmentSpread' ||\n\n selection.kind === 'InlineFragment' ||\n\n selection.kind === 'ModuleImport' ||\n\n selection.kind === 'Defer' ||\n\n selection.kind === 'Stream'\n\n ) {\n\n nextSelection = transformNode(\n\n context,\n\n fragments,\n\n scope,\n\n selection,\n\n errorContext,\n\n );\n\n } else if (selection.kind === 'FragmentSpread') {\n\n nextSelection = transformFragmentSpread(\n\n context,\n\n fragments,\n\n scope,\n\n selection,\n\n errorContext,\n\n );\n\n } else if (selection.kind === 'Condition') {\n\n const conditionSelections = transformCondition(\n\n context,\n\n fragments,\n\n scope,\n\n selection,\n\n errorContext,\n\n );\n\n if (conditionSelections) {\n\n nextSelections = nextSelections || [];\n\n nextSelections.push(...conditionSelections);\n\n }\n\n } else if (\n\n selection.kind === 'LinkedField' ||\n\n selection.kind === 'ScalarField'\n\n ) {\n\n nextSelection = transformField(\n\n context,\n\n fragments,\n\n scope,\n\n selection,\n\n errorContext,\n\n );\n\n } else {\n\n (selection: empty);\n\n throw createCompilerError(\n\n `ApplyFragmentArgumentTransform: Unsupported kind '${selection.kind}'.`,\n\n [selection.loc],\n\n );\n\n }\n\n if (nextSelection) {\n\n nextSelections = nextSelections || [];\n\n nextSelections.push(nextSelection);\n\n }\n\n });\n\n return nextSelections;\n", "file_path": "packages/relay-compiler/transforms/ApplyFragmentArgumentTransform.js", "rank": 46, "score": 172446.0509609798 }, { "content": "function transformDirectives(\n\n scope: Scope,\n\n directives: $ReadOnlyArray<Directive>,\n\n errorContext: $ReadOnlyArray<IR>,\n\n): $ReadOnlyArray<Directive> {\n\n return directives.map(directive => {\n\n const args = 
transformArguments(scope, directive.args, errorContext);\n\n return {\n\n ...directive,\n\n args,\n\n };\n\n });\n", "file_path": "packages/relay-compiler/transforms/ApplyFragmentArgumentTransform.js", "rank": 47, "score": 172427.97306069586 }, { "content": "/// Finds the roots of a set of paths. This filters any paths\n\n/// that are a subdirectory of other paths in the input.\n\nfn unify_roots(mut paths: Vec<PathBuf>) -> Vec<PathBuf> {\n\n paths.sort();\n\n let mut roots = Vec::new();\n\n for path in paths {\n\n match roots.last() {\n\n Some(prev) if path.starts_with(&prev) => {\n\n // skip\n\n }\n\n _ => {\n\n roots.push(path);\n\n }\n\n }\n\n }\n\n roots\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n", "file_path": "compiler/crates/relay-compiler/src/watchman/graphql_finder.rs", "rank": 48, "score": 172338.1763670636 }, { "content": "fn read_comment(input: &str) -> &str {\n\n let end = input\n\n .chars()\n\n .position(|c| c == '\\n')\n\n .map(|pos| pos + 1)\n\n .unwrap_or_else(|| input.len());\n\n &input[end..]\n\n}\n\n\n", "file_path": "compiler/crates/schema/src/lexer.rs", "rank": 49, "score": 171616.28832103172 }, { "content": "function visitFragmentOrRoot<N: Fragment | Root>(\n\n node: N,\n\n options: Options,\n\n): ?N {\n\n const transformedNode = this.traverse(node, options);\n\n const connectionMetadata = options.connectionMetadata;\n\n if (connectionMetadata.length) {\n\n return {\n\n ...transformedNode,\n\n metadata: {\n\n ...transformedNode.metadata,\n\n connection: connectionMetadata,\n\n },\n\n };\n\n }\n\n return transformedNode;\n", "file_path": "packages/relay-compiler/transforms/ConnectionTransform.js", "rank": 50, "score": 168783.26788018056 }, { "content": "function fragmentMetadata({mask, plural}): MixedObj {\n\n invariant(\n\n plural === undefined || typeof plural === 'boolean',\n\n 'RelayDirectiveTransform: Expected the \"plural\" argument to @relay ' +\n\n 'to be a boolean literal if specified.',\n\n );\n\n invariant(\n\n 
mask === undefined || typeof mask === 'boolean',\n\n 'RelayDirectiveTransform: Expected the \"mask\" argument to @relay ' +\n\n 'to be a boolean literal if specified.',\n\n );\n\n return {mask, plural};\n", "file_path": "packages/relay-compiler/transforms/RelayDirectiveTransform.js", "rank": 51, "score": 168764.92486496485 }, { "content": "const RelayDirectiveTransform = require('../RelayDirectiveTransform');\n", "file_path": "packages/relay-compiler/transforms/__tests__/RefetchableFragmentTransform-test.js", "rank": 52, "score": 168637.8922196697 }, { "content": "pub fn try_all<T, U, I, F>(items: I, mut f: F) -> ValidationResult<Vec<T>>\n\nwhere\n\n I: IntoIterator<Item = U>,\n\n F: FnMut(U) -> ValidationResult<T>,\n\n{\n\n let iter = items.into_iter();\n\n let mut errors = ValidationErrors::new(Vec::new());\n\n let mut values = Vec::with_capacity(iter.size_hint().1.unwrap_or_default());\n\n for item in iter {\n\n match f(item) {\n\n Ok(value) => values.push(value),\n\n Err(error) => errors.extend(error),\n\n }\n\n }\n\n if errors.is_empty() {\n\n Ok(values)\n\n } else {\n\n Err(errors)\n\n }\n\n}\n\n\n", "file_path": "compiler/crates/graphql-ir/src/error_combinators.rs", "rank": 53, "score": 167405.95097028752 }, { "content": "const InlineFragmentsTransform = require('../InlineFragmentsTransform');\n", "file_path": "packages/relay-compiler/transforms/__tests__/GenerateTypeNameTransform-test.js", "rank": 54, "score": 166797.46555779706 }, { "content": "function fragmentSpreadMetadata({mask}): MixedObj {\n\n invariant(\n\n mask === undefined || typeof mask === 'boolean',\n\n 'RelayDirectiveTransform: Expected the \"mask\" argument to @relay ' +\n\n 'to be a boolean literal if specified.',\n\n );\n\n return {mask};\n", "file_path": "packages/relay-compiler/transforms/RelayDirectiveTransform.js", "rank": 55, "score": 166675.6257438921 }, { "content": "pub trait Visitor {\n\n const NAME: &'static str;\n\n const VISIT_ARGUMENTS: bool;\n\n const VISIT_DIRECTIVES: 
bool;\n\n\n\n // Fragment Definition\n\n fn visit_fragment(&mut self, fragment: &FragmentDefinition) {\n\n self.default_visit_fragment(fragment)\n\n }\n\n\n\n fn default_visit_fragment(&mut self, fragment: &FragmentDefinition) {\n\n self.visit_selections(&fragment.selections);\n\n self.visit_directives(&fragment.directives);\n\n }\n\n\n\n // Operation Definition\n\n fn visit_operation(&mut self, operation: &OperationDefinition) {\n\n self.default_visit_operation(operation)\n\n }\n\n\n", "file_path": "compiler/crates/graphql-ir/src/visitor.rs", "rank": 56, "score": 165942.48852635204 }, { "content": "/// A type that acts as an intern key, uniquely identifying the original value\n\n/// as well as supporting fast lookup back to a reference to the original value.\n\npub trait InternKey {\n\n type Value;\n\n\n\n fn from_raw(raw: RawInternKey) -> Self;\n\n\n\n fn into_raw(self) -> RawInternKey;\n\n\n\n fn lookup(self) -> &'static Self::Value;\n\n}\n", "file_path": "compiler/crates/interner/src/types.rs", "rank": 57, "score": 165942.48852635204 }, { "content": "/// Returns all root directories of JS source files for the config.\n\nfn get_source_roots(config: &Config) -> Vec<PathBuf> {\n\n config.sources.keys().cloned().collect()\n\n}\n\n\n", "file_path": "compiler/crates/relay-compiler/src/watchman/graphql_finder.rs", "rank": 58, "score": 165548.89855848992 }, { "content": "const inferRootArgumentDefinitions = require('../core/inferRootArgumentDefinitions');\n", "file_path": "packages/relay-compiler/transforms/RefetchableFragmentTransform.js", "rank": 59, "score": 164657.51450806347 }, { "content": "function getRefetchQueryName(fragment: Fragment): string | null {\n\n const refetchableDirective = fragment.directives.find(\n\n directive => directive.name === 'refetchable',\n\n );\n\n if (refetchableDirective == null) {\n\n return null;\n\n }\n\n const refetchArguments = getLiteralArgumentValues(refetchableDirective.args);\n\n const queryName = 
refetchArguments.queryName;\n\n if (queryName == null) {\n\n throw createUserError(\n\n \"Expected the 'queryName' argument of @refetchable to be provided\",\n\n [refetchableDirective.loc],\n\n );\n\n } else if (typeof queryName !== 'string') {\n\n const queryNameArg = refetchableDirective.args.find(\n\n arg => arg.name === 'queryName',\n\n );\n\n throw createUserError(\n\n `Expected the 'queryName' argument of @refetchable to be a string, got '${String(\n\n queryName,\n\n )}'.`,\n\n [queryNameArg?.loc ?? refetchableDirective.loc],\n\n );\n\n }\n\n return queryName;\n", "file_path": "packages/relay-compiler/transforms/RefetchableFragmentTransform.js", "rank": 60, "score": 164625.17653441927 }, { "content": "function getFragmentSpreadName(fragmentSpread: FragmentSpread): string {\n\n if (fragmentSpread.args.length === 0) {\n\n return fragmentSpread.name;\n\n }\n\n const sortedArgs = [...fragmentSpread.args].sort((a, b) => {\n\n return a.name < b.name ? -1 : a.name > b.name ? 1 : 0;\n\n });\n\n const hash = murmurHash(JSON.stringify(sortedArgs));\n\n return `${fragmentSpread.name}_${hash}`;\n", "file_path": "packages/relay-compiler/transforms/DeferStreamTransform.js", "rank": 61, "score": 164625.17653441927 }, { "content": "/// Converts a self-contained corpus of definitions into typed IR, or returns\n\n/// a list of errors if the corpus is invalid.\n\npub fn build_ir(\n\n schema: &Schema,\n\n definitions: Vec<graphql_syntax::ExecutableDefinition>,\n\n) -> ValidationResult<Vec<ExecutableDefinition>> {\n\n let signatures = build_signatures(schema, &definitions)?;\n\n try_all(definitions, |definition| {\n\n let mut builder = Builder::new(schema, &signatures, definition.location());\n\n builder.build_definition(definition)\n\n })\n\n}\n\n\n", "file_path": "compiler/crates/graphql-ir/src/build.rs", "rank": 62, "score": 163792.90204388197 }, { "content": "pub fn build_signatures(\n\n schema: &Schema,\n\n definitions: &[graphql_syntax::ExecutableDefinition],\n\n) -> 
ValidationResult<FragmentSignatures> {\n\n let mut seen_signatures: FnvHashMap<StringKey, FragmentSignature> =\n\n FnvHashMap::with_capacity_and_hasher(definitions.len(), Default::default());\n\n let signatures = try_all(definitions, |definition| match definition {\n\n graphql_syntax::ExecutableDefinition::Fragment(fragment) => {\n\n Ok(Some(build_fragment_signature(schema, fragment)?))\n\n }\n\n graphql_syntax::ExecutableDefinition::Operation(_) => Ok(None),\n\n })?;\n\n let mut errors: ValidationErrors = Default::default();\n\n for signature in signatures {\n\n if let Some(signature) = signature {\n\n let previous_signature = seen_signatures.get(&signature.name.item);\n\n if let Some(previous_signature) = previous_signature {\n\n errors.push(ValidationError::new(\n\n ValidationMessage::DuplicateDefinition(signature.name.item),\n\n vec![\n", "file_path": "compiler/crates/graphql-ir/src/signatures.rs", "rank": 63, "score": 163782.1179702746 }, { "content": "pub fn detect_changes(\n\n current_definitions: &[Definition],\n\n current_text: &str,\n\n previous_text: &str,\n\n) -> SchemaChange {\n\n if current_text == previous_text {\n\n return SchemaChange::None;\n\n }\n\n let previous_definitions =\n\n parse_definitions(previous_text).expect(\"Failed to parse previous schema\");\n\n diff(current_definitions, previous_definitions)\n\n}\n", "file_path": "compiler/crates/schema-diff/src/lib.rs", "rank": 64, "score": 163782.1179702746 }, { "content": "const useStaticFragmentNodeWarning = require('./useStaticFragmentNodeWarning');\n", "file_path": "packages/relay-experimental/useIsParentQueryInFlight.js", "rank": 65, "score": 163102.45543048737 }, { "content": "pub fn build_constant_value(\n\n schema: &Schema,\n\n value: &graphql_syntax::ConstantValue,\n\n type_: &TypeReference,\n\n location: Location,\n\n validation: ValidationLevel,\n\n) -> ValidationResult<ConstantValue> {\n\n let signatures = Default::default();\n\n let mut builder = Builder::new(schema, &signatures, 
location);\n\n builder.build_constant_value(value, type_, validation)\n\n}\n\n\n\n// Helper Types\n\n\n", "file_path": "compiler/crates/graphql-ir/src/build.rs", "rank": 66, "score": 162126.9112754824 }, { "content": "pub fn get_reachable_ir(\n\n definitions: Vec<ExecutableDefinition>,\n\n base_definitions: FnvHashSet<StringKey>,\n\n changed_names: Vec<StringKey>,\n\n) -> Vec<ExecutableDefinition> {\n\n if changed_names.is_empty() {\n\n return vec![];\n\n }\n\n\n\n let mut trees = build_dependency_trees(definitions);\n\n\n\n let mut visited = FnvHashSet::default();\n\n let mut filtered_definitions = FnvHashMap::default();\n\n\n\n for key in changed_names.into_iter() {\n\n if trees.contains_key(&key) {\n\n add_related_nodes(\n\n &mut visited,\n\n &mut filtered_definitions,\n\n &mut trees,\n", "file_path": "compiler/crates/dependency-analyzer/src/ir.rs", "rank": 67, "score": 162126.9112754824 }, { "content": "pub fn get_reachable_ast(\n\n project_definitions: Vec<ExecutableDefinition>,\n\n base_definitions: Vec<Vec<ExecutableDefinition>>,\n\n) -> Result<(Vec<ExecutableDefinition>, FnvHashSet<StringKey>), String> {\n\n let base_len = base_definitions\n\n .iter()\n\n .map(|definition| definition.len())\n\n .sum();\n\n if base_len == 0 {\n\n return Ok((project_definitions, FnvHashSet::default()));\n\n }\n\n\n\n let mut reachable_base_asts = FnvHashSet::default();\n\n let mut base_definitions_map =\n\n FnvHashMap::with_capacity_and_hasher(base_len, Default::default());\n\n\n\n // Duplicate between base defnitions are allowed until they are referenced by the project definition\n\n let mut duplicate_base_definitions = FnvHashSet::default();\n\n\n\n // Preprocess all base fragment definitions\n", "file_path": "compiler/crates/dependency-analyzer/src/ast.rs", "rank": 68, "score": 162126.9112754824 }, { "content": "pub fn build_type_annotation(\n\n schema: &Schema,\n\n annotation: graphql_syntax::TypeAnnotation,\n\n location: Location,\n\n) -> ValidationResult<TypeReference> 
{\n\n let signatures = Default::default();\n\n let mut builder = Builder::new(schema, &signatures, location);\n\n builder.build_type_annotation(annotation)\n\n}\n\n\n", "file_path": "compiler/crates/graphql-ir/src/build.rs", "rank": 69, "score": 162126.9112754824 }, { "content": "fn diff(current: &str, previous: &str) -> SchemaChange {\n\n let definitions = parse_definitions(current).unwrap();\n\n let mut change = detect_changes(&definitions, current, previous);\n\n sort_change(&mut change);\n\n change\n\n}\n\n\n", "file_path": "compiler/crates/schema-diff/src/test.rs", "rank": 70, "score": 157459.68276070428 }, { "content": "const warning = require('warning');\n", "file_path": "packages/relay-experimental/useStaticFragmentNodeWarning.js", "rank": 71, "score": 156630.93013037072 }, { "content": "fn transfrom_linked_field(ctx: &CompilerContext<'_>, node: &LinkedField) -> Arc<LinkedField> {\n\n Arc::new(LinkedField {\n\n alias: node.alias,\n\n definition: node.definition,\n\n arguments: node.arguments.clone(),\n\n directives: node.directives.clone(),\n\n selections: transform_selections(ctx, &mut node.selections.clone()),\n\n })\n\n}\n\n\n", "file_path": "compiler/crates/graphql-transforms/src/sort_selections.rs", "rank": 72, "score": 154654.93807479346 }, { "content": "function transformFragmentArguments(\n\n context: CompilerContext,\n\n transformed: Map<string, ArgumentMap>,\n\n fragment: Fragment,\n\n): ArgumentMap {\n\n const name = fragment.name;\n\n const transformedArguments = transformed.get(name);\n\n if (transformedArguments != null) {\n\n return transformedArguments;\n\n }\n\n // Start with only the explicitly defined local arguments, recover the\n\n // correct set of root variables excluding invalid @arguments values.\n\n const argumentDefinitions: ArgumentMap = new Map();\n\n fragment.argumentDefinitions.forEach(argDef => {\n\n if (argDef.kind === 'LocalArgumentDefinition') {\n\n argumentDefinitions.set(argDef.name, argDef);\n\n }\n\n });\n\n // Break 
cycles by initially caching a version that only has local\n\n // arguments. If the current fragment is reached again, it won't have\n\n // any root variables to add to its parents. The traversal below will\n\n // find any root variables and update the cached version of the\n\n // fragment.\n\n transformed.set(name, argumentDefinitions);\n\n visit(context, transformed, argumentDefinitions, fragment);\n\n transformed.set(name, argumentDefinitions);\n\n return argumentDefinitions;\n", "file_path": "packages/relay-compiler/core/inferRootArgumentDefinitions.js", "rank": 73, "score": 151815.97627995277 }, { "content": "function transform(node: mixed, moduleMap: {[string]: mixed, ...}): mixed {\n\n if (node == null) {\n\n return node;\n\n } else if (Array.isArray(node)) {\n\n return node.map(item => transform(item, moduleMap));\n\n } else if (typeof node === 'object') {\n\n const next = {};\n\n Object.keys(node).forEach(key => {\n\n next[key] = transform(node[key], moduleMap);\n\n });\n\n return next;\n\n } else if (typeof node === 'string') {\n\n const match = /^@@MODULE_START@@(.*?)@@MODULE_END@@$/.exec(node);\n\n if (match != null) {\n\n const moduleName = match[1];\n\n if (moduleMap.hasOwnProperty(moduleName)) {\n\n return moduleMap[moduleName];\n\n } else {\n\n throw new Error(\n\n `Could not find a value for CodeMarker value '${moduleName}', ` +\n\n 'make sure to supply one in the module mapping.',\n\n );\n\n }\n\n } else if (node.indexOf('@@MODULE_START') >= 0) {\n\n throw new Error(`Found unprocessed CodeMarker value '${node}'.`);\n\n }\n\n return node;\n\n } else {\n\n // mixed\n\n return node;\n\n }\n", "file_path": "packages/relay-compiler/util/CodeMarker.js", "rank": 74, "score": 151477.62853249008 }, { "content": "fn hash(data: &str) -> String {\n\n let mut md5 = Md5::new();\n\n md5.input_str(data);\n\n md5.result_str()\n\n}\n", "file_path": "compiler/crates/signedsource/src/lib.rs", "rank": 75, "score": 148878.0631139296 }, { "content": "fn sign(data: &str) 
-> String {\n\n data.replace(NEWTOKEN, &format!(\"SignedSource<<{}>>\", hash(data)))\n\n}\n\n\n\npub const NEWTOKEN: &str = \"<<SignedSource::*O*zOeWoEQle#+L!plEphiEmie\\x40IsG>>\";\n\n/// The signing token to be embedded in the file you wish to be signed.\n\npub const SIGNING_TOKEN: &str = \"\\x40generated <<SignedSource::*O*zOeWoEQle#+L!plEphiEmie\\x40IsG>>\";\n\n\n", "file_path": "compiler/crates/signedsource/src/lib.rs", "rank": 76, "score": 148878.0631139296 }, { "content": "#[allow(dead_code)]\n\nfn is_newline(ch: char) -> bool {\n\n match ch {\n\n LINE_FEED | CARRIAGE_RETURN | LINE_SEPARATOR | PARAGRAPH_SEPARATOR => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n macro_rules! assert_token {\n\n ($src:expr, $kind:expr, $start:expr, $length:expr) => {\n\n assert_eq!(\n\n Lexer::new($src).next(),\n\n $crate::syntax_node::Token {\n\n span: Span::new($start, $length),\n\n inner_span: Span::new($start, $length),\n\n kind: $kind\n\n },\n", "file_path": "compiler/crates/graphql-syntax/src/lexer.rs", "rank": 77, "score": 147249.55819288592 }, { "content": "fn is_digit(ch: char) -> bool {\n\n ch >= DIGIT_0 && ch <= DIGIT_9\n\n}\n\n\n", "file_path": "compiler/crates/graphql-syntax/src/lexer.rs", "rank": 78, "score": 147243.9356571061 }, { "content": "let fragment;\n", "file_path": "packages/relay-experimental/__tests__/useIsParentQueryInFlight-test.js", "rank": 79, "score": 146600.664631356 }, { "content": "fn is_identifer_part(ch: char) -> bool {\n\n is_identifier_start(ch) || is_digit(ch)\n\n}\n\n\n", "file_path": "compiler/crates/graphql-syntax/src/lexer.rs", "rank": 80, "score": 145637.2989153579 }, { "content": "fn is_identifier_start(ch: char) -> bool {\n\n (ch >= CHAR_A && ch <= CHAR_Z) || (ch >= CHAR_LOWER_A && ch <= CHAR_LOWER_Z) || ch == UNDERSCORE\n\n}\n\n\n", "file_path": "compiler/crates/graphql-syntax/src/lexer.rs", "rank": 81, "score": 145637.2989153579 }, { "content": "fn sanitize_identifier(input: &str) -> 
String {\n\n input\n\n .chars()\n\n .map(|chr| match chr {\n\n 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' => chr,\n\n _ => '_',\n\n })\n\n .collect()\n\n}\n", "file_path": "compiler/crates/fixture-tests/src/main.rs", "rank": 82, "score": 145615.9239126509 }, { "content": "// Visit the selections of current IR, set the `children` for the node representing the IR,\n\n// and the `parents` for nodes representing the children IR\n\nfn visit_selections(\n\n trees: &mut FnvHashMap<StringKey, Node>,\n\n selections: &[Selection],\n\n parent_name: StringKey,\n\n children: &mut Vec<StringKey>,\n\n) {\n\n for selection in selections {\n\n match selection {\n\n Selection::FragmentSpread(node) => {\n\n let key = node.fragment.item;\n\n match trees.get_mut(&key) {\n\n None => {\n\n trees.insert(\n\n key,\n\n Node {\n\n ir: None,\n\n parents: vec![parent_name],\n\n children: vec![],\n\n },\n\n );\n", "file_path": "compiler/crates/dependency-analyzer/src/ir.rs", "rank": 83, "score": 144668.4662618988 }, { "content": "fn visit_selections(\n\n base_definitions_map: &FnvHashMap<StringKey, ExecutableDefinition>,\n\n reachable_base_asts: &mut FnvHashSet<StringKey>,\n\n duplicate_base_definitions: &FnvHashSet<StringKey>,\n\n selections: &List<Selection>,\n\n is_base: bool,\n\n) -> Result<(), String> {\n\n for selection in &selections.items {\n\n match selection {\n\n graphql_syntax::Selection::FragmentSpread(selection) => {\n\n if is_base || base_definitions_map.contains_key(&selection.name.value) {\n\n traverse_base_ast_definition(\n\n base_definitions_map,\n\n reachable_base_asts,\n\n duplicate_base_definitions,\n\n selection.name.value,\n\n )?\n\n }\n\n }\n\n graphql_syntax::Selection::LinkedField(selection) => visit_selections(\n", "file_path": "compiler/crates/dependency-analyzer/src/ast.rs", "rank": 84, "score": 144663.10830734935 }, { "content": "#[test]\n\nfn test_sign_file() {\n\n let new_signing_result_1 = sign_file(&format!(\"# {}\\ntest 1\", SIGNING_TOKEN));\n\n assert_eq!(\n\n 
new_signing_result_1,\n\n \"# \\x40generated SignedSource<<d9b7b52f54978f54b84a0fd48145e470>>\\ntest 1\"\n\n );\n\n\n\n let new_signing_result_2 = sign_file(&format!(\"# {}\\ntest 2\", SIGNING_TOKEN));\n\n assert_eq!(\n\n new_signing_result_2,\n\n \"# \\x40generated SignedSource<<4c0c1ae4f5863c72731b2f543e830fd5>>\\ntest 2\"\n\n );\n\n}\n\n\n", "file_path": "compiler/crates/signedsource/src/tests.rs", "rank": 85, "score": 144632.6361557169 }, { "content": "fn sort_change(change: &mut SchemaChange) {\n\n if let SchemaChange::DefinitionChanges(changes) = change {\n\n changes.sort();\n\n for c in changes {\n\n match c {\n\n DefinitionChange::EnumChanged {\n\n ref mut added,\n\n ref mut removed,\n\n ..\n\n } => {\n\n added.sort();\n\n removed.sort();\n\n }\n\n DefinitionChange::UnionChanged {\n\n ref mut added,\n\n ref mut removed,\n\n ..\n\n } => {\n\n added.sort();\n\n removed.sort();\n", "file_path": "compiler/crates/schema-diff/src/test.rs", "rank": 86, "score": 144081.32083943006 }, { "content": "fn is_non_newline_whitespace(ch: char) -> bool {\n\n match ch {\n\n SPACE | TAB | VERTICAL_TAB | FORM_FEED => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "compiler/crates/graphql-syntax/src/lexer.rs", "rank": 87, "score": 144077.7852779069 }, { "content": "#[test]\n\nfn basic_directives() {\n\n let input = include_str!(\"print/fixtures/basic_directives.graphql\");\n\n let expected = include_str!(\"print/fixtures/basic_directives.expected\");\n\n test_fixture(transform_fixture, \"basic_directives.graphql\", \"print/fixtures/basic_directives.expected\", input, expected);\n\n}\n\n\n", "file_path": "compiler/crates/graphql-printer/tests/print_test.rs", "rank": 88, "score": 142937.1041587761 }, { "content": "#[test]\n\nfn directive_include() {\n\n let input = include_str!(\"parse/fixtures/directive-include.graphql\");\n\n let expected = include_str!(\"parse/fixtures/directive-include.expected\");\n\n test_fixture(transform_fixture, \"directive-include.graphql\", 
\"parse/fixtures/directive-include.expected\", input, expected);\n\n}\n\n\n", "file_path": "compiler/crates/graphql-ir/tests/parse_test.rs", "rank": 89, "score": 142937.1041587761 }, { "content": "#[test]\n\nfn directive_generic() {\n\n let input = include_str!(\"parse/fixtures/directive-generic.graphql\");\n\n let expected = include_str!(\"parse/fixtures/directive-generic.expected\");\n\n test_fixture(transform_fixture, \"directive-generic.graphql\", \"parse/fixtures/directive-generic.expected\", input, expected);\n\n}\n\n\n", "file_path": "compiler/crates/graphql-ir/tests/parse_test.rs", "rank": 90, "score": 142937.1041587761 }, { "content": "#[test]\n\nfn keyword_as_name() {\n\n let input = include_str!(\"parse/fixtures/keyword_as_name.graphql\");\n\n let expected = include_str!(\"parse/fixtures/keyword_as_name.expected\");\n\n test_fixture(transform_fixture, \"keyword_as_name.graphql\", \"parse/fixtures/keyword_as_name.expected\", input, expected);\n\n}\n\n\n", "file_path": "compiler/crates/graphql-syntax/tests/parse_test.rs", "rank": 91, "score": 142921.47302005446 }, { "content": "#[test]\n\nfn basic_fragment() {\n\n let input = include_str!(\"print/fixtures/basic_fragment.graphql\");\n\n let expected = include_str!(\"print/fixtures/basic_fragment.expected\");\n\n test_fixture(transform_fixture, \"basic_fragment.graphql\", \"print/fixtures/basic_fragment.expected\", input, expected);\n\n}\n\n\n", "file_path": "compiler/crates/graphql-printer/tests/print_test.rs", "rank": 92, "score": 142385.6293686185 }, { "content": "#[test]\n\nfn fragment_with_arguments() {\n\n let input = include_str!(\"parse/fixtures/fragment-with-arguments.graphql\");\n\n let expected = include_str!(\"parse/fixtures/fragment-with-arguments.expected\");\n\n test_fixture(transform_fixture, \"fragment-with-arguments.graphql\", \"parse/fixtures/fragment-with-arguments.expected\", input, expected);\n\n}\n\n\n", "file_path": "compiler/crates/graphql-ir/tests/parse_test.rs", "rank": 93, 
"score": 142385.6293686185 }, { "content": "#[test]\n\nfn simple_fragment() {\n\n let input = include_str!(\"parse/fixtures/simple-fragment.graphql\");\n\n let expected = include_str!(\"parse/fixtures/simple-fragment.expected\");\n\n test_fixture(transform_fixture, \"simple-fragment.graphql\", \"parse/fixtures/simple-fragment.expected\", input, expected);\n\n}\n\n\n", "file_path": "compiler/crates/graphql-ir/tests/parse_test.rs", "rank": 94, "score": 142385.6293686185 }, { "content": "fn build_fragment_signature(\n\n schema: &Schema,\n\n fragment: &graphql_syntax::FragmentDefinition,\n\n) -> ValidationResult<FragmentSignature> {\n\n let type_name = fragment.type_condition.type_.value;\n\n let type_condition = match schema.get_type(type_name) {\n\n Some(type_condition) => match type_condition {\n\n Type::Interface(..) | Type::Object(..) | Type::Union(..) => Ok(type_condition),\n\n _ => Err(ValidationError::new(\n\n ValidationMessage::ExpectedCompositeType(type_condition),\n\n vec![fragment\n\n .location\n\n .with_span(fragment.type_condition.type_.span)],\n\n )),\n\n },\n\n None => Err(ValidationError::new(\n\n ValidationMessage::UnknownType(type_name),\n\n vec![fragment\n\n .location\n\n .with_span(fragment.type_condition.type_.span)],\n", "file_path": "compiler/crates/graphql-ir/src/signatures.rs", "rank": 95, "score": 142385.6293686185 }, { "content": "/**\n\n * Copyright (c) Facebook, Inc. 
and its affiliates.\n\n *\n\n * This source code is licensed under the MIT license found in the\n\n * LICENSE file in the root directory of this source tree.\n\n *\n\n * @emails oncall+relay\n\n * @flow strict-local\n\n * @format\n\n */\n\n\n\n// flowlint ambiguous-object-type:error\n\n\n\n'use strict';\n\n\n\nconst warning = require('warning');\n\n\n\nconst {useRef} = require('react');\n\n\n\nimport type {ReaderFragment} from 'relay-runtime';\n\n\n\nfunction useStaticFragmentNodeWarning(\n\n fragmentNode: ReaderFragment,\n\n warningContext: string,\n\n): void {\n\n if (__DEV__) {\n\n // This is calling `useRef` conditionally, but based on the environment\n\n // __DEV__ setting which shouldn't change. This allows us to only pay the\n\n // cost of `useRef` in development mode to produce the warning.\n\n // eslint-disable-next-line react-hooks/rules-of-hooks\n\n const initialPropRef = useRef(fragmentNode.name);\n\n warning(\n\n initialPropRef.current === fragmentNode.name,\n\n 'Relay: The %s has to remain the same over the lifetime of a component. 
' +\n\n 'Changing it is not supported and will result in unexpected behavior.',\n\n warningContext,\n\n );\n\n }\n\n}\n\n\n\nmodule.exports = useStaticFragmentNodeWarning;\n", "file_path": "packages/relay-experimental/useStaticFragmentNodeWarning.js", "rank": 96, "score": 142106.29284569298 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_sign_file_preexisting_token() {\n\n sign_file(\n\n \"# \\x40generated SignedSource<<eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee>>\\nalready signed test\",\n\n );\n\n}\n\n\n", "file_path": "compiler/crates/signedsource/src/tests.rs", "rank": 97, "score": 141271.49704809656 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_sign_file_without_token() {\n\n sign_file(\"# \\x40generated no-token\\nnot signed\");\n\n}\n\n\n", "file_path": "compiler/crates/signedsource/src/tests.rs", "rank": 98, "score": 141271.49704809656 }, { "content": "fn read_token(input: &str) -> ReaderResult<'_> {\n\n if let Some(c) = input.chars().next() {\n\n match c {\n\n '\\t' | ' ' | ',' | '\\u{feff}' | '\\n' | '\\r' => read_token(&input[1..]),\n\n '#' => read_token(read_comment(&input)),\n\n '&' => read_char(input, TokenKind::Amp),\n\n '!' 
=> read_char(input, TokenKind::Bang),\n\n '$' => read_char(input, TokenKind::Dollar),\n\n '(' => read_char(input, TokenKind::ParenL),\n\n ')' => read_char(input, TokenKind::ParenR),\n\n ':' => read_char(input, TokenKind::Colon),\n\n '=' => read_char(input, TokenKind::Equals),\n\n '@' => read_char(input, TokenKind::At),\n\n '[' => read_char(input, TokenKind::BracketL),\n\n ']' => read_char(input, TokenKind::BracketR),\n\n '{' => read_char(input, TokenKind::BraceL),\n\n '|' => read_char(input, TokenKind::Pipe),\n\n '}' => read_char(input, TokenKind::BraceR),\n\n 'a'..='z' | 'A'..='Z' | '_' => read_name(&input),\n\n '-' | '0'..='9' => read_number(&input),\n\n '\"' => read_string(&input),\n\n _ => (input, TokenKind::Error),\n\n }\n\n } else {\n\n (input, TokenKind::EOF)\n\n }\n\n}\n\n\n", "file_path": "compiler/crates/schema/src/lexer.rs", "rank": 99, "score": 140854.76797257492 } ]
Rust
crush/src/soc/bdd/differential/post_processing.rs
Simula-UiB/CRHS
8f3dd34c8b99680188d9314c6897e0c790f5358f
use std::collections::VecDeque; use std::num::NonZeroUsize; use std::ops::Range; use vob::Vob; use crate::soc::bdd::Bdd; use crate::soc::system::System; use super::dependency_finder::DepPathFinder; #[allow(unused_variables, dead_code)] pub struct PostProcessing { soc: System, step: usize, active_area: Range<usize>, } impl PostProcessing{ pub fn new(soc: System, step: usize, active_area: Range<usize>) -> Self { Self { soc, step, active_area } } } #[allow(unused_variables, dead_code)] pub struct ASolution { lhs: Vec<Vob>, rhs: Vec<bool>, } impl Bdd { pub fn extract_an_lsb_path(&self, active_area: &Range<usize>, step: usize) -> Vec<(Vob, bool)> { let top = active_area.start; let (arena, _) = self.ensure_level_is_in_arena(&top, &active_area, step); let (lsb, _) = arena.lowest_lsb_in_level(&top); let mut candidate_nodes = arena.nodes_with_lsb_at_level(&top, lsb); let mut path: VecDeque<bool> = VecDeque::with_capacity(self.get_levels_size()); let start_node = candidate_nodes.pop().unwrap().0; let mut deps = DepPathFinder::new(start_node, top, NonZeroUsize::new(step).unwrap(), &self); let mut root_lsb = lsb; let second_last = active_area.end - (step*2); for root_depth in (top..=second_last).step_by(step) { for (id, sub_path) in deps.iter() { let mut traversed_ones = false; for edge in sub_path.iter() { traversed_ones |= edge; } if traversed_ones { let shifted_lsb = root_lsb - 1; if arena.node_lsb(&(root_depth + step), id) == shifted_lsb { root_lsb = shifted_lsb; path.extend(sub_path); deps = DepPathFinder::new(*id, root_depth+step, NonZeroUsize::new(step).unwrap(), &self); break; } } else { if arena.node_lsb(&(root_depth + step), id) == root_lsb { path.extend(sub_path); deps = DepPathFinder::new(*id, root_depth+step, NonZeroUsize::new(step).unwrap(), &self); break; } } } } let mut trivial_root = None; for (id, sub_path) in deps.iter() { let mut traversed_ones = false; for edge in sub_path.iter() { traversed_ones |= edge; } if !traversed_ones { path.extend(sub_path); 
trivial_root = Some(id); break; } } if trivial_root.is_some() { } else { let (id, sub_path) = deps.iter().next().expect("Missing dependencies?"); path.extend(sub_path); } #[cfg(debug_assertions)] let control = path.len(); let mut i = top; let mut current_node = start_node; loop { if i == 0 { break; } for parent_node in self.levels[i-1].iter_nodes() { if let Some(e0) = parent_node.1.get_e0() { if e0 == current_node { path.push_front(false); current_node = *parent_node.0; break; } } if let Some(e1) = parent_node.1.get_e1() { if e1 == current_node { path.push_front(true); current_node = *parent_node.0; break; } } } i -= 1; } #[cfg(debug_assertions)] { assert_ne!(control, path.len(), "We were unsuccessful in finding a path from source to start of active area!"); assert_eq!(self.get_levels_size()-1, path.len(), "The path is not of same length as we have levels!"); } self.get_lhs().iter() .zip(path.iter()) .map(|(vob, edge)| (vob.clone(), *edge) ) .collect() } pub fn extract_a_sol(&self, active_area: &Range<usize>, step: usize) -> String { let a_path = self.extract_an_lsb_path(active_area, step); let mut formatted = String::new(); let setup: Vec<(usize, usize)> = a_path.iter() .map(|(lhs, rhs)| { let iter = lhs.iter_set_bits(..); let count = iter.clone().count(); let max_elem_size = iter.last().expect("Encountered an unexpected all zero LHS!") .to_string().chars().count(); (count, max_elem_size) }) .collect(); let elem_size = setup.iter() .map(|(_, max_elem_size)| max_elem_size).max().unwrap(); let max_vars = setup.iter() .map(|(count, _)| count).max().unwrap(); let elem_size = elem_size + 1; let row_len = max_vars * (elem_size + 3) - 3; for (i, (lhs, rhs)) in a_path.iter().enumerate() { if i % step == 0 { formatted.push_str(&format!("{:->r$}\n", "", r = row_len + 3)); } let mut lhs_buff = String::new(); for int in lhs.iter_set_bits(..) 
{ lhs_buff.push_str(&format!("{: >e$} + ", &format!("x{}", int), e = elem_size)); } lhs_buff.pop(); lhs_buff.pop(); lhs_buff.pop(); formatted.push_str(&format!("{: <r$}: {}\n", lhs_buff, *rhs as u8, r = row_len)); } formatted } pub fn stringify_sol_as_hex(&self, active_area: &Range<usize>, step: usize) -> String { let a_path = self.extract_an_lsb_path(active_area, step); let mut lhss = Vec::new(); let mut rhss = Vec::new(); for (lhs, rhs) in a_path.iter() { let lhs_int = lhs.iter_set_bits(..).next().unwrap(); lhss.push(lhs_int); rhss.push(*rhs); } let rhs_bytes = Self::bools_to_u8(&rhss); let s = Bdd::u8s_to_hex_separated(&rhs_bytes); s } fn u8s_to_hex_separated(bytes: &[u8] ) -> String { let bytes_rev: Vec<u8> = bytes.into_iter().rev().cloned().collect(); let mut s = String::new(); for four_bytes in bytes_rev.chunks(4) { println!("Four bytes: {:?}", four_bytes); for byte in four_bytes.iter(){ s.push_str(&format!("{:0>2x}", byte)); } s.push_str(" "); } s.pop(); s } fn bools_to_u8(bits: &Vec<bool>) -> Vec<u8>{ let b = bits.chunks(8) .map(|v| { v.iter().enumerate() .fold(0u8, |acc, (idx, x)| { acc | ((*x as u8) << idx)} ) }) .collect(); println!("As vec of u8: {:?}", &b); b } } #[cfg(test)] mod test { use super::*; #[test] fn test_bool_to_hex() { let bools = vec![ false, true, true, false, false, false, false, false, false, false, false, true, false, false, false, false, false, false, false, false, false, true, false, false, false, false, false]; let expected_u8s = vec![6, 8, 32, 0]; assert_eq!(expected_u8s, Bdd::bools_to_u8(&bools)); println!("Passed first assert."); let expected_hex = "00200806".to_owned(); assert_eq!(expected_hex, Bdd::u8s_to_hex_separated(&expected_u8s)); } }
use std::collections::VecDeque; use std::num::NonZeroUsize; use std::ops::Range; use vob::Vob; use crate::soc::bdd::Bdd; use crate::soc::system::System; use super::dependency_finder::DepPathFinder; #[allow(unused_variables, dead_code)] pub struct PostProcessing { soc: System, step: usize, active_area: Range<usize>, } impl PostProcessing{ pub fn new(soc: System, step: usize, active_area: Range<usize>) -> Self { Self { soc, step, active_area } } } #[allow(unused_variables, dead_code)] pub struct ASolution { lhs: Vec<Vob>, rhs: Vec<bool>, } impl Bdd { pub fn extract_an_lsb_path(&self, active_area: &Range<usize>, step: usize) -> Vec<(Vob, bool)> { let top = active_area.start; let (arena, _) = self.ensure_level_is_in_arena(&top, &active_area, step); let (lsb, _) = arena.lowest_lsb_in_level(&top); let mut candidate_nodes = arena.nodes_with_lsb_at_level(&top, lsb); let mut path: VecDeque<bool> = VecDeque::with_capacity(self.get_levels_size()); let start_node = candidate_nodes.pop().unwrap().0; let mut deps = DepPathFinder::new(start_node, top, NonZeroUsize::new(step).unwrap(), &self); let mut root_lsb = lsb; let second_last = active_area.end - (step*2); for root_depth in (top..=second_last).step_by(step) { for (id, sub_path) in deps.iter() { let mut traversed_ones = false; for edge in sub_path.iter() { traversed_ones |= edge; } if traversed_ones { let shifted_lsb = root_lsb - 1; if arena.node_lsb(&(root_depth + step), id) == shifted_lsb { root_lsb = shifted_lsb; path.extend(sub_path); deps = DepPathFinder::new(*id, root_depth+step, NonZeroUsize::new(step).unwrap(), &self); break; } } else { if arena.node_lsb(&(root_depth + step), id) == root_lsb { path.extend(sub_path); deps = DepPathFinder::new(*id, root_depth+step, NonZeroUsize::new(step).unwrap(), &self); break; } } } } let mut trivial_root = None; for (id, sub_path) in deps.iter() { let mut traversed_ones = false; for edge in sub_path.iter() { traversed_ones |= edge; } if !traversed_ones { path.extend(sub_path); 
trivial_root = Some(id); break; } } if trivial_root.is_some() { } else { let (id, sub_path) = deps.iter().next().expect("Missing dependencies?"); path.extend(sub_path); } #[cfg(debug_assertions)] let control = path.len(); let mut i = top; let mut current_node = start_node; loop { if i == 0 { break; } for parent_node in self.levels[i-1].iter_nodes() { if let Some(e0) = parent_node.1.get_e0() { if e0 == current_node { path.push_front(false); current_node = *parent_node.0; break; } } if let Some(e1) = parent_node.1.get_e1() { if e1 == current_node { path.push_front(true); current_node = *parent_node.0; break; } } } i -= 1; } #[cfg(debug_assertions)] { assert_ne!(control, path.len(), "We were unsuccessful in finding a path from source to start of active area!"); assert_eq!(self.get_levels_size()-1, path.len(), "The path is not of same length as we have levels!"); } self.get_lhs().iter() .zip(path.iter()) .map(|(vob, edge)| (vob.clone(), *edge) ) .collect() } pub fn extract_a_sol(&self, active_area: &Range<usize>, step: usize) -> String { let a_path = self.extract_an_lsb_path(active_area, step); let mut formatted = String::new(); let setup: Vec<(usize, usize)> = a_path.iter() .map(|(lhs, rhs)| { let iter = lhs.iter_set_bits(..); let count = iter.clone().count(); let max_elem_size = iter.last().expect("Encountered an unexpected all zero LHS!") .to_string().chars().count(); (count, max_elem_size) }) .collect(); let elem_size = setup.iter() .map(|(_, max_elem_size)| max_elem_size).max().unwrap(); let max_vars = setup.iter() .map(|(count, _)| count).max().unwrap(); let elem_size = elem_size + 1; let row_len = max_vars * (elem_size + 3) - 3; for (i, (lhs, rhs)) in a_path.iter().enumerate() { if i % step == 0 { formatted.push_str(&format!("{:->r$}\n", "", r = row_len + 3)); } let mut lhs_buff = String::new(); for int in lhs.iter_set_bits(..) 
{ lhs_buff.push_str(&format!("{: >e$} + ", &format!("x{}", int), e = elem_size)); } lhs_buff.pop(); lhs_buff.pop(); lhs_buff.pop(); formatted.push_str(&format!("{: <r$}: {}\n", lhs_buff, *rhs as u8, r = row_len)); } formatted } pub fn stringify_sol_as_hex(&self, active_area: &Range<usize>, step: usize) -> String { let a_path = self.extract_an_lsb_path(active_area, step); let mut lhss = Vec::new(); let mut rhss = Vec::new(); for (lhs, rhs) in a_path.iter() { let lhs_int = lhs.iter_set_bits(..).next().unwrap(); lhss.push(lhs_int); rhss.push(*rhs); } let rhs_bytes = Self::bools_to_u8(&rhss); let s = Bdd::u8s_to_hex_separated(&rhs_bytes); s }
fn bools_to_u8(bits: &Vec<bool>) -> Vec<u8>{ let b = bits.chunks(8) .map(|v| { v.iter().enumerate() .fold(0u8, |acc, (idx, x)| { acc | ((*x as u8) << idx)} ) }) .collect(); println!("As vec of u8: {:?}", &b); b } } #[cfg(test)] mod test { use super::*; #[test] fn test_bool_to_hex() { let bools = vec![ false, true, true, false, false, false, false, false, false, false, false, true, false, false, false, false, false, false, false, false, false, true, false, false, false, false, false]; let expected_u8s = vec![6, 8, 32, 0]; assert_eq!(expected_u8s, Bdd::bools_to_u8(&bools)); println!("Passed first assert."); let expected_hex = "00200806".to_owned(); assert_eq!(expected_hex, Bdd::u8s_to_hex_separated(&expected_u8s)); } }
fn u8s_to_hex_separated(bytes: &[u8] ) -> String { let bytes_rev: Vec<u8> = bytes.into_iter().rev().cloned().collect(); let mut s = String::new(); for four_bytes in bytes_rev.chunks(4) { println!("Four bytes: {:?}", four_bytes); for byte in four_bytes.iter(){ s.push_str(&format!("{:0>2x}", byte)); } s.push_str(" "); } s.pop(); s }
function_block-full_function
[ { "content": "/// From a `BddSpec` and a `nvar` build a `Bdd` following the specifications.\n\n/// \n\n/// We create an empty `Bdd`, set its `id` according to the spec then create all the levels\n\n/// (removing the `-1` from the `lhs` beforehand) without connecting the nodes.\n\n/// \n\n/// Once all the level have been created we connect all the nodes to each other following the\n\n/// `e0` and `e1` specs. All the id of the nodes are then reset to initialize `next_id` of the\n\n/// `Bdd`. Finally we remove any jumping edges by calling `add_same_edge_node_at_level` on all the\n\n/// levels of the `Bdd`.\n\n/// WARNING! There is an unconfirmed case which indicates that the removal of jumping edges does NOT\n\n/// work as intended! This will be investigated when I get the time.\n\n// FIXME, the case referred to is the original PRINCE or LowMC S-box used in our differential\n\n// experiments. It was built from a .bdd file, and we did had to change the .bdd to not include\n\n// jumping edges b/c they caused us trouble. I was not aware of the fact that this fn is supposed\n\n// to handle jumping edges at the time, otherwise I would have looked into it then. 
Maybe it's a\n\n// too early short-circuit again?\n\n// Of course, it may also have been some other error on our part, which is why this case is\n\n// \"unconfirmed\".\n\npub fn build_bdd_from_spec(spec: &mut BddSpec, nvar: usize) -> Bdd {\n\n let mut bdd = Bdd::new();\n\n bdd.set_id(spec.id);\n\n let next_id = spec.levels.iter().fold(0,|last_id,level|\n\n {\n\n let level_id =level.rhs.iter().fold(0,|last_id_level,node| {\n\n if *node.id > last_id_level {\n\n *node.id\n\n } else {\n\n last_id_level\n\n }\n\n });\n\n if level_id>last_id {\n\n level_id\n\n } else {\n\n last_id\n\n }\n\n });\n\n for (i,level_spec) in spec.levels.iter_mut().enumerate(){\n\n level_spec.remove_minus_one();\n", "file_path": "crush/src/soc/utils.rs", "rank": 0, "score": 372533.91231172474 }, { "content": "/// Write .bdd representation of a system to a file at path\n\npub fn print_system_to_file(system: &System, path: &PathBuf){\n\n let write_file = File::create(path).unwrap();\n\n let mut writer = BufWriter::new(&write_file);\n\n writeln!(writer,\"{} {}\",system.get_nvar(),system.iter_bdds().len()).unwrap();\n\n let mut ids = Vec::new();\n\n for bdd in system.iter_bdds() {\n\n ids.push(bdd.0);\n\n}\n\n ids.sort();\n\n for id in ids {\n\n print_bdd_to_file_format(&system.get_bdd(*id).unwrap().borrow(), &mut writer);\n\n }\n\n}\n\n\n", "file_path": "crush/src/soc/utils.rs", "rank": 1, "score": 362587.14357321314 }, { "content": "/// Write .dot language representation of the given bdd to a file at path\n\npub fn print_bdd_to_graphviz(bdd: &Bdd, path:&PathBuf) {\n\n let write_file = File::create(path).unwrap();\n\n let mut writer = BufWriter::new(&write_file);\n\n\n\n to_dot_format(&bdd, &mut writer);\n\n\n\n writer.flush().expect(\"Failed to write to file\");\n\n}\n\n\n", "file_path": "crush/src/soc/utils.rs", "rank": 2, "score": 360878.99224338576 }, { "content": "/// Write .dot language representation of the given bdd to a file at path\n\npub fn to_dot_format<W: Write> (shard: &Bdd, writer: 
&mut BufWriter<W>) {\n\n\n\n writeln!(writer, \"digraph \\\"DD\\\" {{\").unwrap();\n\n writeln!(writer, \"size = \\\"7.5,10\\\"\").unwrap();\n\n writeln!(writer, \"center = true;\").unwrap();\n\n writeln!(writer, \"edge [dir = none];\").unwrap();\n\n writeln!(writer, \"{{ node [shape = plaintext];\").unwrap();\n\n writeln!(writer, \"edge [style = invis];\").unwrap();\n\n writeln!(writer, \"\\\"CONST NODES\\\" [style = invis];\").unwrap();\n\n for (i,level) in shard.iter_levels().enumerate() {\n\n write!(writer, \"\\\"{}. \",i).unwrap();\n\n if level.iter_set_lhs().count() == 0 {\n\n write!(writer, \"0\").unwrap();\n\n } else {\n\n for (j,bit) in level.iter_set_lhs().enumerate() {\n\n if j > 0 {\n\n write!(writer, \" + \").unwrap();\n\n }\n\n write!(writer, \"x{}\",bit).unwrap();\n\n }\n", "file_path": "crush/src/soc/utils.rs", "rank": 3, "score": 346684.5790010057 }, { "content": "/// From a `SystemSpec` build a `System` following the specifications.\n\n/// \n\n/// We create an empty `System` with the `nvar` set to the spec and \n\n/// push to it every `Bdd` created using the spec.\n\n/// If some Id of Bdds in the spec are not unique their order is used as Id\n\npub fn build_system_from_spec(mut spec: SystemSpec) -> System {\n\n let mut system = System::new();\n\n system.set_nvar(spec.nvar as usize);\n\n let ids:HashSet<Id> = spec.bdds.iter().map(|bdd| bdd.id).collect();\n\n let nbr_bdd = spec.bdds.len();\n\n for (i,bdd_spec) in spec.bdds.iter_mut().enumerate(){\n\n if ids.len() != nbr_bdd {\n\n bdd_spec.id = Id::new(i);\n\n }\n\n system.push_bdd(build_bdd_from_spec(bdd_spec,spec.nvar as usize)).expect(\"No reason to crash since we are using the nvar of the system\n\n to set the one of the Bdds we are pushing\");\n\n }\n\n system\n\n}\n\n\n", "file_path": "crush/src/soc/utils.rs", "rank": 4, "score": 344052.0137797758 }, { "content": "/// Draw a graph representation of the Shard, using GraphViz.\n\n/// The output format is PDF.\n\n///\n\n/// **NOTE:** Requires that 
`GraphViz` is installed!\n\n///\n\n/// **WARNING!** The resulting output file may be very large!\n\n//FIXME unstable, does not always print...\n\npub fn draw_shard_as_pdf(shard: &Bdd, path:&PathBuf) {\n\n use std::process::{Command, Stdio};\n\n use std::fs::OpenOptions;\n\n use std::thread;\n\n use std::time::Duration;\n\n\n\n let mut args = vec![\"-Tpdf\",];\n\n let mut path = path.clone();\n\n path.set_extension(\"pdf\");\n\n\n\n println!(\"Path: {}\", path.display());\n\n let opath = format!(\"-o{}\", path.display().to_string());\n\n args.push(&opath);\n\n\n\n OpenOptions::new().write(true).create(true).open(&path).unwrap();\n\n println!(\"Args: {:?}\", &args);\n\n\n\n let mut dot = Command::new(\"dot\")\n\n .args(&args)\n\n .stdin(Stdio::piped())\n", "file_path": "crush/src/soc/utils.rs", "rank": 5, "score": 337281.31622502947 }, { "content": "// Transitions from one round to another is defined to be at any application of a non-linear layer,\n\n// partial or complete alike. That means that round 0 is from the initial input to the cipher,\n\n// through any linear transformation, and then as input to the first non-linear layer. 
The shards\n\n// for round 0 will be the shards made form that input, and from the fresh variables which are the\n\n// output of the non-linear part of the non-linear layer.\n\n//\n\n// The terms 'in' and 'out' will in general refer to the input and output of a non-linear layer.\n\npub fn make_soc<L, S>(llb: &L, sh: &S, nr_rounds: usize) -> (System, Vec<Vec<Id>>)\n\n where\n\n L: LLHandler,\n\n S: SBoxHandler,\n\n{\n\n let nvar = count_nvar(llb, sh, nr_rounds);\n\n let mut shards = Vec::new();\n\n\n\n // Initial in-block:\n\n let init_block_size = llb.block_size(0);\n\n let mut initial = Vec::with_capacity(init_block_size);\n\n for i in 0..init_block_size {\n\n let mut lhs = Vob::from_elem(nvar, false);\n\n lhs.set(i, true);\n\n initial.push(lhs);\n\n }\n\n // Apply round 0 linear layer,\n\n // 'inn' is input block to the non-linear layer.\n\n // let mut inn = llb.apply_linear_layer(0, initial);\n\n let mut inn = initial;\n", "file_path": "pathfinder/src/code_gen/soc_gen.rs", "rank": 6, "score": 329415.0517790043 }, { "content": "/// Solve a linear system represented by a `Matrix` (left hand side) and a `Vob` (right hand side).\n\n///\n\n/// To solve we augment the lhs with the rhs and use gaussian elimination.\n\n///\n\n/// Once the matrix is reduced the solution will be a `Vec` of `Some(bool)` for every fixed variable,\n\n/// and `None` for every free variable.\n\npub fn solve_linear_system(mut lhs: Matrix, mut rhs: Vob) -> Vec<Option<bool>> {\n\n for i in (0..lhs.row_size()).rev() {\n\n let mut highest_set_bit = get_max_set_bit(&lhs.rows[i]);\n\n let mut max_row = i;\n\n for j in (0..i).rev() {\n\n if get_max_set_bit(&lhs.rows[j]).is_some()\n\n && (highest_set_bit.is_none()\n\n || get_max_set_bit(&lhs.rows[j]).unwrap() > highest_set_bit.unwrap())\n\n {\n\n highest_set_bit = get_max_set_bit(&lhs.rows[j]);\n\n max_row = j;\n\n }\n\n }\n\n if let Some(highest_set_bit) = highest_set_bit {\n\n if max_row < i {\n\n lhs.rows.swap(i, max_row);\n\n let value_max_row = 
rhs[max_row];\n\n let value_i = rhs[i];\n\n rhs.set(i, value_max_row);\n\n rhs.set(max_row, value_i);\n", "file_path": "crush/src/algebra/mod.rs", "rank": 7, "score": 321303.7642801914 }, { "content": "pub fn find_best_bdd_pattern_dep(deps: &[NodeRankedDependency]) -> Vec<NodeRankedDependency> {\n\n let mut best_deps = Vec::new();\n\n let mut patterns: Vec<BDDPatern> = Vec::new();\n\n deps.iter().enumerate().for_each(|(index_dep, dep)| {\n\n let (mut ids, weigth) = dep\n\n .involved_bdds()\n\n .fold((Vec::new(), 0), |(mut i, w), bdd| {\n\n i.push(bdd.get_id());\n\n (i, w + bdd.get_total_size())\n\n });\n\n ids.sort();\n\n if let Some(p) = patterns.iter_mut().find(|p| p.ids == ids) {\n\n p.deps.push(index_dep);\n\n } else {\n\n patterns.push(BDDPatern {\n\n ids,\n\n deps: vec![index_dep],\n\n weigth,\n\n });\n\n }\n", "file_path": "cryptapath/src/strategy.rs", "rank": 8, "score": 321041.26931224257 }, { "content": "/// Return a SystemSpec from the parsing of a .bdd file using the correct format\n\npub fn parse_system_spec_from_file(path: &PathBuf) -> SystemSpec {\n\n let file = File::open(path).unwrap();\n\n let mut file_content = String::new();\n\n BufReader::new(file).read_to_string(&mut file_content).unwrap();\n\n let result = full_parser(CompleteStr(&file_content)).expect(\"Parsing file\");\n\n result.1\n\n}\n\n\n", "file_path": "crush/src/soc/utils.rs", "rank": 9, "score": 319363.21470206877 }, { "content": "/// bools to less than 8. 
Takes 'chunk_size' number of bits from 'bits' and turns them into an u8.\n\n /// Returns a vector with the u8's.\n\npub fn bools_to_lt8(bits: &[bool], chunk_size: usize) -> Vec<u8> {\n\n if chunk_size > 8 {\n\n panic!(\"Chunk-sizes above 8 won't fit in an u8!\");\n\n }\n\n let res = bits.chunks(chunk_size)\n\n .map(|chunk| {\n\n chunk.iter().enumerate()\n\n .fold(0u8, |acc, (idx, x)| { acc | ((*x as u8) << idx)} )\n\n })\n\n .collect();\n\n res\n\n}\n\n\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/utils/mod.rs", "rank": 10, "score": 316285.6163926205 }, { "content": "/// Write .bdd representation of a bdd to a Buffered write of a file\n\nfn print_bdd_to_file_format(bdd: &Bdd,writer: &mut BufWriter<&File>){\n\n writeln!(writer, \"{} {}\",*bdd.get_id(),bdd.iter_levels().count()).unwrap();\n\n for level in bdd.iter_levels() {\n\n for (i,bit) in level.iter_set_lhs().enumerate(){\n\n if i != 0 {\n\n write!(writer,\"+\").unwrap();\n\n }\n\n write!(writer,\"{}\",bit).unwrap();\n\n }\n\n write!(writer,\":\").unwrap();\n\n for (id,node) in level.iter_nodes() {\n\n let e0 = match node.get_e0(){\n\n Some(e0) => *e0,\n\n None => 0,\n\n };\n\n let e1 = match node.get_e1(){\n\n Some(e1) => *e1,\n\n None => 0,\n\n };\n\n write!(writer,\"({};{},{})\",*id,e0,e1).unwrap();\n\n }\n\n writeln!(writer,\"|\").unwrap();\n\n }\n\n writeln!(writer,\"---\").unwrap();\n\n}\n\n\n", "file_path": "crush/src/soc/utils.rs", "rank": 11, "score": 315188.5871192516 }, { "content": "fn construct_an_alpha<H: BTHandler> (scheduler: &H, s_box_out: &Vec<bool>, step: NonZeroUsize) -> Path {\n\n let col_nr_s = utils::bools_to_lt8(&s_box_out, step.get());\n\n let mut out = Vec::with_capacity(col_nr_s.len());\n\n\n\n for (s_box_pos, col_nr) in col_nr_s.iter().enumerate() {\n\n let mut best = 0;\n\n let mut row = 1000;\n\n\n\n // Check the whole column for the best entry, where best is the highest entry\n\n let bt = scheduler.bt(0, s_box_pos);\n\n for i in 0..(bt.nr_of_columns()) {\n\n let 
entry = bt.get_entry(i as u8, *col_nr)\n\n .expect(\"Index out of bounds?\");\n\n if entry > best {\n\n best = entry;\n\n row = i;\n\n }\n\n }\n\n\n\n if best == 0 { panic!(\"We didn't find a valid DDT entry!\")}\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc/construct_alpha_beta.rs", "rank": 12, "score": 314859.37127378106 }, { "content": "fn construct_an_alpha<H: BTHandler> (scheduler: &H, s_box_out: &Vec<bool>, step: NonZeroUsize) -> Path {\n\n let col_nr_s = utils::bools_to_lt8(&s_box_out, step.get());\n\n let mut out = Vec::with_capacity(col_nr_s.len());\n\n\n\n for (s_box_pos, col_nr) in col_nr_s.iter().enumerate() {\n\n let mut best = 0;\n\n let mut row = 1000;\n\n\n\n // Check the whole column for the best entry, where best is the highest entry\n\n let bt = scheduler.bt(0, s_box_pos);\n\n for i in 0..(bt.nr_of_columns()) {\n\n let entry = bt.get_entry(i as u8, *col_nr)\n\n .expect(\"Index out of bounds?\");\n\n if entry > best {\n\n best = entry;\n\n row = i;\n\n }\n\n }\n\n\n\n if best == 0 { panic!(\"We didn't find a valid DDT entry!\")}\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc_v2/construct_alpha_beta.rs", "rank": 13, "score": 312196.515034009 }, { "content": "// Empty arrays returns an empty string\n\n//\n\npub fn bools_to_bin_string(bits: &[bool]) -> Result<String, FmtError> {\n\n if bits.is_empty() {\n\n return Ok(String::new());\n\n }\n\n\n\n let len = bits.len();\n\n // Chop bits into u64, making it easy to transform them into binary Strings\n\n let mut u64s = bools_to_u64(bits);\n\n\n\n // We want the LSB to be rightmost and writing to a string starts leftmost (i.e. with\n\n // MSB). Since MSB is in the last u64 in the Vec, we need to deal with each block of\n\n // u64 in reverse order.\n\n\n\n let mut buff = String::new();\n\n // We cannot assume that 64 divides bits.len, meaning that we may not want a width of 64\n\n // for the u64 block containing the MSB. 
We need to check and handle accordingly:\n\n let remainder = len%64;\n\n if remainder == 0 {\n\n write!(buff, \"{:0>64b}\", u64s.pop().expect(\"Empty inputs should've been returned earlier\"))?;\n\n } else {\n\n write!(buff, \"{:0>w$b}\", u64s.pop().expect(\"Empty inputs should've been returned earlier\"),\n\n w = (len%64)/4)?;\n\n }\n\n // Deal with the remaining\n\n for num in u64s.iter().rev() {\n\n write!(buff, \"{:0>64b}\", num)?;\n\n }\n\n Ok(buff)\n\n}\n\n\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/utils/mod.rs", "rank": 14, "score": 290894.64252589736 }, { "content": "pub fn bools_to_hex_string(bits: &[bool]) -> Result<String, FmtError> {\n\n if bits.is_empty() {\n\n return Ok(String::new());\n\n }\n\n\n\n let len = bits.len();\n\n // Chop bits into u64, making it easy to transform them into hex Strings\n\n let mut u64s = bools_to_u64(bits);\n\n\n\n // We want the LSB to be rightmost and writing to a string starts leftmost (i.e. with\n\n // MSB). Since MSB is in the last u64 in the Vec, we need to deal with each block of\n\n // u64 in reverse order.\n\n\n\n let mut buff = String::new();\n\n // We cannot assume that 64 divides bits.len, meaning that we may not want a width of 16\n\n // for the u64 block containing the MSB. 
We need to check and handle accordingly:\n\n let remainder = len%64;\n\n if remainder == 0 {\n\n write!(buff, \"{:0>16x}\", u64s.pop().expect(\"Empty inputs should've been returned earlier\"))?;\n\n } else {\n\n write!(buff, \"{:0>w$x}\", u64s.pop().expect(\"Empty inputs should've been returned earlier\"),\n\n w = (len%64)/4)?;\n\n }\n\n // Deal with the remaining\n\n for num in u64s.iter().rev() {\n\n write!(buff, \"{:0>16x}\", num)?;\n\n }\n\n Ok(buff)\n\n}\n\n\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/utils/mod.rs", "rank": 15, "score": 290889.2001194406 }, { "content": "fn extract(lfsr: &mut VecDeque<bool>, t: usize) -> Vec<bool> {\n\n let mut out = Vec::with_capacity(t);\n\n for _ in 0..t {\n\n let mut tmp = false;\n\n let mut choice = false;\n\n while !choice {\n\n tmp = lfsr[62] ^ lfsr[51] ^ lfsr[38] ^ lfsr[23] ^ lfsr[13] ^ lfsr[0];\n\n lfsr.pop_front();\n\n lfsr.push_back(tmp);\n\n choice = tmp;\n\n tmp = lfsr[62] ^ lfsr[51] ^ lfsr[38] ^ lfsr[23] ^ lfsr[13] ^ lfsr[0];\n\n lfsr.pop_front();\n\n lfsr.push_back(tmp);\n\n }\n\n out.push(tmp)\n\n }\n\n out\n\n}\n\n\n", "file_path": "cryptapath/src/targets/lowmc.rs", "rank": 16, "score": 270189.671072131 }, { "content": "/// Return the matrix of linear dependencies of the linear system represented\n\n/// by `mat`.\n\n///\n\n/// To compute the matrix of linear dependencies :\n\n///\n\n/// -> augment the given matrix with the identity matrix\n\n///\n\n/// -> gauss the matrix and apply the same operations on the identity matrix\n\n///\n\n/// -> return the lower part of the identity containing the dependencies\n\npub fn extract_linear_dependencies(mut mat: Matrix) -> Matrix {\n\n let mut id = identity(mat.row_size());\n\n let mut loop_id = 0;\n\n for i in (0..mat.row_size()).rev() {\n\n let mut highest_set_bit = get_max_set_bit(&mat.rows[i]);\n\n let mut max_row = i;\n\n for j in (0..i).rev() {\n\n if get_max_set_bit(&mat.rows[j]).is_some()\n\n && (highest_set_bit.is_none()\n\n || 
get_max_set_bit(&mat.rows[j]).unwrap() > highest_set_bit.unwrap())\n\n {\n\n highest_set_bit = get_max_set_bit(&mat.rows[j]);\n\n max_row = j;\n\n }\n\n }\n\n if let Some(highest_set_bit) = highest_set_bit {\n\n if max_row < i {\n\n mat.rows.swap(i, max_row);\n\n id.rows.swap(i, max_row);\n\n }\n", "file_path": "crush/src/algebra/mod.rs", "rank": 17, "score": 257928.35219826715 }, { "content": "fn extract_a_beta<H: BTHandler> (scheduler: &H, s_box_in: &[bool], step: NonZeroUsize) -> Vob {\n\n let row_nr_s = utils::bools_to_lt8(&s_box_in, step.get());\n\n let mut out = Vec::with_capacity(row_nr_s.len());\n\n\n\n for (s_box_pos, row_nr) in row_nr_s.iter().enumerate() {\n\n let mut best = 0;\n\n let mut col = 1000;\n\n\n\n let bt = scheduler.bt(scheduler.nr_of_rounds() -1, s_box_pos);\n\n let row = bt.row(*row_nr as usize)\n\n .expect(\"Something went wrong in the conversion\");\n\n\n\n // Check the row for the best entry, where best is the highest entry\n\n for (i, entry) in row.iter().enumerate() {\n\n if *entry > best {\n\n best = *entry;\n\n col = i;\n\n }\n\n }\n\n if best == 0 { panic!(\"We didn't find a valid DDT entry!\")}\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc/construct_alpha_beta.rs", "rank": 18, "score": 257288.94117134693 }, { "content": "/// The weight of a Path. TODO consider to move to super.\n\ntype PathWeight = u8;\n\n\n\n/// Distribution which keeps track of what weight is seen, as well as how many Paths have this\n\n/// count.\n\n/// This is intended to be a replacement for WDCount, with the goal of significantly reduce memory\n\n/// consumption. This reduction is expected to come with a run-time performance cost. We'll see.\n\n#[derive(Hash, Debug, Clone, Eq, PartialEq)]\n\npub struct WDCountV2 {\n\n // BTreeMap to allow for Hashing, Option to allow for dist increment\n\n // FIXME is there a way to get rid of especially the Option? Use unsafe?\n\n // Obs, some features now rely on the BTreeMap being sorted. 
Needs to change if BTreeMap is replaced!\n\n dist: Option<BTreeMap<PathWeight, PathCount>>,\n\n}\n\n\n\nimpl NWDistribution for WDCountV2 {\n\n const SUPPORTED_DISTRIBUTION_LEN: usize = PathWeight::MAX as usize;\n\n\n\n #[inline]\n\n fn new_zeroed() -> Self {\n\n Self {\n", "file_path": "crush/src/soc/bdd/differential/wd/distribution/count_v2.rs", "rank": 19, "score": 256138.8286437249 }, { "content": "#[inline(always)]\n\npub fn compress(x: u128, level: usize) -> u128 {\n\n // We use bit patterns to reduce the amount of work done\n\n let mut y = x;\n\n for i in 0..(3 - level) {\n\n y = y | (y >> (1 << i));\n\n }\n\n\n\n y & COMP_PATTERN[level]\n\n}\n\n\n\n/// A struct representing a progress bar for progress printing on the command line.\n\npub struct ProgressBar {\n\n current_items: f64,\n\n item_size: f64,\n\n used: bool,\n\n}\n\n\n\nimpl ProgressBar {\n\n /// Creates a new progress for tracking progress of `num_items` steps.\n\n pub fn new(num_items: usize) -> ProgressBar {\n", "file_path": "soccs/src/dl/cg_original/utility.rs", "rank": 20, "score": 255439.3946110148 }, { "content": "fn extract_a_beta<H: BTHandler> (scheduler: &H, s_box_in: &[bool], step: NonZeroUsize) -> Vob {\n\n let row_nr_s = utils::bools_to_lt8(&s_box_in, step.get());\n\n let mut out = Vec::with_capacity(row_nr_s.len());\n\n\n\n for (s_box_pos, row_nr) in row_nr_s.iter().enumerate() {\n\n let mut best = 0;\n\n let mut col = 1000;\n\n\n\n let bt = scheduler.bt(scheduler.nr_of_rounds() -1, s_box_pos);\n\n let row = bt.row(*row_nr as usize)\n\n .expect(\"Something went wrong in the conversion\");\n\n\n\n // Check the row for the best entry, where best is the highest entry\n\n for (i, entry) in row.iter().enumerate() {\n\n if *entry > best {\n\n best = *entry;\n\n col = i;\n\n }\n\n }\n\n if best == 0 { panic!(\"We didn't find a valid DDT entry!\")}\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc_v2/construct_alpha_beta.rs", "rank": 21, "score": 255013.77582067158 }, 
{ "content": "/// Extracts one alpha path and one beta path.\n\n///\n\n/// Invariants\n\n/// Only the SESS start node is left on the Alpha level\n\n/// Only the SESS end node is left on the Beta level\n\npub fn extract_alpha_beta_path(master: Arc<Shard>, master_md: &SolvedSocMeta) -> (Path, Path) {\n\n let alpha = hull_calc::extract_a_single_path(master.clone(), 0, master_md.alpha_lvl_depth);\n\n let sink_depth = master.get_sink_level_index();\n\n let beta = hull_calc::extract_a_single_path(master, master_md.beta_lvl_depth, sink_depth);\n\n (alpha, beta)\n\n}\n\n\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc/extract_alpha_beta.rs", "rank": 22, "score": 242603.0664429976 }, { "content": "/// Extracts one alpha path and one beta path.\n\n///\n\n/// Invariants\n\n/// Only the SESS start node is left on the Alpha level\n\n/// Only the SESS end node is left on the Beta level\n\npub fn extract_alpha_beta_path(master: Arc<Shard>, master_md: &SolvedSocMeta) -> (Path, Path) {\n\n let alpha = hull_calc::extract_a_single_path(master.clone(), 0, master_md.alpha_lvl_depth);\n\n let sink_depth = master.get_sink_level_index();\n\n let beta = hull_calc::extract_a_single_path(master, master_md.beta_lvl_depth, sink_depth);\n\n (alpha, beta)\n\n}\n\n\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc_v2/extract_alpha_beta.rs", "rank": 23, "score": 240955.41272178426 }, { "content": "/// Extracts a path from the alpha node to the given Id, expected to be on the n-level. (Will panic\n\n/// if not). Returns the path.\n\nfn extract_alpha_to_n(master: &Arc<Shard>, master_md: &SolvedSocMeta, best_id: &Id) -> Path {\n\n // Note that we double the depth of alpha to get the depth of n. This will hold for ciphers with\n\n // complete S-box layers. 
However, this *may of may not* work on ciphers with *incomplete* S-box layers.\n\n // It depends on how the cipher is constructed and what the levels above the alpha level represents.\n\n // (Only the input to the non-linear S-box layer, or does it also include the input to the identity\n\n // element/linear part of the S-box layer?). => Invariant checks may need to be introduced.\n\n let inner_top_area = Range { start: master_md.alpha_lvl_depth, end: master_md.alpha_lvl_depth * 2 };\n\n\n\n // We only need to know which Centurions connects to the n-level, as all paths are of the same\n\n // weight anyways, down to the n-level.\n\n let n_alpha_arena: WDArena<WDPresence> = master.weight_distributions_arena_for_level (\n\n master_md.alpha_lvl_depth,\n\n &inner_top_area,\n\n master_md.step.clone(),\n\n &TargetedFactory::new(vec![best_id.clone()]),\n\n );\n\n\n\n let start_nodes = master\n\n .level(master_md.alpha_lvl_depth)\n\n .expect(\"Start level is missing!\")\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc_v2/priming.rs", "rank": 24, "score": 234809.32026449102 }, { "content": "/// Convert a Vec<Bit> to a binary string by taking the value\n\n/// of the constants. If some variable are in the bit they will be ignored.\n\npub fn bits_to_binary_string(bits: Vec<Bit>) -> String {\n\n bits.iter()\n\n .map(|bit| if bit.constant() { '1' } else { '0' })\n\n .collect::<String>()\n\n}\n\n\n", "file_path": "cryptapath/src/bit.rs", "rank": 25, "score": 225976.00647714915 }, { "content": "/// Convert a Vec<Bit> to an hexadecimal string by taking the value\n\n/// of the constants. 
If some variable are in the bit they will be ignored.\n\npub fn bits_to_hex_string(bits: Vec<Bit>) -> String {\n\n assert!(bits.len() % 8 == 0);\n\n let mut hex = String::with_capacity(bits.len() / 4);\n\n for i in 0..bits.len() / 8 {\n\n hex.push_str(&format!(\n\n \"{:02x}\",\n\n usize::from_str_radix(\n\n bits.iter()\n\n .skip(i * 8)\n\n .take(8)\n\n .map(|bit| if bit.constant() { '1' } else { '0' })\n\n .collect::<String>()\n\n .as_str(),\n\n 2\n\n )\n\n .unwrap()\n\n ));\n\n }\n\n hex\n\n}\n\n\n", "file_path": "cryptapath/src/bit.rs", "rank": 26, "score": 225976.00647714915 }, { "content": "pub fn bits_to_hex_string_keccak(bits: Vec<Bit>) -> String {\n\n assert!(bits.len() % 8 == 0);\n\n let mut hex = String::with_capacity(bits.len() / 4);\n\n for i in 0..bits.len() / 8 {\n\n hex.push_str(&format!(\n\n \"{:02x}\",\n\n usize::from_str_radix(\n\n bits.iter()\n\n .skip(i * 8)\n\n .take(8)\n\n .rev()\n\n .map(|bit| if bit.constant() { '1' } else { '0' })\n\n .collect::<String>()\n\n .as_str(),\n\n 2\n\n )\n\n .unwrap()\n\n ));\n\n }\n\n hex\n", "file_path": "cryptapath/src/targets/keccak.rs", "rank": 27, "score": 222235.60831177942 }, { "content": "/// Make a BDDSpec out of the entry bits and the out bits of the SBox by using the \n\n/// lookup table to build the graph.\n\n/// First the top part of the BDD is made by making the top layers of the BDD (from the top_layers).\n\n/// The nodes from the last level of the top layers are connected to the corresponding node from\n\n/// the bottom layers. To find which node should be connected to which we use the lookup table\n\n/// (provided by the parameter mapping). The bottom layers are constructed like the top layers\n\n/// but starting from the sink, in reverse order. So, using the mapping the node corresponding\n\n/// to the entry path n as to be connected to mapping[n] with its bit reversed.\n\n/// The lhs of each top level comes from the variables of the entry bits (with -1 if the constant\n\n/// is true). 
The lhs of each bottom level comes from the variables of the out bits.\n\n/// Finally we make the last level with an empty lhs and a single node.\n\n/// \n\n/// The Id of the BDDSpec is always set to 0 (the function building BDD from BDDspec will manage).\n\nfn buid_bdd_spec(top_layers: Vec<Bit>, lower_layers: Vec<Bit>, mapping: &[u8]) -> BddSpec {\n\n let mut levels = Vec::with_capacity(top_layers.len() + lower_layers.len() + 1);\n\n let mut next_node_id = 1;\n\n for (level_index, bit) in top_layers.iter().enumerate() {\n\n let nb_nodes = 2usize.pow(level_index as u32);\n\n let mut nodes = Vec::with_capacity(nb_nodes);\n\n for i in 0..nb_nodes {\n\n let id = next_node_id + i;\n\n let (e0, e1);\n\n if level_index != top_layers.len() - 1 {\n\n e0 = next_node_id + nb_nodes + 2 * i;\n\n e1 = next_node_id + nb_nodes + 1 + 2 * i;\n\n } else {\n\n let e0_destination = reverse_bits(mapping[i * 2], lower_layers.len());\n\n e0 = next_node_id + nb_nodes + e0_destination as usize;\n\n let e1_destination = reverse_bits(mapping[i * 2 + 1], lower_layers.len());\n\n e1 = next_node_id + nb_nodes + e1_destination as usize;\n\n }\n\n nodes.push(NodeSpec::new(Id::new(id), Id::new(e0), Id::new(e1)));\n\n }\n", "file_path": "cryptapath/src/sbox.rs", "rank": 28, "score": 220389.0718754458 }, { "content": "/// Searches for the alpha nodes whose nt_lew are contained in the weight range:\n\n/// > weight_range = alpha_lvl_nt_lew..alpha_lvl_nt_lew + 3\n\n///\n\n/// Returns a tuple, where first value is the alpha level nt lew, and the second value is a\n\n/// vec of vecs, each inner vec containing nodes sorted by the node nt lew values.\n\n/// Example:\n\n/// > index 0 contains the nodes with nt_lew == alpha_lvl_nt_lew.\n\n/// > index 1 contains the nodes with nt_lew == alpha_lvl_nt_lew + 1.\n\n///\n\n/// The vecs are empty iff no such nodes were found.\n\n///\n\n/// Consumes the TauAlphaDistribution Level.\n\npub fn alpha_candidates(tau_alpha_dists: WDLevel<WDPresence>) -> (u32, Vec<Vec<(Id, 
WDPresence)>>) {\n\n\n\n // Non-trivial lew for the Alpha level as a whole. Since this is from Tau to Alpha, this\n\n // implies that the level nt lew is the lowest existing non trivial path from one Alpha path\n\n // to the sink, and thus the overall fewest active S-boxes we've found.\n\n let lvl_nt_lew = match tau_alpha_dists.nt_lew() {\n\n Some(nt) => nt.0,\n\n None => panic!(\"We only have the trivial path present!\"),\n\n };\n\n\n\n // Range of weights in which want to investigate further.\n\n let weight_range = lvl_nt_lew..lvl_nt_lew + 3;\n\n\n\n // Collect and return the alpha nodes which nt_lew is within the weight_range, sorted into vec's by weight\n\n let candidates = tau_alpha_dists.into_iter()\n\n .fold(vec![vec![], vec![], vec![]],\n\n |mut acc, dist| {\n\n let nt_lew = dist.1.lowest_existing_non_trivial_weight();\n\n // If does not have an nt_lew, then not a candidate\n\n if nt_lew.is_none() {\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/sess_handling.rs", "rank": 29, "score": 219169.51613576856 }, { "content": "#[test]\n\nfn count_path_test() {\n\n let bdd = bdd!(5;0;[(\"1+2\",[(1;2,3)]);(\"3+2\",[(2;4,5);(3;4,0)]);(\"0+4\",[(4;0,6);(5;6,0)]);(\"\",[(6;0,0)])]);\n\n assert_eq!(bdd.count_paths(), 3_usize.into());\n\n\n\n let bdd = bdd!(5;0;[(\"0+4\",[(4;6,6)]);(\"\",[(6;0,0)])]);\n\n assert_eq!(bdd.count_paths(), 2_usize.into());\n\n\n\n let bdd = bdd!(5;0;[(\"\",[(6;0,0)])]);\n\n assert_eq!(bdd.count_paths(), 0_usize.into());\n\n}\n\n\n", "file_path": "crush/src/soc/test.rs", "rank": 30, "score": 218053.59832591022 }, { "content": "/// LSB is assumed to be at index 0.\n\npub fn bools_to_u64(bits: &[bool]) -> Vec<u64> {\n\n // LSB is assumed to be at index 0.\n\n bits.chunks(64)\n\n .map(|chunk| {\n\n chunk.iter().enumerate()\n\n .fold(0u64, |acc, (idx, x)| { acc | ((*x as u64) << idx)} )\n\n })\n\n .collect()\n\n}\n\n\n\n\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::mem::size_of;\n\n\n\n use vob::vob;\n\n\n\n use 
super::*;\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/utils/mod.rs", "rank": 31, "score": 216731.8199028155 }, { "content": "/// Return a new Vec<Bit> produced by XORing each bits of the two vectors.\n\n/// The two vectors must contains the same number of bits.\n\npub fn bit_vector_xoring(mut a: Vec<Bit>, mut b: Vec<Bit>) -> Vec<Bit> {\n\n assert_eq!(a.len(), b.len());\n\n a.drain(..)\n\n .zip(b.drain(..))\n\n .map(|(a_bit, b_bit)| a_bit ^ b_bit)\n\n .collect::<Vec<Bit>>()\n\n}\n\n\n\nimpl BitXor for Bit {\n\n type Output = Self;\n\n\n\n fn bitxor(self, rhs: Bit) -> Self::Output {\n\n Bit {\n\n vars: self\n\n .vars\n\n .symmetric_difference(&rhs.vars)\n\n .copied().collect(),\n\n constant: self.constant ^ rhs.constant,\n\n }\n\n }\n", "file_path": "cryptapath/src/bit.rs", "rank": 32, "score": 213805.0923248598 }, { "content": "pub fn build_system_sponge(hash: &dyn SpongeHash) -> (Vec<Bit>, System) {\n\n let mut message_bits = Vec::with_capacity(hash.message_length());\n\n for i in 0..hash.message_length() {\n\n message_bits.push(Bit::from_variable_id(i));\n\n }\n\n let output = hash.hash(message_bits);\n\n let mut sbox = hash.sbox();\n\n let bdds = sbox.bdds();\n\n let mut n_state = hash.message_length() / hash.rate_length();\n\n if hash.message_length() % hash.rate_length() > 0 {\n\n n_state += 1\n\n }\n\n n_state += hash.output_length() / hash.rate_length();\n\n if hash.output_length() % hash.rate_length() > 0 {\n\n n_state += 1\n\n }\n\n let system_spec = SystemSpec::new(\n\n hash.message_length() + (hash.state_length() * hash.n_rounds()) * n_state,\n\n bdds,\n\n );\n\n (output, build_system_from_spec(system_spec))\n\n}\n\n\n", "file_path": "cryptapath/src/targets/mod.rs", "rank": 33, "score": 213551.97456348035 }, { "content": "/// Reverse the order of the last n_bit bits: \n\n/// 0000_1001 with a 5 n_bit value will return 0001_0010\n\nfn reverse_bits(bits: u8, n_bit: usize) -> u8 {\n\n let mut tmp = bits;\n\n tmp = (tmp & 0b1111_0000) >> 4 
| (tmp & 0b0000_1111) << 4;\n\n tmp = (tmp & 0b1100_1100) >> 2 | (tmp & 0b0011_0011) << 2;\n\n tmp = (tmp & 0b1010_1010) >> 1 | (tmp & 0b0101_0101) << 1;\n\n tmp.wrapping_shr(8 - n_bit as u32)\n\n}\n\n\n", "file_path": "cryptapath/src/sbox.rs", "rank": 34, "score": 208981.50553242018 }, { "content": "#[deprecated]\n\nfn expand_to_full_path(lhss: &Matrix, rhs: &Vob) -> Vob<usize> {\n\n let mut res = Vob::with_capacity(rhs.len()*2);\n\n\n\n for lhs in lhss.iter_rows() {\n\n let mut lhs = lhs.clone();\n\n lhs.and(rhs);\n\n res.push(lhs.iter_set_bits(..).count() % 2 == 1);\n\n }\n\n\n\n res\n\n}", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc/construct_alpha_beta.rs", "rank": 35, "score": 208119.82376533112 }, { "content": "#[test]\n\nfn simple_test_depfinder_iter() {\n\n let simple = crate::bdd!(5;0;\n\n [\n\n (\"1+2\",[(1;2,3)]);\n\n (\"3+2\",[(2;4,5);(3;4,0)]);\n\n (\"0+4\",[(4;0,6);(5;6,0)]);\n\n (\"\",[(6;0,0)])\n\n ]);\n\n\n\n //Node Id's are re-named when imported through the macro(?). 
Updated below accordingly.\n\n let deps = DepBoolFinder::new(Id::new(10000),\n\n 0,\n\n NonZeroUsize::new(2).unwrap(),\n\n &simple);\n\n\n\n let mut d_iter = deps.iter();\n\n\n\n assert_eq!(d_iter.next(), Some((Id::new(40000), false)).as_ref());\n\n assert_eq!(d_iter.next(), Some((Id::new(50000), true)).as_ref());\n\n assert_eq!(d_iter.next(), Some((Id::new(40000), true)).as_ref());\n\n assert_eq!(d_iter.next(), None);\n\n}\n\n\n\n\n", "file_path": "crush/src/soc/bdd/differential/test.rs", "rank": 36, "score": 208076.71930850012 }, { "content": "#[test]\n\nfn simple_test_sbox_count() {\n\n // First and second assert checks full range, for step 3 and 2.\n\n // Third and onwards tests various ranges/offsets\n\n let simple = crate::bdd!(6;1;\n\n [\n\n (\"0\",[(1;2,3)]);\n\n (\"1\",[(2;4,5);(3;6,7)]);\n\n (\"2\",[(4;8,9);(5;10,11);(6;11,12);(7;0,12)]);\n\n (\"3\",[(8;13,14);(9;14,0);(10;13,14);(11;0,15);(12;15,0)]);\n\n (\"4\",[(13;16,0);(14;0,16);(15;0,17)]);\n\n (\"5\",[(16;18,0);(17;0,18)]);\n\n (\"\",[(18;0,0)])\n\n ]);\n\n\n\n let arena = simple.identify_trails_and_weights(.., 3);\n\n\n\n let arena_e: BTreeMap<usize,\n\n HashMap<Id, u128, BuildHasherDefault<ahash::AHasher>>> =\n\n [(3,\n\n [(Id::new(80001), 3),\n", "file_path": "crush/src/soc/bdd/differential/test.rs", "rank": 37, "score": 207929.9429617279 }, { "content": "pub trait PPFactory {\n\n type ProgressBar: StyledProgressBar;\n\n\n\n fn new_progress_bar(&self, len: u64) -> Self::ProgressBar;\n\n}\n\n\n", "file_path": "crush/src/soc/bdd/differential/wide_count_prune_core.rs", "rank": 38, "score": 207528.98172503262 }, { "content": "pub fn build_system_cipher(cipher: &dyn Cipher) -> (Vec<Bit>, Vec<Bit>, System) {\n\n let mut message_bits = Vec::with_capacity(cipher.message_length());\n\n let mut key_bits = Vec::with_capacity(cipher.key_length());\n\n for i in 0..cipher.key_length() {\n\n key_bits.push(Bit::from_variable_id(i));\n\n }\n\n for i in cipher.key_length()..cipher.message_length() + 
cipher.key_length() {\n\n message_bits.push(Bit::from_variable_id(i));\n\n }\n\n let output = cipher.encrypt(message_bits.clone(), key_bits);\n\n let mut sbox = cipher.sbox();\n\n let bdds = sbox.bdds();\n\n let system_spec = SystemSpec::new(sbox.next_var_id(), bdds);\n\n (message_bits, output, build_system_from_spec(system_spec))\n\n}\n\n\n", "file_path": "cryptapath/src/targets/mod.rs", "rank": 39, "score": 207410.23211672046 }, { "content": "#[deprecated]\n\nfn expand_to_full_path(lhss: &Matrix, rhs: &Vob) -> Vob<usize> {\n\n let mut res = Vob::with_capacity(rhs.len()*2);\n\n\n\n for lhs in lhss.iter_rows() {\n\n let mut lhs = lhs.clone();\n\n lhs.and(rhs);\n\n res.push(lhs.iter_set_bits(..).count() % 2 == 1);\n\n }\n\n\n\n res\n\n}", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc_v2/construct_alpha_beta.rs", "rank": 40, "score": 206055.61603748676 }, { "content": "fn matrix_from_vec_bool(matrix: &[bool], n_rows: usize, n_columns: usize) -> Matrix {\n\n assert_eq!(matrix.len(), n_rows * n_columns);\n\n let mut rows = Vec::with_capacity(n_rows);\n\n for row_index in 0..matrix.len() / n_columns {\n\n let mut row = Vob::new();\n\n let row_bits = &matrix\n\n .iter()\n\n .skip(row_index * n_columns)\n\n .take(n_columns)\n\n .collect::<Vec<&bool>>();\n\n for bit in row_bits.iter() {\n\n row.push(**bit);\n\n }\n\n rows.push(row);\n\n }\n\n matrix![rows]\n\n}\n\n\n", "file_path": "cryptapath/src/targets/lowmc.rs", "rank": 41, "score": 201350.9519633039 }, { "content": "/// A trait allowing for flexibility for the user to choose what underlying ProgressBar they want to\n\n/// use. In essence, this trait is almost like a newtype over any ProgressBar, with only the calls\n\n/// to update and manipulate stat present. (I.e. 
all styling etc must be handled by the user when\n\n/// implementing the Trait).\n\npub trait StyledProgressBar: Clone + Send {\n\n fn inc(&self, delta: u64);\n\n fn set_message(&self, msg: &str);\n\n fn finish_with_message(&self, msg: &str);\n\n fn finish_and_clear(&self);\n\n fn println(&self, msg: &str);\n\n}\n\n\n\nimpl Bdd {\n\n pub fn complexity_based_wide_prune_v3<R, L, F>(&mut self,\n\n complexity_target: usize,\n\n working_area: R,\n\n step: usize,\n\n librarian: &mut L,\n\n progress: F,\n\n\n\n )\n\n where\n\n R: RangeBounds<usize>,\n\n L: PruneLogger,\n", "file_path": "crush/src/soc/bdd/differential/wide_count_prune_core.rs", "rank": 42, "score": 197626.07554540192 }, { "content": "fn matrix_rank(matrix: &[bool], n_rows: usize, n_columns: usize) -> usize {\n\n let mut m = matrix_from_vec_bool(matrix, n_rows, n_columns);\n\n let rank = if n_rows > n_columns {\n\n m = transpose(&m);\n\n n_columns\n\n } else {\n\n n_rows\n\n };\n\n let dep = extract_linear_dependencies(m);\n\n rank - dep.row_size()\n\n}\n\n\n", "file_path": "cryptapath/src/targets/lowmc.rs", "rank": 43, "score": 196939.14192998834 }, { "content": "pub fn fix_system_values_sponge(\n\n hash: &dyn SpongeHash,\n\n system: &mut System,\n\n hash_value: &[Bit],\n\n output_bits: &[Bit],\n\n) {\n\n let padding_bit = {\n\n if hash.message_length() <= hash.rate_length() {\n\n hash.rate_length() - 1\n\n } else {\n\n hash.message_length() + hash.message_length() % hash.rate_length() - 1\n\n }\n\n };\n\n //fixing padding (every padding end with a one regardless of the message_length)\n\n system.fix(vec![padding_bit], true).unwrap();\n\n //fixing the value of the output\n\n for (output_bit, expected_bit) in output_bits.iter().zip(hash_value) {\n\n system\n\n .fix(\n\n output_bit.vars.iter().map(|var| var.id()).collect(),\n\n output_bit.constant() ^ expected_bit.constant(),\n\n )\n\n .unwrap();\n\n }\n\n}\n\n\n", "file_path": "cryptapath/src/targets/mod.rs", "rank": 44, "score": 196928.47163772507 }, { 
"content": "pub fn fix_system_values_cipher(\n\n system: &mut System,\n\n plaintext: &[Bit],\n\n ciphertext: &[Bit],\n\n input_bits: &[Bit],\n\n output_bits: &[Bit],\n\n) {\n\n for (plaintext_vars, plaintext_bits) in input_bits.iter().zip(plaintext) {\n\n system\n\n .fix(\n\n plaintext_vars.vars.iter().map(|var| var.id()).collect(),\n\n plaintext_vars.constant() ^ plaintext_bits.constant(),\n\n )\n\n .unwrap();\n\n }\n\n for (ciphertext_vars, expected_bit) in output_bits.iter().zip(ciphertext) {\n\n system\n\n .fix(\n\n ciphertext_vars.vars.iter().map(|var| var.id()).collect(),\n\n ciphertext_vars.constant() ^ expected_bit.constant(),\n\n )\n\n .unwrap();\n\n }\n\n}\n\n\n", "file_path": "cryptapath/src/targets/mod.rs", "rank": 45, "score": 196928.47163772507 }, { "content": "pub fn estimate_best_sess_connections<P>(alpha_candidates: Vec<(Id, WDPresence)>,\n\n beta_level_dists: Arc<WDLevel<WDPresence>>,\n\n alpha_beta_dist: Arc<WDLevel<EndNodeDist>>,\n\n alpha_level_nt_lew: u32,\n\n // NT lew of alpha candidates in alpha_candidates: Vec\n\n candidates_nt_lew: u32,\n\n // How many sess cons max to return\n\n max_connections: usize,\n\n progress: &P,\n\n k: f64,\n\n) -> Vec<SessEstimate>\n\n where\n\n P: PPFactory,\n\n{\n\n if alpha_candidates.is_empty() {\n\n return vec![];\n\n }\n\n\n\n // Make sure that the number of SESS candidates does not grow into the sky, by setting an upper\n\n // limit. 
We set a limit either to 10% more than 'max_connection\", or at 20000, whichever is\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/sess_handling.rs", "rank": 46, "score": 194481.12701898805 }, { "content": "pub fn fix_system_values_cipher_with_partial_key(\n\n system: &mut System,\n\n plaintext: &[Bit],\n\n ciphertext: &[Bit],\n\n partial_key: (Vec<Bit>, Vec<usize>),\n\n input_bits: &[Bit],\n\n output_bits: &[Bit],\n\n) {\n\n // This assumes that the key variables are always the n first (from 1 to key_length)\n\n // In pratice this is safe because we use this assumption everywhere but in case\n\n // someone would like to tinker with the library this has to be taken into account.\n\n for known_bit in partial_key.1.iter() {\n\n system\n\n .fix(vec![*known_bit], partial_key.0[*known_bit].constant())\n\n .unwrap();\n\n }\n\n fix_system_values_cipher(system, plaintext, ciphertext, input_bits, output_bits);\n\n}\n\n\n", "file_path": "cryptapath/src/targets/mod.rs", "rank": 47, "score": 192636.16550648067 }, { "content": "pub fn fix_system_values_sponge_with_partial_preimage(\n\n hash: &dyn SpongeHash,\n\n system: &mut System,\n\n hash_value: &[Bit],\n\n output_bits: &[Bit],\n\n mut partial_preimage: (Vec<Bit>,Vec<usize>)\n\n) {\n\n fix_system_values_sponge(hash, system, hash_value, output_bits);\n\n let padding_bit = {\n\n if hash.message_length() <= hash.rate_length() {\n\n hash.rate_length() - 1\n\n } else {\n\n hash.message_length() + hash.message_length() % hash.rate_length() - 1\n\n }\n\n };\n\n // We already fixed the padding bit, so if the last bit of the preimage\n\n // is known (and it has to be a 1 then we skip it)\n\n let last_known_bit = *partial_preimage.1.iter().last().unwrap();\n\n if last_known_bit == padding_bit {\n\n partial_preimage.1.pop();\n\n partial_preimage.0.pop();\n\n }\n\n //fixing the known bits of the preimage\n\n for known_bit in partial_preimage.1.iter() {\n\n system\n\n .fix(vec![*known_bit], 
partial_preimage.0[*known_bit].constant())\n\n .unwrap();\n\n }\n\n}\n\n\n", "file_path": "cryptapath/src/targets/mod.rs", "rank": 48, "score": 192636.16550648067 }, { "content": "/// Create an identity matrix (a matrix where only the [a,a] elements are set)\n\npub fn identity(size: usize) -> Matrix {\n\n let mut m = Matrix::new(size, size);\n\n for i in 0..size {\n\n m.rows[i].set(i, true);\n\n }\n\n m\n\n}\n\n\n", "file_path": "crush/src/algebra/mod.rs", "rank": 49, "score": 190977.82729514345 }, { "content": "/// Turn a u128 into a Vob\n\nfn nn_to_vob(int: u128) -> Vob<usize> {\n\n // Fixme untested\n\n let as_bytes = int.to_be_bytes();\n\n let vob = Vob::from_bytes(&as_bytes);\n\n let vob: Vob = vob.iter().rev().collect();\n\n vob\n\n}\n\n\n\n#[allow(dead_code)]\n", "file_path": "soccs/src/dl/builders/cg/mod.rs", "rank": 50, "score": 188359.48178374593 }, { "content": "/// Find the node on the n-level which we want our example path to go through.\n\n/// Return the Id of the n-node.\n\nfn identify_n_node(beta_n_dists: &WDLevel<WDCountV2>, beta_n_lvl_nt_lew: u32, nodes_with_nt_lew: Vec<Id>\n\n) -> Id {\n\n // This fn can later be modified to take into account that the node with the most paths of weight\n\n // == lvl_nt_lew is not always the best node. However, the example path is always expected to\n\n // have weight == lvl nt lew, which means that the n-node still should have at least one path\n\n // of weight level nt-lew... 
Anyways, this is not expected to be a big thing, although that is\n\n // based on anecdotal \"evidence\" and not data...\n\n\n\n let mut nodes_iter = nodes_with_nt_lew.iter();\n\n // Setting up compare\n\n let mut best_id = nodes_iter.next().unwrap().clone();\n\n let mut best_count = beta_n_dists.get(&best_id).unwrap()\n\n .paths_for_weight(beta_n_lvl_nt_lew).unwrap();\n\n\n\n // Find best node, based on number of paths with n-level nt lew going through it\n\n for id in nodes_iter {\n\n if let Some(count) = beta_n_dists.get(id).unwrap()\n\n .paths_for_weight(beta_n_lvl_nt_lew)\n\n {\n\n if count > best_count {\n\n best_count = count;\n\n best_id = id.clone();\n\n }\n\n }\n\n };\n\n best_id\n\n}\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc_v2/priming.rs", "rank": 51, "score": 188119.13448032187 }, { "content": " Some((lew.clone(), *start_id, *end_id, paths))\n\n }\n\n}\n\n\n\nimpl<W> FromIterator<(NodeId, W)> for WDLevel<W> {\n\n fn from_iter<I: IntoIterator<Item = (NodeId, W)>>(iter: I) -> Self {\n\n let mut level = HashMap::default();\n\n level.extend(iter);\n\n Self {\n\n depth: None,\n\n dists: level\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, W> FromIterator<(&'a NodeId, W)> for WDLevel<W> {\n\n fn from_iter<I: IntoIterator<Item = (&'a NodeId, W)>>(iter: I) -> Self {\n\n let mut level = HashMap::default();\n\n level.extend(\n\n iter.into_iter()\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 52, "score": 187789.1801564317 }, { "content": " /// end-node to existing weights to count\n\n pub fn existing_weights_with_paths_per_connection(&self)\n\n -> AHashMap<NodeId, AHashMap<NodeId, BTreeMap<u32, &PathCount>>> {\n\n\n\n let mut map = HashMap::default();\n\n for (start_id, w_map) in self.dists.iter()\n\n .map(|(start_id, dist)| (start_id.clone(), dist.existing_weights_with_paths_per_connection()))\n\n {\n\n map.insert(start_id, w_map);\n\n }\n\n map\n\n }\n\n\n\n\n\n /// Identify (one of) the connection(s) which 
contains the most nr of paths for the level nt-lew.\n\n /// Returns None if none of the connections have a nt-lew\n\n /// FIXME make generic, i.e. for any given weight, find a connection with max paths\n\n pub fn nt_lew_connection_max_paths(&self) -> Option<(u32, NodeId, NodeId, PathCount)> {\n\n let mut sorted = BTreeMap::new();\n\n\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 53, "score": 187786.49407152235 }, { "content": " .map(|(id, w)| (id.clone(), w))\n\n .collect::<HashMap<NodeId, W, BuildHasherDefault<AHasher>>>()\n\n );\n\n Self {\n\n depth: None,\n\n dists: level\n\n }\n\n }\n\n}\n\n\n\nimpl<W: NWDistribution> fmt::Debug for WDLevel<W> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self.depth {\n\n Some(depth) => writeln!(f, \"{: >4}Depth {}:\", \"\", depth)?,\n\n None => writeln!(f, \"{: >4}Depth: N/A\", \"\")?,\n\n };\n\n\n\n\n\n\n\n for (id, weight) in self.dists.iter() {\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 54, "score": 187784.4992895553 }, { "content": "\n\nuse super::super::PathCount;\n\n\n\n// ************************************* Contents **************************************************\n\n// struct WDLevel<W>\n\n// - impl<W: NWDistribution> WDLevel<W>\n\n// - impl<W: NcWDistribution> WDLevel<W>\n\n// - impl<W: Node2NodeDistribution> WDLevel<W>\n\n// - impl<W> FromIterator<(NodeId, W)> for WDLevel<W>\n\n// - impl<'a, W> FromIterator<(&'a NodeId, W)> for WDLevel<W>\n\n// - impl<W: NWDistribution> fmt::Debug for WDLevel<W>\n\n// *********************************** Contents End ************************************************\n\n\n\n#[derive(Clone)]\n\npub struct WDLevel<W> {\n\n /// Depth of level, optional to set.\n\n depth: Option<Depth>,\n\n dists: HashMap<NodeId, W, BuildHasherDefault<AHasher>>,\n\n}\n\n\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 55, "score": 187779.43195654437 }, { "content": " if 
maybe_nt.is_some() {\n\n // Get the starting id's which contains the level nt-lew\n\n let (_, mut start_ids) = maybe_nt.unwrap();\n\n // Not sure if having it sorted speeds up any searching, but I hope it does.\n\n start_ids.sort();\n\n\n\n // Filter away the rest\n\n self.dists.iter().\n\n filter(|(start_id, _)| start_ids.contains(start_id))\n\n .collect()\n\n\n\n } else {\n\n // We don't have a level nt lew, b/c all distributions in level only have the trivial lew/\n\n // Therefore, we return, in effect, None\n\n vec![]\n\n };\n\n g.into_iter()\n\n }\n\n\n\n /// For each connection from self to end node, return a mapping from\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 56, "score": 187778.0923942492 }, { "content": "impl<W: Node2NodeDistribution> WDLevel<W> {\n\n\n\n /// Iterate over all connections which does not contain the trivial lew. Each connection may\n\n /// have different lews, meaning that also connections with their lew being different than the\n\n /// level lew are present\n\n pub fn iter_nt_lew_connections(&self) -> Filter<Iter<'_, Id, W>, fn(&(&'_ Id, &'_ W)) -> bool> {\n\n self.iter().filter(|(_, dist)| !dist.contains_trivial_lew())\n\n }\n\n\n\n // the name is slightly misvisende, as it returns the id and distribution of all level nt-lew\n\n // nodes, a connection would be one start node to one end node, not one start node to potentially\n\n // multiple end nodes, as it does now.\n\n /// Returns an iterator over start_ids and their distributions, whose lew is the level nt-lew.\n\n ///\n\n pub fn iter_level_nt_lew_connections(&self) -> IntoIter<(&Id, &W)> {\n\n // This is awkward way to do this, but it gets the job done...\n\n\n\n // get level nt-lew\n\n let maybe_nt = self.nt_lew();\n\n let g: Vec<(&Id, &W)> =\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 57, "score": 187776.79136889588 }, { "content": "// #[cfg(feature = \"unstable\")]\n\n// use 
std::collections::TryReserveError;\n\n\n\nuse ahash::AHasher;\n\nuse std::collections::BTreeMap;\n\nuse std::collections::hash_map::{Entry, Keys};\n\nuse std::collections::hash_map;\n\nuse std::collections::hash_map::Iter;\n\nuse std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::hash::BuildHasherDefault;\n\nuse std::iter::Filter;\n\nuse std::iter::FromIterator;\n\nuse std::vec::IntoIter;\n\n\n\nuse crate::AHashMap;\n\nuse crate::soc::{Id as NodeId, Id};\n\nuse crate::soc::bdd::differential::Depth;\n\nuse crate::soc::bdd::differential::wd::distribution::{Node2NodeDistribution, NWDistribution};\n\nuse crate::soc::bdd::differential::wd::NcWDistribution;\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 58, "score": 187776.76460806731 }, { "content": "impl<W> WDLevel<W> {\n\n pub fn new(depth: Option<Depth>) -> Self {\n\n Self {\n\n depth,\n\n dists: HashMap::default(),\n\n }\n\n }\n\n\n\n pub fn with_capacity(capacity: usize) -> Self {\n\n let mut dists = HashMap::default();\n\n dists.reserve(capacity);\n\n Self {\n\n depth: None,\n\n dists,\n\n }\n\n }\n\n\n\n pub fn set_depth(&mut self, depth: Depth) {\n\n self.depth = Some(depth);\n\n }\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 59, "score": 187776.68085022664 }, { "content": " let mut map = BTreeMap::new();\n\n for (id, ws) in self.dists.iter()\n\n .map(|(id, dist)| (id, dist.existing_weights()))\n\n {\n\n for w in ws {\n\n let ids = map.entry(w).or_insert(Vec::new());\n\n ids.push(id.clone());\n\n }\n\n }\n\n map\n\n }\n\n}\n\n\n\nimpl<W: NcWDistribution> WDLevel<W> {\n\n\n\n pub fn paths_for_weight(&self) {\n\n todo!()\n\n }\n\n}\n\n\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 60, "score": 187774.47755518317 }, { "content": "\n\n pub fn into_iter(self) -> hash_map::IntoIter<Id, W> {\n\n self.dists.into_iter()\n\n }\n\n\n\n pub fn keys(&self) -> Keys<'_, Id, W> {\n\n self.dists.keys()\n\n }\n\n\n\n pub fn 
len(&self) -> usize {\n\n self.dists.len()\n\n }\n\n\n\n /// Returns the 'width' of the level, that is, the number of nodes registered.\n\n pub fn width(&self) -> usize {\n\n self.dists.len()\n\n }\n\n\n\n /// Clears the Level, removing all NodeId-W pairs. Keeps the allocated memory for reuse.\n\n pub fn clear(&mut self) {\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 61, "score": 187774.36607069912 }, { "content": " for (start_id, maybe_nt) in self.dists.iter()\n\n .map(|(start_id, dist)| {\n\n // (start_id, Option<(lew, Vec<end_id>)>)\n\n (start_id, dist.nt_lew_and_e_ids())\n\n }) {\n\n\n\n if maybe_nt.is_some() {\n\n let (lew, end_ids) = maybe_nt.unwrap();\n\n // We have lew, start, end_ids\n\n // I'd like to have lew, start, end, max paths w/lew\n\n let e_id_lew_paths: BTreeMap<&PathCount, &NodeId> = end_ids.iter()\n\n .map(|e_id| (e_id,\n\n self.dists.get(start_id)\n\n .unwrap()\n\n .paths_for_weight_in_id(lew, e_id)\n\n .unwrap())\n\n )\n\n // .filter(|(_, paths)| paths.is_some())\n\n .map(|(id, paths)| (paths, id))\n\n .collect();\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 62, "score": 187774.13755421527 }, { "content": " self.depth = None;\n\n self.dists.clear();\n\n }\n\n\n\n // #[cfg(feature = \"unstable\")]\n\n // pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {\n\n // self.dists.try_reserve(additional)\n\n // }\n\n}\n\n\n\nimpl<W: NWDistribution> WDLevel<W> {\n\n /// Returns the highest 'lew' (lowest existing weight) from all the weight distributions in the\n\n /// level.\n\n ///\n\n /// Panics if the level is empty!\n\n pub fn highest_lew(&self) -> u32 {\n\n self.dists.iter()\n\n .map(|(_, distr)| distr.lowest_existing_weight())\n\n .max().expect(\"The level is empty!\")\n\n }\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 63, "score": 187773.6607855716 }, { "content": " .collect()\n\n }\n\n}\n\n\n\nimpl<W: NWDistribution> 
WDLevel<W> {\n\n pub fn lew(&self) -> (u32, Vec<NodeId>) {\n\n let mut map = BTreeMap::new();\n\n for (id, lew) in self.dists.iter()\n\n .map(|(id, dist)| (id, dist.lowest_existing_weight()))\n\n {\n\n let ids = map.entry(lew).or_insert(Vec::new());\n\n ids.push(id.clone());\n\n }\n\n map.into_iter().next().unwrap()\n\n }\n\n\n\n /// Returns all dists whose lew is the level nt-lew.\n\n /// This means that nodes whose lew is the trivial lew but which also the level nt-lew present\n\n /// are *not* included. If they are desired, then currently you must filter for them yourself.\n\n /// (NOTE: under normal differential/linear hull searches, a node will have its lew be either\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 64, "score": 187773.25960807948 }, { "content": "\n\n /// A sorted set of the LEWs (Lowest Existing Weight) in the level, and the count of how many\n\n /// nodes have that LEW.\n\n pub fn count_lews(&self) -> BTreeMap<u32, i32> {\n\n let mut map = BTreeMap::new();\n\n self.dists.iter()\n\n .map(|(_, distr)| distr.lowest_existing_weight())\n\n .for_each(|lew| {\n\n let count = map.entry(lew).or_insert(0);\n\n *count += 1;\n\n });\n\n map\n\n }\n\n\n\n // todo remove or change into returning an iterator instead! 
This is a potential memory hog!\n\n pub fn nodes_with_lew(&self, lew: u32) -> Vec<NodeId> {\n\n self.dists.iter()\n\n .map(|(id, distr)|(id, distr.lowest_existing_weight()))\n\n .filter(|(_, nlew)| *nlew == lew)\n\n .map(|(id, _)| id.clone())\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 65, "score": 187771.46748712787 }, { "content": " let (paths, end_id) = e_id_lew_paths.into_iter().next().unwrap();\n\n // Mulig fixme\n\n // Now we have start, end, lew, paths\n\n let p = sorted.entry(lew).or_insert(BTreeMap::new());\n\n // (lew, (paths, (start_id, end_id)))\n\n p.insert(paths.clone(), (*start_id, *end_id));\n\n\n\n }\n\n }\n\n\n\n if sorted.is_empty() {\n\n return None;\n\n }\n\n // Get lowest nt lew\n\n let (lew, paths) = sorted.iter().next().unwrap();\n\n // get highest paths for that lew, and start and end node\n\n let (path, (start_id, end_id)) = paths.iter().last().unwrap();\n\n\n\n\n\n let paths = path.clone();\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 66, "score": 187770.72427827562 }, { "content": " /// the trivial lew OR the nt_lew, but not both. 
So this may be an issue only for other use cases).\n\n pub fn nt_lew(&self) -> Option<(u32, Vec<NodeId>)> {\n\n let mut map = BTreeMap::new();\n\n\n\n for (id, lew) in self.dists.iter()\n\n .map(|(id, dist)| (id, dist.lowest_existing_non_trivial_weight()))\n\n {\n\n if lew.is_some() {\n\n let ids = map.entry(lew.unwrap()).or_insert(Vec::new());\n\n ids.push(id.clone());\n\n }\n\n }\n\n if map.is_empty() {\n\n None\n\n } else {\n\n Some(map.into_iter().next().unwrap())\n\n }\n\n }\n\n\n\n pub fn existing_weights(&self) -> BTreeMap<u32, Vec<NodeId>> {\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 67, "score": 187770.08246175718 }, { "content": "\n\n pub fn depth(&self) -> Option<usize> {\n\n self.depth\n\n }\n\n\n\n pub fn insert(&mut self, node: NodeId, distribution: W) {\n\n self.dists.insert(node, distribution);\n\n }\n\n\n\n pub fn entry(&mut self, node: NodeId) -> Entry<NodeId, W> {\n\n self.dists.entry(node)\n\n }\n\n\n\n pub fn get(&self, node_id: &NodeId) -> Option<&W> {\n\n self.dists.get(node_id)\n\n }\n\n\n\n pub fn iter(&self) -> Iter<'_, Id, W> {\n\n self.dists.iter()\n\n }\n", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 68, "score": 187769.43257803886 }, { "content": " let highest = *weight.existing_weights().iter().last().unwrap();\n\n let dist = format!(\"{:0>w$b}\", weight.existing_weights().iter()\n\n .fold(0_u128,\n\n |acc, i| {acc | 1 << i }\n\n ), w = highest as usize\n\n );\n\n\n\n\n\n // writeln!(f, \"{: >8}Id: {: >11}, trail_high: {: >3}, trail_low: {: >3}. Dist len: {}, Dist: {}. Raw: {:?}\",\n\n writeln!(f, \"{: >8}Id: {: >11}, trail_high: {: >3}, trail_low: {: >3}. 
Dist len: {}, Dist: {}.\",\n\n \"\",\n\n id,\n\n highest,\n\n weight.lowest_existing_weight(),\n\n W::SUPPORTED_DISTRIBUTION_LEN,\n\n dist,\n\n // weight,\n\n )?;\n\n };\n\n Ok(())\n\n }\n\n}", "file_path": "crush/src/soc/bdd/differential/wd/arenas/level.rs", "rank": 69, "score": 187756.79565533195 }, { "content": "fn count_nvar(llc: &dyn LLHandler, sc: &dyn SBoxHandler, nr_rounds: usize) -> usize {\n\n // Account for initial in block variables\n\n let mut n_vars = llc.block_size(0);\n\n\n\n for r in 0..nr_rounds {\n\n // Add the count of out-bits for each S-box\n\n for s in 0..sc.num_sboxes(r) {\n\n n_vars += sc.sbox_size_out(r, s);\n\n }\n\n }\n\n n_vars\n\n}", "file_path": "pathfinder/src/code_gen/soc_gen.rs", "rank": 70, "score": 187455.46769209704 }, { "content": "/// Produce a Vec<Bit> of the provided len with constants random bits.\n\npub fn random_bits(len: usize) -> Vec<Bit> {\n\n let mut rng = rand::thread_rng();\n\n let die = Uniform::from(0..2);\n\n let mut bits = Vec::with_capacity(len);\n\n for _ in 0..len {\n\n let throw = die.sample(&mut rng);\n\n match throw {\n\n 0 => bits.push(bit!(false)),\n\n 1 => bits.push(bit!(true)),\n\n _ => panic!(\"not supposed to happen\"),\n\n }\n\n }\n\n bits\n\n}\n\n\n", "file_path": "cryptapath/src/bit.rs", "rank": 71, "score": 187072.362536896 }, { "content": "/// Convert a binary string (ie a string composed of '0' and '1') to the corresponding Vec<Bit>\n\n/// with all Bit in the Vec constants.\n\npub fn bits_from_binary_string(b_str: &str) -> Vec<Bit> {\n\n let out_bits: Vec<Bit> = b_str\n\n .chars()\n\n .map(|char| match char {\n\n '0' => bit!(false),\n\n '1' => bit!(true),\n\n _ => panic!(\"{} this is not a binary string\", b_str),\n\n })\n\n .collect();\n\n out_bits\n\n}\n\n\n", "file_path": "cryptapath/src/bit.rs", "rank": 72, "score": 185032.91829835434 }, { "content": "/// Convert an hex string (ie a string composed of hexadecimal characters) to the corresponding Vec<Bit>\n\n/// with all Bit in the Vec 
constants.\n\npub fn bits_from_hex_string(h_str: &str) -> Vec<Bit> {\n\n let h_str = h_str\n\n .replace(\"0x\", \"\")\n\n .replace(\"0X\", \"\")\n\n .replace(\"\\\\x\", \"\")\n\n .replace(\"\\\\X\", \"\")\n\n .replace(\"x\", \"\")\n\n .replace(\"X\", \"\")\n\n .replace(\" \", \"\");\n\n assert!(h_str.len() % 2 == 0);\n\n let mut b_str = String::new();\n\n for i in 0..h_str.len() / 2 {\n\n b_str.push_str(\n\n format!(\n\n \"{:08b}\",\n\n u8::from_str_radix(\n\n h_str\n\n .chars()\n\n .skip(i * 2)\n\n .take(2)\n", "file_path": "cryptapath/src/bit.rs", "rank": 73, "score": 185032.88126455504 }, { "content": "#[derive(Default, Clone)]\n\nstruct LinBank {\n\n lin_eqs: Vec<LinEq>,\n\n}\n\n\n\nimpl System {\n\n /// Construct a new System with default parameters\n\n pub fn new() -> System {\n\n Default::default()\n\n }\n\n\n\n /// Construct a `System` from a `Vec` of `Bdd` using the `nvar` of the first `Bdd`\n\n /// as its `nvar`.\n\n ///\n\n /// Will return an `Error` if all Bdds don't have the same `nvar`.\n\n pub fn from_elem(bdds: Vec<Bdd>) -> Result<System, Error> {\n\n let mut bdds = bdds;\n\n let mut sys = System::new();\n\n if bdds.is_empty() {\n\n return Err(Error::new(ErrorKind::InvalidInput, \"Empty vec\"));\n\n }\n", "file_path": "crush/src/soc/system.rs", "rank": 74, "score": 182366.4202134697 }, { "content": "pub fn bits_from_hex_string_keccak(h_str: &str) -> Vec<Bit> {\n\n let h_str = h_str\n\n .replace(\"0x\", \"\")\n\n .replace(\"0X\", \"\")\n\n .replace(\"\\\\x\", \"\")\n\n .replace(\"\\\\X\", \"\")\n\n .replace(\"x\", \"\")\n\n .replace(\"X\", \"\")\n\n .replace(\" \", \"\");\n\n assert!(h_str.len() % 2 == 0);\n\n let mut b_str = String::new();\n\n for i in 0..h_str.len() / 2 {\n\n b_str.push_str(\n\n format!(\n\n \"{:08b}\",\n\n u8::from_str_radix(\n\n h_str\n\n .chars()\n\n .skip(i * 2)\n\n .take(2)\n", "file_path": "cryptapath/src/targets/keccak.rs", "rank": 75, "score": 180986.00067697163 }, { "content": "#[inline]\n\npub fn get_max_set_bit(vob: 
&Vob) -> Option<usize> {\n\n vob.iter_set_bits(..).last()\n\n}\n\n\n", "file_path": "crush/src/algebra/mod.rs", "rank": 76, "score": 180920.07700058405 }, { "content": "pub fn start_post_processing<B, S, P> (\n\n mut master: Shard,\n\n master_meta: SolvedSocMeta,\n\n lhss: Matrix,\n\n handlers: Handlers<B, S>,\n\n progress: P,\n\n _cipher_name: String,\n\n tx: Sender<LogType>,\n\n mode: AnalysisMode,\n\n\n\n) -> ProcessedResult\n\n where\n\n B: BTHandler + Debug,\n\n S: SBoxHandler,\n\n P: PPFactory,\n\n{\n\n let main_pb = progress.new_progress_bar(5);\n\n main_pb.set_message(\"Starting PP!\");\n\n //quickfix, out destination to be given as param, not created here\n\n\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/mod.rs", "rank": 77, "score": 178924.85713083256 }, { "content": "struct EmptyLibrarian {\n\n\n\n}\n\n\n\nimpl PruneLogger for EmptyLibrarian {\n\n fn record(&mut self, rec: PruneRecord) {\n\n }\n\n}\n", "file_path": "crush/src/soc/bdd/differential/test.rs", "rank": 78, "score": 176334.53355095568 }, { "content": "pub fn example_expanded_path<B,S>(\n\n inner: Path,\n\n handlers: &Handlers<B,S>,\n\n alpha_path: Path,\n\n beta_path: Path,\n\n lhss: Matrix,\n\n) -> Path\n\n where\n\n B: BTHandler,\n\n S: SBoxHandler,\n\n{\n\n let num_rounds = handlers.bt_handler().nr_of_rounds();\n\n let sbh = handlers.sb_handler();\n\n\n\n let mut sierra_tau_path = alpha_path;\n\n sierra_tau_path.append(&inner);\n\n sierra_tau_path.append(&beta_path);\n\n\n\n let expanded = sierra_tau_path.expand_to_full_path(&lhss, sbh, num_rounds);\n\n expanded\n\n}\n\n\n\n\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc/extract_alpha_beta.rs", "rank": 79, "score": 175241.14156384152 }, { "content": "/// Extracts exactly one path from 'start_depth' to 'end_depth'. Calling this fn twice with the same\n\n/// parameters may yield the same path twice, or it may yield two different paths. 
No guarantees are\n\n/// given.\n\n///\n\n/// *INVARIANT* This method assumes that there is exactly *one* node on the start_depth level and\n\n/// exactly *one* node on the end depth level.\n\nfn extract_a_single_path(master: Arc<Shard>, start_depth: Depth, end_depth: Depth) -> Path {\n\n\n\n let start_nodes = master\n\n .level(start_depth)\n\n .expect(\"Start level is missing!\")\n\n .get_nodes();\n\n assert_eq!(start_nodes.len(), 1);\n\n let mut current_node = start_nodes.values().next().expect(\"Start level is empty!\");\n\n\n\n let mut current_depth: Depth = start_depth;\n\n let mut path = Vob::with_capacity(end_depth - start_depth);\n\n\n\n loop {\n\n if current_depth == end_depth {\n\n break path.into();\n\n }\n\n let child_depth = current_depth + 1;\n\n\n\n // Perhaps add an rand call of sorts, to vary which edge we try first? Any alpha path ending\n\n // in the same alpha node will have the same weight, but this implementation will somewhat\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc/mod.rs", "rank": 80, "score": 174114.6444764007 }, { "content": "pub fn fill_partial_value(partial_value: &str) -> (Vec<Bit>, Vec<usize>) {\n\n let mut known_bits = Vec::new();\n\n let mut value = Vec::with_capacity(partial_value.len());\n\n partial_value\n\n .chars()\n\n .enumerate()\n\n .for_each(|(i, c)| match c {\n\n 'x' | 'X' => value.push(bit::random_bits(1).pop().unwrap()),\n\n '0' => {\n\n value.push(bit!(false));\n\n known_bits.push(i)\n\n }\n\n '1' => {\n\n value.push(bit!(true));\n\n known_bits.push(i)\n\n }\n\n _ => panic!(\"illegal char in value string, should only contain X, x, 0 or 1\"),\n\n });\n\n (value, known_bits)\n\n}\n\n\n", "file_path": "cryptapath/src/targets/mod.rs", "rank": 81, "score": 173993.99581976066 }, { "content": "pub fn example_expanded_path<B,S>(\n\n inner: Path,\n\n handlers: &Handlers<B,S>,\n\n alpha_path: Path,\n\n beta_path: Path,\n\n lhss: Matrix,\n\n) -> Path\n\n where\n\n B: BTHandler,\n\n S: 
SBoxHandler,\n\n{\n\n let num_rounds = handlers.bt_handler().nr_of_rounds();\n\n let sbh = handlers.sb_handler();\n\n\n\n let mut sierra_tau_path = alpha_path;\n\n sierra_tau_path.append(&inner);\n\n sierra_tau_path.append(&beta_path);\n\n\n\n let expanded = sierra_tau_path.expand_to_full_path(&lhss, sbh, num_rounds);\n\n expanded\n\n}\n\n\n\n\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc_v2/extract_alpha_beta.rs", "rank": 82, "score": 173567.94102536712 }, { "content": "/// The construction of the Alpha->Beta WDLevel is the most memory intensive part of the analysis,\n\n/// and may easily far bypass the memory consumption of the Master shard, even pre-pruning!\n\n/// Special care is therefore needed, and this fn intends to provide just that care.\n\n///\n\n/// My current main theory for what the issue may be, is that if we have 'many' candidates, and\n\n/// also a very wide widest level (or maybe even before!), even the total number of Dists drives the\n\n/// memory consumption too high. My plan to fix this is to see if we cannot batch handle a smaller\n\n/// amount of candidates at a time, before folding them together into a final WDLevel. 
This *will*\n\n/// increase time consumption (maybe unless I manage to parallelize it w/o returning to the original\n\n/// problem), but should allow us to keep the memory consumption within reason.\n\n///\n\n/// For now, I will not do this, as finding the right batch size will require some work, and I'm out\n\n/// of time....\n\nfn make_alpha_beta_level(alpha_candidates: &Vec<Vec<(Id, WDPresence)>>,\n\n cache: &mut Cache,\n\n master: &Shard,\n\n progress: &impl PPFactory)\n\n-> Arc<WDLevel<EndNodeDist>>\n\n{\n\n let (_widest, _s_widest) = cache.record_master_layout(master);\n\n\n\n let targets: Vec<Id> = alpha_candidates.iter()\n\n .flat_map(|vec| vec.iter())\n\n .map(|(id, _)| id.clone())\n\n .collect();\n\n\n\n let alpha_beta_dists = Arc::new(cache.make_and_analyse_alpha_beta(master, targets, progress));\n\n\n\n alpha_beta_dists\n\n}", "file_path": "pathfinder/src/diff_solver/post_processing_v5/mod.rs", "rank": 83, "score": 173492.35529387285 }, { "content": "/// Extracts exactly one path from 'start_depth' to 'end_depth'. Calling this fn twice with the same\n\n/// parameters may yield the same path twice, or it may yield two different paths. 
No guarantees are\n\n/// given.\n\n///\n\n/// *INVARIANT* This method assumes that there is exactly *one* node on the start_depth level and\n\n/// exactly *one* node on the end depth level.\n\nfn extract_a_single_path(master: Arc<Shard>, start_depth: Depth, end_depth: Depth) -> Path {\n\n\n\n let start_nodes = master\n\n .level(start_depth)\n\n .expect(\"Start level is missing!\")\n\n .get_nodes();\n\n assert_eq!(start_nodes.len(), 1);\n\n let mut current_node = start_nodes.values().next().expect(\"Start level is empty!\");\n\n\n\n let mut current_depth: Depth = start_depth;\n\n let mut path = Vob::with_capacity(end_depth - start_depth);\n\n\n\n loop {\n\n if current_depth == end_depth {\n\n break path.into();\n\n }\n\n let child_depth = current_depth + 1;\n\n\n\n // Perhaps add an rand call of sorts, to vary which edge we try first? Any alpha path ending\n\n // in the same alpha node will have the same weight, but this implementation will somewhat\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc_v2/mod.rs", "rank": 84, "score": 172710.6422382766 }, { "content": "/// Keys are the probability/weight, value is the number of paths with that probability/weight\n\npub fn calculate_path_probabilities<B, S, P> (\n\n step: NonZeroUsize,\n\n rx: Receiver<Path>,\n\n handlers: &Handlers<B, S>,\n\n alpha_path: Path,\n\n beta_path: Path,\n\n lhss: Matrix,\n\n pb: P::ProgressBar,\n\n upper_lim: usize,\n\n) -> Result<BTreeMap<usize, usize>, BTreeMap<usize, usize>>\n\n where\n\n B: BTHandler,\n\n S: SBoxHandler,\n\n P: PPFactory,\n\n{\n\n pb.set_message(\"Calculating...\");\n\n // Where all the probabilities are collected\n\n let mut all_probs = BTreeMap::new();\n\n let mut sum_paths = 0;\n\n //TODO make able to set by user\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc/extract_alpha_beta.rs", "rank": 85, "score": 171721.53959006973 }, { "content": "/// Keys are the probability/weight, value is the number of paths with that 
probability/weight\n\npub fn calculate_path_probabilities<B, S, P> (\n\n step: NonZeroUsize,\n\n rx: Receiver<Path>,\n\n handlers: &Handlers<B, S>,\n\n alpha_path: Path,\n\n beta_path: Path,\n\n lhss: Matrix,\n\n pb: P::ProgressBar,\n\n) -> Result<BTreeMap<usize, usize>, BTreeMap<usize, usize>>\n\n where\n\n B: BTHandler,\n\n S: SBoxHandler,\n\n P: PPFactory,\n\n{\n\n pb.set_message(\"Calculating...\");\n\n // Where all the probabilities are collected\n\n let mut all_probs = BTreeMap::new();\n\n let mut sum_paths = 0;\n\n //TODO make able to set by user\n\n let upper_lim = 2_usize.pow(26);\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/hull_calc_v2/extract_alpha_beta.rs", "rank": 86, "score": 170093.31795893575 }, { "content": "pub fn build_cipher_by_name(name: &str, rounds: usize) -> Option<Box<dyn Cipher>> {\n\n match name {\n\n \"skinny64128\" => Some(Box::new(Skinny64::new(128, rounds))),\n\n \"skinny128128\" => Some(Box::new(Skinny128::new(128, rounds))),\n\n \"lowmc64\" => Some(Box::new(LowMC::new(rounds, 64, 80, 1))),\n\n \"lowmc128\" => Some(Box::new(LowMC::new(rounds, 128, 80, 31))),\n\n \"lowmc256\" => Some(Box::new(LowMC::new(rounds, 256, 256, 1))),\n\n \"miniaes2x2\" => Some(Box::new(MiniAES2x2::new(rounds))),\n\n \"miniaes4x4\" => Some(Box::new(MiniAES4x4::new(rounds))),\n\n \"present80\" => Some(Box::new(Present80::new(rounds))),\n\n \"prince\" => Some(Box::new(Prince::new(rounds, true))),\n\n \"prince-core\" => Some(Box::new(Prince::new(rounds, false))),\n\n \"des\" => Some(Box::new(DES::new(rounds))),\n\n _ => None,\n\n }\n\n}\n", "file_path": "cryptapath/src/targets/mod.rs", "rank": 87, "score": 169426.6334959523 }, { "content": "/// Quickfix\n\npub trait PruneLogger {\n\n fn record(&mut self, rec: PruneRecord);\n\n}\n\n\n", "file_path": "crush/src/soc/bdd/differential/logging/mod.rs", "rank": 88, "score": 166768.8293974516 }, { "content": "fn mult_08(y: u8) -> u8 {\n\n mult_02(mult_04(y))\n\n}\n\n\n\nimpl Cipher for Klein {\n\n fn 
structure(&self) -> CipherStructure {\n\n CipherStructure::Spn\n\n }\n\n\n\n fn size(&self) -> usize {\n\n self.size\n\n }\n\n\n\n fn key_size(&self) -> usize {\n\n self.key_size\n\n }\n\n\n\n fn num_sboxes(&self) -> usize {\n\n self.size / self.sbox.size_in()\n\n }\n", "file_path": "soccs/src/dl/cg_original/cipher/klein.rs", "rank": 89, "score": 166007.99091641232 }, { "content": "fn mult_02(y: u8) -> u8 {\n\n let t = y << 1;\n\n let u = 0xff * ((y >> 7) & 0x1);\n\n (u & 0x1b) ^ t\n\n}\n\n\n\n/**\n\nCalculate y*04 in the Rijndael finite field\n\n*/\n", "file_path": "soccs/src/dl/cg_original/cipher/klein.rs", "rank": 90, "score": 166007.99091641232 }, { "content": "fn mult_04(y: u8) -> u8 {\n\n mult_02(mult_02(y))\n\n}\n\n\n\n/**\n\nCalculate y*08 in the Rijndael finite field\n\n*/\n", "file_path": "soccs/src/dl/cg_original/cipher/klein.rs", "rank": 91, "score": 166007.99091641232 }, { "content": "/// NodeWeightDistribution:\n\n/// The weight distribution for a single node\n\n///\n\n/// A 'present' weight is a weight with at least one path present in the distribution. That means\n\n/// that at least one path of that weight is present in the Node this distribution belongs to.\n\n///\n\n///\n\n/// IMPORTANT: The trivial path is expected to have weight 0.\n\npub trait NWDistribution\n\n where Self: Add<Output = Self> + AddAssign + Clone + Debug + Hash {\n\n\n\n /// The maximum number of weights supported. I.e. 
this is the maximum number of active S-boxes\n\n /// the implementor can keep track of.\n\n const SUPPORTED_DISTRIBUTION_LEN: usize;\n\n\n\n /// Return a new instance of self, where all weights are zero.\n\n fn new_zeroed() -> Self;\n\n\n\n /// Return a new instance of self, where *only* the *trivial* path is present.\n\n /// Argument 'id' is expected to be the id of the node at the end of the path.\n\n /// This enables a path to have both a start node_id and end node_id.\n\n fn new_trivial(id: &Id) -> Self;\n\n\n\n /// We've passed a one edge, and need to update the distribution accordingly. Increment the\n\n /// weights by one.\n\n fn increment_distribution(&mut self);\n\n\n\n /// Return the weight of the lowest present weight.\n", "file_path": "crush/src/soc/bdd/differential/wd/distribution/mod.rs", "rank": 92, "score": 164702.38325604922 }, { "content": "#[ignore]\n\n#[test]\n\nfn test_prune_simple() {\n\n todo!(\"Rework to make compile again, after much modification in paren module\");\n\n // let mut actual = crate::bdd!(6;1;\n\n // [\n\n // (\"0\",[(1;2,3)]);\n\n // (\"1\",[(2;4,5);(3;6,7)]);\n\n // (\"2\",[(4;8,0);(5;0,8);(6;9,0);(7;10,0)]);\n\n // (\"3\",[(8;11,0);(9;12,0);(10;0,12)]);\n\n // (\"4\",[(11;13,0);(12;0,14)]);\n\n // (\"5\",[(13;15,0);(14;0,15)]);\n\n // (\"\",[(15;0,0)])\n\n // ]);\n\n //\n\n // println!(\"Before: \\n{:#?}\", actual);\n\n // let deleted = actual.weight_based_prune(2, .., 2);\n\n // println!(\"After : \\n{:#?}\", actual);\n\n //\n\n // let arena_e: BTreeMap<usize,\n\n // HashMap<Id, u128, BuildHasherDefault<ahash::AHasher>>> =\n\n // [\n", "file_path": "crush/src/soc/bdd/differential/test.rs", "rank": 93, "score": 163828.72114988047 }, { "content": "/// For the given weights and the corresponding count of paths with those weights, calculate the\n\n/// estimated differential/hull probability of this alpha -> beta characteristic.\n\n/// The given weights must be the weights for the complete (but not the extended) trail.\n\nfn 
make_estimate(tau_alpha_sub_dist: &BTreeMap<u32, PathCount>, k: f64, alpha_level_nt_lew: u32) -> f64 {\n\n // Assuming that the weights are for the complete (but not extended) trail, then\n\n // the estimate will be calculated as\n\n // count0 * 2^(-(weight0 - alpha__level__nt lew)*k)\n\n // + count1 * 2^(-(weight1 - alpha__level__nt lew)*k)\n\n // + etc...\n\n // This will allow us to compare apples with apples later on.\n\n\n\n tau_alpha_sub_dist.iter()\n\n .fold( 0.0,\n\n |mut acc, (weight, count)|\n\n {\n\n let pow = 2_f64.powf\n\n (\n\n -k * (weight - alpha_level_nt_lew)\n\n .to_f64().expect(\"Failed to convert count to f64\")\n\n ); //FIXME check how the estimatges are sorted later on!\n\n\n\n acc += *count as f64 * pow;\n\n acc\n\n }\n\n )\n\n}", "file_path": "pathfinder/src/diff_solver/post_processing_v5/sess_handling.rs", "rank": 94, "score": 163436.8098954961 }, { "content": "/// NodeCountedWeightDistribution:\n\n/// The paths in the distribution has some number associated with them\n\npub trait NcWDistribution\n\n where Self: NWDistribution {\n\n\n\n /// For the given weight, returns the associated count. In terms of a path distribution, this\n\n /// 'associated count' usually means the number of paths with the given weight reachable\n\n /// from 'self'. (See discussion on 'number of paths' in ?? 
).\n\n fn paths_for_weight(&self, weight: u32) -> Option<&PathCount>;\n\n\n\n /// Returns the weight of the 'lowest existing weight' (lew) of the node\n\n /// distribution, along with the associated count.\n\n fn lew_with_paths(&self) -> (u32, &PathCount);\n\n\n\n /// Returns the weight of the 'lowest existing non-trivial weight' (nt-lew) of the node\n\n /// distribution, along with the associated count.\n\n /// Returns None if only the trivial lew is present in the distribution.\n\n fn nt_lew_with_paths(&self) -> Option<(u32, &PathCount)>;\n\n\n\n /// Returns a mapping between the existing weights in the distribution, and their respective\n\n /// associated counts.\n\n /// That means that any weight missing from this map should have '0' as their respective\n\n /// associated count.\n\n fn existing_weights_with_counts(&self) -> BTreeMap<u32, &PathCount>;\n\n\n\n /// Returns the total number of paths present in the distribution, across all weights.\n\n /// Returns a tuple of the addition along with a boolean indicating whether an arithmetic\n\n /// overflow would occur. 
If an overflow would have occurred then the wrapped value is returned.\n\n fn total_number_of_paths_overflowing(&self) -> (usize, bool);\n\n\n\n}\n\n\n", "file_path": "crush/src/soc/bdd/differential/wd/distribution/mod.rs", "rank": 95, "score": 162696.7826593647 }, { "content": "pub trait Node2NodeDistribution\n\n where Self: NWDistribution\n\n{\n\n type W: NWDistribution;\n\n\n\n ///\n\n fn nt_lew_and_e_ids(&self) -> Option<(u32, Vec<Id>)>;\n\n\n\n /// Returns the corresponding count iff id is in self, and weight in id is present.\n\n fn paths_for_weight_in_id(&self, weight: u32, id: &Id) -> Option<&PathCount>;\n\n\n\n /// For the given weight, return any end_node the count corresponding to weight iff\n\n /// the weight is present, or None otherwise.\n\n fn paths_for_weight(&self, weight: u32) -> Option<AHashMap<Id, &PathCount>>;\n\n\n\n /// For each connection from self to end node, return the lew and its count\n\n /// The keys in the HashMap are the Id's of the end point of the connection.\n\n // TODO make return value into a NonNullPathCount\n\n fn lew_with_paths_per_connection(&self) -> AHashMap<Id, (u32, &PathCount)>;\n\n\n", "file_path": "crush/src/soc/bdd/differential/wd/distribution/mod.rs", "rank": 96, "score": 162686.19754409054 }, { "content": "#[test]\n\nfn join_empty_bdd() -> Result<(), Error> {\n\n let empty_bdd = bdd!(5;0;[(\"\",[(1;0,0)])]);\n\n let bdd = bdd!(5;1;[(\"1+2\",[(1;2,3)]);(\"3+2\",[(2;4,5);(3;4,0)]);(\"0+4\",[(4;0,6);(5;6,0)]);(\"\",[(6;0,0)])]);\n\n let mut system = system![bdd, empty_bdd]?;\n\n let join_id = system.join_bdds(Id::new(0), Id::new(1))?;\n\n let result = system\n\n .pop_bdd(join_id)\n\n .expect(\"Bdd of id joined should be in the system\");\n\n let expected_result = bdd!(5;1;[(\"1+2\",[(1;2,3)]);(\"3+2\",[(2;4,5);(3;4,0)]);(\"0+4\",[(4;0,6);(5;6,0)]);(\"\",[(6;0,0)])]);\n\n assert_eq!(result, expected_result);\n\n\n\n // check that bdd is unchanged when joined with an empty BDD independantly of the order\n\n let 
empty_bdd = bdd!(5;0;[(\"\",[(1;0,0)])]);\n\n let bdd = bdd!(5;1;[(\"1+2\",[(1;2,3)]);(\"3+2\",[(2;4,5);(3;4,0)]);(\"0+4\",[(4;0,6);(5;6,0)]);(\"\",[(6;0,0)])]);\n\n let mut system = system![bdd, empty_bdd]?;\n\n let join_id = system.join_bdds(Id::new(1), Id::new(0))?;\n\n let result = system\n\n .pop_bdd(join_id)\n\n .expect(\"Bdd of id joined should be in the system\");\n\n let expected_result = bdd!(5;1;[(\"1+2\",[(1;2,3)]);(\"3+2\",[(2;4,5);(3;4,0)]);(\"0+4\",[(4;0,6);(5;6,0)]);(\"\",[(6;0,0)])]);\n\n assert_eq!(result, expected_result);\n\n Ok(())\n\n}\n\n\n", "file_path": "crush/src/soc/test.rs", "rank": 97, "score": 161823.2490320967 }, { "content": "pub trait DistFactory<W> {\n\n\n\n /// Return a new instance of W, where all weights are zero.\n\n fn new_zeroed(&self) -> W;\n\n\n\n /// Return a new instance of W, where *only* the *trivial* path is present.\n\n /// Argument 'id' is expected to be the id of the node at the end of the path.\n\n /// This enables a path to have both a start node_id and end node_id.\n\n fn new_trivial(&self, id: &Id) -> W;\n\n}\n\n\n\n\n\n/// A factory which return always returns W::new_trivial(), independently of the Id given.\n\n/// Use this factory when making an Arena/Level covering all nodes.\n\npub struct TransparentFactory {}\n\n\n\nimpl TransparentFactory {\n\n #[inline]\n\n pub fn new() -> Self {\n\n Self {}\n", "file_path": "crush/src/soc/bdd/differential/wd/distribution/dist_factories.rs", "rank": 98, "score": 159549.08953493394 }, { "content": "/// Deleting all nodes which are not relevant for the hull.\n\n/// Deleting these other nodes will leave us with only the inner paths which are relevant for our\n\n/// differential/hull calculation, saving us space (we no longer need an WDArena for the inner\n\n/// levels, which could take more space than master itself!), and also saving us computation (no\n\n/// longer need to check every node in every cohort to see which path takes us to the End Node.\n\n/// Now they all 
do).\n\nfn delete_non_sess_estimate_nodes(master: &mut Shard, master_md: &SolvedSocMeta, sess_estimate: &SessEstimate) {\n\n // Deleting from beta level\n\n let beta_level = master.level(master_md.beta_lvl_depth)\n\n .expect(\"Beta level is missing\");\n\n\n\n let mut to_delete = beta_level.get_nodes().clone();\n\n let target_node = to_delete.remove(&sess_estimate.end());\n\n // Sanity check\n\n if target_node.is_none() {\n\n panic!(\"We failed for some reason to remove the End Node from the set of nodes to be deleted\");\n\n }\n\n master.delete_all_marked_nodes_from_level(to_delete.keys().collect(),\n\n master_md.beta_lvl_depth);\n\n\n\n // Deleting from alpha level\n\n let alpha_level = master.level(master_md.alpha_lvl_depth)\n\n .expect(\"Alpha level is missing\");\n\n\n\n let mut to_delete = alpha_level.get_nodes().clone();\n\n let target_node = to_delete.remove(&sess_estimate.start());\n\n // Sanity check\n\n if target_node.is_none() {\n\n panic!(\"We failed for some reason to remove the Start Node from the set of nodes to be deleted\");\n\n }\n\n master.delete_all_marked_nodes_from_level(to_delete.keys().collect(),\n\n master_md.alpha_lvl_depth);\n\n}\n\n\n", "file_path": "pathfinder/src/diff_solver/post_processing_v5/mod.rs", "rank": 99, "score": 159485.5210447514 } ]
Rust
examples/aes/ta/src/main.rs
mathias-arm/rust-optee-trustzone-sdk
a1954b8a95dd77e396fb86e21dea2dec3fa5d9a4
#![no_main] use optee_utee::{ ta_close_session, ta_create, ta_destroy, ta_invoke_command, ta_open_session, trace_println, }; use optee_utee::{AlgorithmId, Cipher, OperationMode}; use optee_utee::{AttributeId, AttributeMemref, TransientObject, TransientObjectType}; use optee_utee::{Error, ErrorKind, Parameters, Result}; use proto::{Algo, Command, KeySize, Mode}; use std::boxed::Box; pub struct AesCipher { pub key_size: usize, pub cipher: Cipher, pub key_object: TransientObject, } impl Default for AesCipher { fn default() -> Self { Self { key_size: 0, cipher: Cipher::null(), key_object: TransientObject::null_object(), } } } #[ta_create] fn create() -> Result<()> { trace_println!("[+] TA create"); Ok(()) } #[ta_open_session] fn open_session(_params: &mut Parameters, _sess_ctx: &mut AesCipher) -> Result<()> { trace_println!("[+] TA open session"); Ok(()) } #[ta_close_session] fn close_session(_sess_ctx: &mut AesCipher) { trace_println!("[+] TA close session"); } #[ta_destroy] fn destroy() { trace_println!("[+] TA destory"); } #[ta_invoke_command] fn invoke_command(sess_ctx: &mut AesCipher, cmd_id: u32, params: &mut Parameters) -> Result<()> { trace_println!("[+] TA invoke command"); match Command::from(cmd_id) { Command::Prepare => { return alloc_resources(sess_ctx, params); } Command::SetKey => { return set_aes_key(sess_ctx, params); } Command::SetIV => { return reset_aes_iv(sess_ctx, params); } Command::Cipher => { return cipher_buffer(sess_ctx, params); } _ => { return Err(Error::new(ErrorKind::BadParameters)); } } } pub fn ta2tee_algo_id(algo_id: u32) -> Result<AlgorithmId> { match Algo::from(algo_id) { Algo::ECB => Ok(AlgorithmId::AesEcbNopad), Algo::CBC => Ok(AlgorithmId::AesCbcNopad), Algo::CTR => Ok(AlgorithmId::AesCtr), _ => Err(Error::new(ErrorKind::BadParameters)), } } pub fn ta2tee_key_size(key_sz: u32) -> Result<usize> { match KeySize::from(key_sz) { KeySize::Bit128 | KeySize::Bit256 => Ok(key_sz as usize), _ => Err(Error::new(ErrorKind::BadParameters)), } 
} pub fn ta2tee_mode_id(mode: u32) -> Result<OperationMode> { match Mode::from(mode) { Mode::Encode => Ok(OperationMode::Encrypt), Mode::Decode => Ok(OperationMode::Decrypt), _ => Err(Error::new(ErrorKind::BadParameters)), } } pub fn alloc_resources(aes: &mut AesCipher, params: &mut Parameters) -> Result<()> { let algo_value = unsafe { params.0.as_value().unwrap().a() }; let key_size_value = unsafe { params.1.as_value().unwrap().a() }; let mode_id_value = unsafe { params.2.as_value().unwrap().a() }; aes.key_size = ta2tee_key_size(key_size_value).unwrap(); aes.cipher = Cipher::allocate( ta2tee_algo_id(algo_value).unwrap(), ta2tee_mode_id(mode_id_value).unwrap(), aes.key_size * 8, ) .unwrap(); aes.key_object = TransientObject::allocate(TransientObjectType::Aes, aes.key_size * 8).unwrap(); let key = vec![0u8; aes.key_size as usize]; let attr = AttributeMemref::from_ref(AttributeId::SecretValue, &key); aes.key_object.populate(&[attr.into()])?; aes.cipher.set_key(&aes.key_object)?; Ok(()) } pub fn set_aes_key(aes: &mut AesCipher, params: &mut Parameters) -> Result<()> { let mut param0 = unsafe { params.0.as_memref().unwrap() }; let key = param0.buffer(); if key.len() != aes.key_size { trace_println!("[+] Get wrong key size !\n"); return Err(Error::new(ErrorKind::BadParameters)); } let attr = AttributeMemref::from_ref(AttributeId::SecretValue, &key); aes.key_object.reset(); aes.key_object.populate(&[attr.into()])?; aes.cipher.set_key(&aes.key_object)?; Ok(()) } pub fn reset_aes_iv(aes: &mut AesCipher, params: &mut Parameters) -> Result<()> { let mut param0 = unsafe { params.0.as_memref().unwrap() }; let iv = param0.buffer(); aes.cipher.init(iv); trace_println!("[+] TA initial vectore reset done!"); Ok(()) } pub fn cipher_buffer(aes: &mut AesCipher, params: &mut Parameters) -> Result<()> { let mut param0 = unsafe { params.0.as_memref().unwrap() }; let mut param1 = unsafe { params.1.as_memref().unwrap() }; let input = param0.buffer(); let output = param1.buffer(); if 
output.len() < input.len() { return Err(Error::new(ErrorKind::BadParameters)); } trace_println!("[+] TA tries to update ciphers!"); let tmp_size = aes.cipher.update(input, output).unwrap(); param1.set_updated_size(tmp_size); Ok(()) } const TA_FLAGS: u32 = 0; const TA_STACK_SIZE: u32 = 2 * 1024; const TA_DATA_SIZE: u32 = 1 * 1024 * 1024; const TA_VERSION: &[u8] = b"Undefined version\0"; const TA_DESCRIPTION: &[u8] = b"This is an AES example\0"; const EXT_PROP_VALUE_1: &[u8] = b"AES TA\0"; const EXT_PROP_VALUE_2: u32 = 0x0010; const TRACE_LEVEL: i32 = 4; const TRACE_EXT_PREFIX: &[u8] = b"TA\0"; const TA_FRAMEWORK_STACK_SIZE: u32 = 2048; include!(concat!(env!("OUT_DIR"), "/user_ta_header.rs"));
#![no_main] use optee_utee::{ ta_close_session, ta_create, ta_destroy, ta_invoke_command, ta_open_session, trace_println, }; use optee_utee::{AlgorithmId, Cipher, OperationMode}; use optee_utee::{AttributeId, AttributeMemref, TransientObject, TransientObjectType}; use optee_utee::{Error, ErrorKind, Parameters, Result}; use proto::{Algo, Command, KeySize, Mode}; use std::boxed::Box; pub struct AesCipher { pub key_size: usize, pub cipher: Cipher, pub key_object: TransientObject, } impl Default for AesCipher { fn default() -> Self { Self { key_size: 0, cipher: Cipher::null(), key_object: TransientObject::null_object(), } } } #[ta_create] fn create() -> Result<()> { trace_println!("[+] TA create"); Ok(()) } #[ta_open_session] fn open_session(_params: &mut Parameters, _sess_ctx: &mut AesCipher) -> Result<()> { trace_println!("[+] TA open session"); Ok(()) } #[ta_close_session] fn close_session(_sess_ctx: &mut AesCipher) { trace_println!("[+] TA close session"); } #[ta_destroy] fn destroy() { trace_println!("[+] TA destory"); } #[ta_invoke_command] fn invoke_command(sess_ctx: &mut AesCipher, cmd_id: u32, params: &mut Parameters) -> Result<()> { trace_println!("[+] TA invoke command"); match Command::from(cmd_id) { Command::Prepare => { return alloc_resources(sess_ctx, params); } Command::SetKey => { return set_aes_key(sess_ctx, params); } Command::SetIV => { return reset_aes_iv(sess_ctx, params); } Command::Cipher => { return cipher_buffer(sess_ctx, params); } _ => { return Err(Error::new(ErrorKind::BadParameters)); } } } pub fn ta2tee_algo_id(algo_id: u32) -> Result<AlgorithmId> { match Algo::from(algo_id) { Algo::ECB => Ok(AlgorithmId::AesEcbNopad), Algo::CBC => Ok(AlgorithmId::AesCbcNopad), Algo::CTR => Ok(AlgorithmId::AesCtr), _ => Err(Error::new(ErrorKind::BadParameters)), } } pub fn ta2tee_key_size(key_sz: u32) -> Result<usize> { match KeySize::from(key_sz) { KeySize::Bit128 | KeySize::Bit256 => Ok(key_sz as usize), _ => Err(Error::new(ErrorKind::BadParameters)), } 
} pub fn ta2tee_mode_id(mode: u32) -> Result<OperationMode> { match Mode::from(mode) { Mode::Encode => Ok(OperationMode::Encrypt), Mode::Decode => Ok(OperationMode::Decrypt), _ => Err(Error::new(ErrorKind::BadParameters)), } } pub fn alloc_resources(aes: &mut AesCipher, params: &mut Parameters) -> Result<()> { let algo_value = unsafe { params.0.as_value().unwrap().a() }; let key_size_value = unsafe { params.1.as_value().unwrap().a() }; let mode_id_value = unsafe { params.2.as_value().unwrap().a() }; aes.key_size = ta2tee_key_size(key_size_value).unwrap(); aes.cipher = Cipher::allocate( ta2tee_algo_id(algo_value).unwrap(), ta2tee_mode_id(mode_id_value).unwrap(), aes.key_size * 8, ) .unwrap(); aes.key_object = TransientObject::allocate(TransientObjectType::Aes, aes.key_size * 8).unwrap(); let key = vec![0u8; aes.key_size as usize]; let attr = AttributeMemref::from_ref(AttributeId::SecretValue, &key); aes.key_object.populate(&[attr.into()])?; aes.cipher.set_key(&aes.key_object)?; Ok(()) } pub fn set_aes_key(aes: &mut AesCipher, params: &mut Parameters) -> Result<()> { let mut param0 = unsafe { params.0.as_memref().unwrap() }; let key = param0.buffer(); if key.len() != aes.key_size { trace_println!("[+] Get wrong key size !\n"); return Err(Error::new(ErrorKind::BadParameters)); } let attr = AttributeMemref::from_ref(AttributeId::SecretValue, &key); aes.key_object.reset(); aes.key_object.populate(&[attr.into()])?; aes.cipher.set_key(&aes.key_object)?; Ok(()) } pub fn reset_aes_iv(aes: &mut AesCipher, params: &mut Parameters) -> Result<()> { let mut param0 = unsafe { params.0.as_memref().unwrap() }; let iv = param0.buffer(); aes.cipher.init(iv); trace_println!("[+] TA initial vectore reset done!"); Ok(()) }
const TA_FLAGS: u32 = 0; const TA_STACK_SIZE: u32 = 2 * 1024; const TA_DATA_SIZE: u32 = 1 * 1024 * 1024; const TA_VERSION: &[u8] = b"Undefined version\0"; const TA_DESCRIPTION: &[u8] = b"This is an AES example\0"; const EXT_PROP_VALUE_1: &[u8] = b"AES TA\0"; const EXT_PROP_VALUE_2: u32 = 0x0010; const TRACE_LEVEL: i32 = 4; const TRACE_EXT_PREFIX: &[u8] = b"TA\0"; const TA_FRAMEWORK_STACK_SIZE: u32 = 2048; include!(concat!(env!("OUT_DIR"), "/user_ta_header.rs"));
pub fn cipher_buffer(aes: &mut AesCipher, params: &mut Parameters) -> Result<()> { let mut param0 = unsafe { params.0.as_memref().unwrap() }; let mut param1 = unsafe { params.1.as_memref().unwrap() }; let input = param0.buffer(); let output = param1.buffer(); if output.len() < input.len() { return Err(Error::new(ErrorKind::BadParameters)); } trace_println!("[+] TA tries to update ciphers!"); let tmp_size = aes.cipher.update(input, output).unwrap(); param1.set_updated_size(tmp_size); Ok(()) }
function_block-full_function
[ { "content": "#[ta_invoke_command]\n\nfn invoke_command(sess_ctx: &mut RsaCipher, cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n match Command::from(cmd_id) {\n\n Command::GenKey => gen_key(sess_ctx, params),\n\n Command::GetSize => get_size(sess_ctx, params),\n\n Command::Encrypt => encrypt(sess_ctx, params),\n\n Command::Decrypt => decrypt(sess_ctx, params),\n\n _ => Err(Error::new(ErrorKind::BadParameters)),\n\n }\n\n}\n\n\n\n// TA configurations\n\nconst TA_FLAGS: u32 = 0;\n\nconst TA_DATA_SIZE: u32 = 32 * 1024;\n\nconst TA_STACK_SIZE: u32 = 2 * 1024;\n\nconst TA_VERSION: &[u8] = b\"0.1\\0\";\n\nconst TA_DESCRIPTION: &[u8] = b\"Example of TA using asymmetric cipher.\\0\";\n\nconst EXT_PROP_VALUE_1: &[u8] = b\"Acipher TA\\0\";\n\nconst EXT_PROP_VALUE_2: u32 = 0x0010;\n\nconst TRACE_LEVEL: i32 = 4;\n\nconst TRACE_EXT_PREFIX: &[u8] = b\"TA\\0\";\n\nconst TA_FRAMEWORK_STACK_SIZE: u32 = 2048;\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/user_ta_header.rs\"));\n", "file_path": "examples/acipher/ta/src/main.rs", "rank": 1, "score": 411887.8386623064 }, { "content": "#[ta_invoke_command]\n\nfn invoke_command(cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n match Command::from(cmd_id) {\n\n Command::RandomGenerator => {\n\n return random_number_generate(params);\n\n }\n\n _ => {\n\n return Err(Error::new(ErrorKind::BadParameters));\n\n }\n\n }\n\n}\n\n\n\n// TA configurations\n\nconst TA_FLAGS: u32 = 0;\n\nconst TA_DATA_SIZE: u32 = 32 * 1024;\n\nconst TA_STACK_SIZE: u32 = 2 * 1024;\n\nconst TA_VERSION: &[u8] = b\"0.1\\0\";\n\nconst TA_DESCRIPTION: &[u8] = b\"This is a random generator example.\\0\";\n\nconst EXT_PROP_VALUE_1: &[u8] = b\"Random TA\\0\";\n\nconst EXT_PROP_VALUE_2: u32 = 0x0010;\n\nconst TRACE_LEVEL: i32 = 4;\n\nconst TRACE_EXT_PREFIX: &[u8] = b\"TA\\0\";\n\nconst TA_FRAMEWORK_STACK_SIZE: u32 = 
2048;\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/user_ta_header.rs\"));\n", "file_path": "examples/random/ta/src/main.rs", "rank": 2, "score": 403549.8519866019 }, { "content": "#[ta_invoke_command]\n\nfn invoke_command(cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n match Command::from(cmd_id) {\n\n Command::DefaultOp => {\n\n let mut p = unsafe { params.0.as_memref().unwrap() };\n\n let mut buffer = p.buffer();\n\n let point = Point { x: 1, y: 2 };\n\n\n\n // Convert the Point to a JSON string.\n\n let serialized = serde_json::to_string(&point).unwrap();\n\n let len = buffer.write(serialized.as_bytes()).unwrap();\n\n\n\n // update size of output buffer\n\n unsafe { (*p.raw()).size = len as u32 };\n\n\n\n // Prints serialized = {\"x\":1,\"y\":2}\n\n trace_println!(\"serialized = {}\", serialized);\n\n\n\n // Convert the JSON string back to a Point.\n\n let deserialized: Point = serde_json::from_str(&serialized).unwrap();\n", "file_path": "examples/serde/ta/src/main.rs", "rank": 3, "score": 403549.85198660195 }, { "content": "#[ta_invoke_command]\n\nfn invoke_command(cmd_id: u32, _params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n match Command::from(cmd_id) {\n\n Command::Test => {\n\n time()?;\n\n Ok(())\n\n }\n\n _ => Err(Error::new(ErrorKind::BadParameters)),\n\n }\n\n}\n\n\n", "file_path": "examples/time/ta/src/main.rs", "rank": 4, "score": 403549.85198660195 }, { "content": "#[ta_invoke_command]\n\nfn invoke_command(cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n let mut n0_buffer = unsafe { params.0.as_memref().unwrap() };\n\n let n1_value = unsafe { params.1.as_value().unwrap() };\n\n\n\n let mut n0 = BigInt::new(64);\n\n let mut n1 = BigInt::new(2);\n\n\n\n n0.convert_from_octet_string(n0_buffer.buffer(), 0)?;\n\n n1.convert_from_s32(n1_value.a() as i32);\n\n\n\n match Command::from(cmd_id) {\n\n 
Command::Compare => compare(&n0, &n1),\n\n Command::Convert => convert(&n0, &n1),\n\n Command::Add => add(&n0, &n1),\n\n Command::Sub => sub(&n0, &n1),\n\n Command::Multiply => multiply(&n0, &n1),\n\n Command::Divide => divide(&n0, &n1),\n\n Command::Module => module(&n0, &n1),\n\n _ => Err(Error::new(ErrorKind::BadParameters)),\n", "file_path": "examples/big_int/ta/src/main.rs", "rank": 6, "score": 397435.2362477853 }, { "content": "#[ta_invoke_command]\n\nfn invoke_command(cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n let mut values = unsafe { params.0.as_value().unwrap() };\n\n match Command::from(cmd_id) {\n\n Command::IncValue => {\n\n values.set_a(values.a() + 100);\n\n Ok(())\n\n }\n\n Command::DecValue => {\n\n values.set_a(values.a() - 100);\n\n Ok(())\n\n }\n\n _ => Err(Error::new(ErrorKind::BadParameters)),\n\n }\n\n}\n\n\n\n// TA configurations\n\nconst TA_FLAGS: u32 = 0;\n\nconst TA_DATA_SIZE: u32 = 32 * 1024;\n\nconst TA_STACK_SIZE: u32 = 2 * 1024;\n\nconst TA_VERSION: &[u8] = b\"0.1\\0\";\n\nconst TA_DESCRIPTION: &[u8] = b\"This is a hello world example.\\0\";\n\nconst EXT_PROP_VALUE_1: &[u8] = b\"Hello World TA\\0\";\n\nconst EXT_PROP_VALUE_2: u32 = 0x0010;\n\nconst TRACE_LEVEL: i32 = 4;\n\nconst TRACE_EXT_PREFIX: &[u8] = b\"TA\\0\";\n\nconst TA_FRAMEWORK_STACK_SIZE: u32 = 2048;\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/user_ta_header.rs\"));\n", "file_path": "examples/hello_world/ta/src/main.rs", "rank": 7, "score": 397435.23624778533 }, { "content": "#[ta_invoke_command]\n\nfn invoke_command(cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n match Command::from(cmd_id) {\n\n Command::Write => {\n\n return create_raw_object(params);\n\n }\n\n Command::Read => {\n\n return read_raw_object(params);\n\n }\n\n Command::Delete => {\n\n return delete_object(params);\n\n }\n\n _ => {\n\n return Err(Error::new(ErrorKind::NotSupported));\n\n 
}\n\n }\n\n}\n\n\n", "file_path": "examples/secure_storage/ta/src/main.rs", "rank": 8, "score": 397435.23624778533 }, { "content": "#[ta_invoke_command]\n\nfn invoke_command(cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n let mut p0 = unsafe { params.0.as_memref().unwrap()};\n\n let mut p1 = unsafe { params.1.as_memref().unwrap()};\n\n let mut p2 = unsafe { params.2.as_value().unwrap() };\n\n\n\n let input: proto::EnclaveInput = proto::serde_json::from_slice(p0.buffer()).unwrap();\n\n let output = handle_invoke(Command::from(cmd_id), input).unwrap();\n\n\n\n let output_vec = proto::serde_json::to_vec(&output).unwrap();\n\n p1.buffer().write(&output_vec).unwrap();\n\n p2.set_a(output_vec.len() as u32);\n\n\n\n Ok(())\n\n}\n\n\n\n// TA configurations\n\nconst TA_FLAGS: u32 = 0;\n\nconst TA_DATA_SIZE: u32 = 64 * 1024;\n\nconst TA_STACK_SIZE: u32 = 4 * 1024;\n\nconst TA_VERSION: &[u8] = b\"0.1\\0\";\n\nconst TA_DESCRIPTION: &[u8] = b\"This is a hello world example.\\0\";\n\nconst EXT_PROP_VALUE_1: &[u8] = b\"Hello World TA\\0\";\n\nconst EXT_PROP_VALUE_2: u32 = 0x0010;\n\nconst TRACE_LEVEL: i32 = 4;\n\nconst TRACE_EXT_PREFIX: &[u8] = b\"TA\\0\";\n\nconst TA_FRAMEWORK_STACK_SIZE: u32 = 2048;\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/user_ta_header.rs\"));\n", "file_path": "examples/message_passing_interface/ta/src/main.rs", "rank": 9, "score": 391570.34563787444 }, { "content": "#[ta_invoke_command]\n\nfn invoke_command(sess_ctx: &mut DigestOp, cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n match Command::from(cmd_id) {\n\n Command::Update => {\n\n return update(sess_ctx, params);\n\n }\n\n Command::DoFinal => {\n\n return do_final(sess_ctx, params);\n\n }\n\n _ => {\n\n return Err(Error::new(ErrorKind::BadParameters));\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/digest/ta/src/main.rs", "rank": 10, "score": 381903.2067203056 }, { "content": 
"#[ta_invoke_command]\n\nfn invoke_command(sess_ctx: &mut HmacOtp, cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n match Command::from(cmd_id) {\n\n Command::RegisterSharedKey => {\n\n return register_shared_key(sess_ctx, params);\n\n }\n\n Command::GetHOTP => {\n\n return get_hotp(sess_ctx, params);\n\n }\n\n _ => {\n\n return Err(Error::new(ErrorKind::BadParameters));\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/hotp/ta/src/main.rs", "rank": 11, "score": 381903.20672030555 }, { "content": "#[ta_invoke_command]\n\nfn invoke_command(sess_ctx: &mut AEOp, cmd_id: u32, params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA invoke command\");\n\n match Command::from(cmd_id) {\n\n Command::Prepare => {\n\n return prepare(sess_ctx, params);\n\n }\n\n Command::Update => {\n\n return update(sess_ctx, params);\n\n }\n\n Command::EncFinal => {\n\n return encrypt_final(sess_ctx, params);\n\n }\n\n Command::DecFinal => {\n\n return decrypt_final(sess_ctx, params);\n\n }\n\n _ => {\n\n return Err(Error::new(ErrorKind::BadParameters));\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/authentication/ta/src/main.rs", "rank": 12, "score": 381903.2067203056 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters, _sess_ctx: &mut RsaCipher) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/acipher/ta/src/main.rs", "rank": 14, "score": 380736.4376935545 }, { "content": "fn get_size(rsa: &mut RsaCipher, params: &mut Parameters) -> Result<()> {\n\n let key_info = rsa.key.info().unwrap();\n\n unsafe {\n\n params\n\n .0\n\n .as_value()\n\n .unwrap()\n\n .set_a((key_info.object_size() / 8) as u32)\n\n };\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/acipher/ta/src/main.rs", "rank": 16, "score": 361838.5441656541 }, { "content": "fn gen_key(session: &mut Session, key_size: u32) -> optee_teec::Result<()> {\n\n let p0 = 
ParamValue::new(key_size, 0, ParamType::ValueInput);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::GenKey as u32, &mut operation)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/acipher/host/src/main.rs", "rank": 19, "score": 356146.6440658802 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters, _sess_ctx: &mut HmacOtp) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/hotp/ta/src/main.rs", "rank": 20, "score": 348914.9161422748 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters, _sess_ctx: &mut DigestOp) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/digest/ta/src/main.rs", "rank": 21, "score": 348914.9161422748 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters, _sess_ctx: &mut AEOp) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/authentication/ta/src/main.rs", "rank": 22, "score": 348914.91614227486 }, { "content": "pub fn create_raw_object(params: &mut Parameters) -> Result<()> {\n\n let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() };\n\n let mut obj_id = vec![0; p0.buffer().len() as usize];\n\n obj_id.clone_from_slice(p0.buffer());\n\n\n\n let obj_data_flag = DataFlag::ACCESS_READ\n\n | DataFlag::ACCESS_WRITE\n\n | DataFlag::ACCESS_WRITE_META\n\n | DataFlag::OVERWRITE;\n\n\n\n let mut init_data: [u8; 0] = [0; 0];\n\n match PersistentObject::create(\n\n ObjectStorageConstants::Private,\n\n &mut obj_id,\n\n obj_data_flag,\n\n None,\n\n &mut init_data,\n\n ) {\n\n Err(e) => {\n", "file_path": "examples/secure_storage/ta/src/main.rs", "rank": 23, "score": 347433.9295820702 }, { "content": "pub fn get_hotp(hotp: &mut HmacOtp, params: &mut Parameters) -> Result<()> 
{\n\n let mut mac: [u8; SHA1_HASH_SIZE] = [0x0; SHA1_HASH_SIZE];\n\n\n\n hmac_sha1(hotp, &mut mac)?;\n\n\n\n for i in (0..hotp.counter.len()).rev() {\n\n hotp.counter[i] += 1;\n\n if hotp.counter[i] > 0 {\n\n break;\n\n }\n\n }\n\n let hotp_val = truncate(&mut mac);\n\n let mut p = unsafe { params.0.as_value().unwrap() };\n\n p.set_a(hotp_val);\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/hotp/ta/src/main.rs", "rank": 24, "score": 345480.5127262586 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters, _sess_ctx: &mut DiffieHellman) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/diffie_hellman/ta/src/main.rs", "rank": 26, "score": 343981.91397444875 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/random/ta/src/main.rs", "rank": 27, "score": 341134.4750385531 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/serde/ta/src/main.rs", "rank": 28, "score": 341134.4750385531 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/time/ta/src/main.rs", "rank": 29, "score": 341134.475038553 }, { "content": "pub fn register_shared_key(hotp: &mut HmacOtp, params: &mut Parameters) -> Result<()> {\n\n let mut p = unsafe { params.0.as_memref().unwrap() };\n\n let buffer = p.buffer();\n\n hotp.key_len = buffer.len();\n\n hotp.key[..hotp.key_len].clone_from_slice(buffer);\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/hotp/ta/src/main.rs", "rank": 30, "score": 340488.1110325377 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters) -> Result<()> {\n\n 
trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/hello_world/ta/src/main.rs", "rank": 31, "score": 334960.56616973947 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/secure_storage/ta/src/main.rs", "rank": 32, "score": 334960.56616973947 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/big_int/ta/src/main.rs", "rank": 33, "score": 334960.56616973947 }, { "content": "fn gen_key(rsa: &mut RsaCipher, params: &mut Parameters) -> Result<()> {\n\n let key_size = unsafe { params.0.as_value().unwrap().a() };\n\n rsa.key =\n\n TransientObject::allocate(TransientObjectType::RsaKeypair, key_size as usize).unwrap();\n\n rsa.key.generate_key(key_size as usize, &[])?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/acipher/ta/src/main.rs", "rank": 34, "score": 329210.2612484756 }, { "content": "#[ta_open_session]\n\nfn open_session(_params: &mut Parameters) -> Result<()> {\n\n trace_println!(\"[+] TA open session\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/message_passing_interface/ta/src/main.rs", "rank": 35, "score": 329055.25880877697 }, { "content": "pub fn delete_object(params: &mut Parameters) -> Result<()> {\n\n let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n\n\n let mut obj_id = vec![0; p0.buffer().len() as usize];\n\n obj_id.clone_from_slice(p0.buffer());\n\n\n\n match PersistentObject::open(\n\n ObjectStorageConstants::Private,\n\n &mut obj_id,\n\n DataFlag::ACCESS_READ | DataFlag::ACCESS_WRITE_META,\n\n ) {\n\n Err(e) => {\n\n return Err(e);\n\n }\n\n\n\n Ok(mut object) => {\n\n object.close_and_delete()?;\n\n std::mem::forget(object);\n\n return Ok(());\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/secure_storage/ta/src/main.rs", 
"rank": 37, "score": 319192.61995885964 }, { "content": "pub fn random_number_generate(params: &mut Parameters) -> Result<()> {\n\n let mut p = unsafe { params.0.as_memref().unwrap()};\n\n Random::generate(p.buffer());\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/random/ta/src/main.rs", "rank": 38, "score": 319192.61995885964 }, { "content": "pub fn prepare(ae: &mut AEOp, params: &mut Parameters) -> Result<()> {\n\n let p0 = unsafe { params.0.as_value().unwrap() };\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() };\n\n let mut p2 = unsafe { params.2.as_memref().unwrap() };\n\n let mut p3 = unsafe { params.3.as_memref().unwrap() };\n\n let mode = match Mode::from(p0.a()) {\n\n Mode::Encrypt => OperationMode::Encrypt,\n\n Mode::Decrypt => OperationMode::Decrypt,\n\n _ => OperationMode::IllegalValue,\n\n };\n\n let nonce = p1.buffer();\n\n let key = p2.buffer();\n\n let aad = p3.buffer();\n\n\n\n ae.op = AE::allocate(AlgorithmId::AesCcm, mode, KEY_SIZE * 8).unwrap();\n\n\n\n let mut key_object = TransientObject::allocate(TransientObjectType::Aes, KEY_SIZE * 8).unwrap();\n\n let attr = AttributeMemref::from_ref(AttributeId::SecretValue, key);\n\n key_object.populate(&[attr.into()])?;\n\n ae.op.set_key(&key_object)?;\n\n ae.op\n\n .init(&nonce, TAG_LEN * 8, AAD_LEN, BUFFER_SIZE * PAYLOAD_NUMBER)?;\n\n ae.op.update_aad(aad);\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/authentication/ta/src/main.rs", "rank": 39, "score": 318044.8555946718 }, { "content": "pub fn update(digest: &mut DigestOp, params: &mut Parameters) -> Result<()> {\n\n let mut p = unsafe { params.0.as_memref().unwrap() };\n\n let buffer = p.buffer();\n\n digest.op.update(buffer);\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/digest/ta/src/main.rs", "rank": 40, "score": 318044.85559467174 }, { "content": "pub fn update(digest: &mut AEOp, params: &mut Parameters) -> Result<()> {\n\n let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() 
};\n\n let src = p0.buffer();\n\n let res = p1.buffer();\n\n digest.op.update(src, res)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/authentication/ta/src/main.rs", "rank": 41, "score": 318044.85559467174 }, { "content": "pub fn do_final(digest: &mut DigestOp, params: &mut Parameters) -> Result<()> {\n\n let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() };\n\n let mut p2 = unsafe { params.2.as_value().unwrap() };\n\n let input = p0.buffer();\n\n let output = p1.buffer();\n\n match digest.op.do_final(input, output) {\n\n Err(e) => Err(e),\n\n Ok(hash_length) => {\n\n p2.set_a(hash_length as u32);\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\n// TA configurations\n\nconst TA_FLAGS: u32 = 0;\n\nconst TA_DATA_SIZE: u32 = 32 * 1024;\n\nconst TA_STACK_SIZE: u32 = 2 * 1024;\n\nconst TA_VERSION: &[u8] = b\"0.1\\0\";\n\nconst TA_DESCRIPTION: &[u8] = b\"This is a message digest example.\\0\";\n\nconst EXT_PROP_VALUE_1: &[u8] = b\"Digest TA\\0\";\n\nconst EXT_PROP_VALUE_2: u32 = 0x0010;\n\nconst TRACE_LEVEL: i32 = 4;\n\nconst TRACE_EXT_PREFIX: &[u8] = b\"TA\\0\";\n\nconst TA_FRAMEWORK_STACK_SIZE: u32 = 2048;\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/user_ta_header.rs\"));\n", "file_path": "examples/digest/ta/src/main.rs", "rank": 42, "score": 318044.8555946718 }, { "content": "pub fn read_raw_object(params: &mut Parameters) -> Result<()> {\n\n let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() };\n\n let mut obj_id = vec![0; p0.buffer().len() as usize];\n\n obj_id.clone_from_slice(p0.buffer());\n\n\n\n match PersistentObject::open(\n\n ObjectStorageConstants::Private,\n\n &mut obj_id,\n\n DataFlag::ACCESS_READ | DataFlag::SHARE_READ,\n\n ) {\n\n Err(e) => return Err(e),\n\n\n\n Ok(object) => {\n\n let obj_info = object.info()?;\n\n\n\n if obj_info.data_size() > p1.buffer().len() {\n\n p1.set_updated_size(obj_info.data_size());\n\n return 
Err(Error::new(ErrorKind::ShortBuffer));\n\n }\n", "file_path": "examples/secure_storage/ta/src/main.rs", "rank": 43, "score": 314257.45802406006 }, { "content": "pub fn encrypt_final(digest: &mut AEOp, params: &mut Parameters) -> Result<()> {\n\n let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() };\n\n let mut p2 = unsafe { params.2.as_memref().unwrap() };\n\n let clear = p0.buffer();\n\n let ciph = p1.buffer();\n\n let tag = p2.buffer();\n\n match digest.op.encrypt_final(clear, ciph, tag) {\n\n Err(e) => Err(e),\n\n Ok((_ciph_len, _tag_len)) => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "examples/authentication/ta/src/main.rs", "rank": 44, "score": 313574.7127082301 }, { "content": "pub fn decrypt_final(digest: &mut AEOp, params: &mut Parameters) -> Result<()> {\n\n let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() };\n\n let mut p2 = unsafe { params.2.as_memref().unwrap() };\n\n let ciph = p0.buffer();\n\n let clear = p1.buffer();\n\n let tag = p2.buffer();\n\n match digest.op.decrypt_final(ciph, clear, tag) {\n\n Err(e) => Err(e),\n\n Ok(_clear_len) => Ok(()),\n\n }\n\n}\n\n\n\n// TA configurations\n\nconst TA_FLAGS: u32 = 0;\n\nconst TA_DATA_SIZE: u32 = 32 * 1024;\n\nconst TA_STACK_SIZE: u32 = 2 * 1024;\n\nconst TA_VERSION: &[u8] = b\"0.1\\0\";\n\nconst TA_DESCRIPTION: &[u8] = b\"This is an authentication encryption example.\\0\";\n\nconst EXT_PROP_VALUE_1: &[u8] = b\"AE TA\\0\";\n\nconst EXT_PROP_VALUE_2: u32 = 0x0010;\n\nconst TRACE_LEVEL: i32 = 4;\n\nconst TRACE_EXT_PREFIX: &[u8] = b\"TA\\0\";\n\nconst TA_FRAMEWORK_STACK_SIZE: u32 = 2048;\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/user_ta_header.rs\"));\n", "file_path": "examples/authentication/ta/src/main.rs", "rank": 45, "score": 313574.7127082301 }, { "content": "fn encrypt(rsa: &mut RsaCipher, params: &mut Parameters) -> Result<()> {\n\n let key_info = rsa.key.info().unwrap();\n\n 
let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n let plain_text = p0.buffer();\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() };\n\n match Asymmetric::allocate(\n\n AlgorithmId::RsaesPkcs1V15,\n\n OperationMode::Encrypt,\n\n key_info.object_size(),\n\n ) {\n\n Err(e) => Err(e),\n\n Ok(cipher) => {\n\n cipher.set_key(&rsa.key)?;\n\n match cipher.encrypt(&[], &plain_text) {\n\n Err(e) => Err(e),\n\n Ok(cipher_text) => Ok(p1.buffer().clone_from_slice(&cipher_text)),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/acipher/ta/src/main.rs", "rank": 46, "score": 301351.403634183 }, { "content": "fn decrypt(rsa: &mut RsaCipher, params: &mut Parameters) -> Result<()> {\n\n let key_info = rsa.key.info().unwrap();\n\n let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n let mut cipher_text = p0.buffer();\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() };\n\n match Asymmetric::allocate(\n\n AlgorithmId::RsaesPkcs1V15,\n\n OperationMode::Decrypt,\n\n key_info.object_size(),\n\n ) {\n\n Err(e) => Err(e),\n\n Ok(cipher) => {\n\n cipher.set_key(&rsa.key)?;\n\n match cipher.decrypt(&mut [], &mut cipher_text) {\n\n Err(e) => Err(e),\n\n Ok(plain_text) => Ok(p1.buffer().clone_from_slice(&plain_text)),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/acipher/ta/src/main.rs", "rank": 47, "score": 301351.403634183 }, { "content": "fn set_iv(session: &mut Session, iv: &[u8]) -> optee_teec::Result<()> {\n\n let p0 = ParamTmpRef::new_input(iv);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n session.invoke_command(Command::SetIV as u32, &mut operation)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/aes/host/src/main.rs", "rank": 48, "score": 296163.3174520573 }, { "content": "fn set_key(session: &mut Session, key: &[u8]) -> optee_teec::Result<()> {\n\n let p0 = ParamTmpRef::new_input(key);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n\n\n 
session.invoke_command(Command::SetKey as u32, &mut operation)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/aes/host/src/main.rs", "rank": 49, "score": 296076.7047690899 }, { "content": "pub fn truncate(hmac_result: &mut [u8]) -> u32 {\n\n let mut bin_code: u32;\n\n let offset: usize = (hmac_result[19] & 0xf) as usize;\n\n\n\n bin_code = ((hmac_result[offset] & 0x7f) as u32) << 24\n\n | ((hmac_result[offset + 1] & 0xff) as u32) << 16\n\n | ((hmac_result[offset + 2] & 0xff) as u32) << 8\n\n | ((hmac_result[offset + 3] & 0xff) as u32);\n\n\n\n bin_code %= DBC2_MODULO;\n\n return bin_code;\n\n}\n\n\n\n// TA configurations\n\nconst TA_FLAGS: u32 = 0;\n\nconst TA_DATA_SIZE: u32 = 32 * 1024;\n\nconst TA_STACK_SIZE: u32 = 2 * 1024;\n\nconst TA_VERSION: &[u8] = b\"0.1\\0\";\n\nconst TA_DESCRIPTION: &[u8] = b\"This is an HOTP example.\\0\";\n\nconst EXT_PROP_VALUE_1: &[u8] = b\"HOTP TA\\0\";\n\nconst EXT_PROP_VALUE_2: u32 = 0x0010;\n\nconst TRACE_LEVEL: i32 = 4;\n\nconst TRACE_EXT_PREFIX: &[u8] = b\"TA\\0\";\n\nconst TA_FRAMEWORK_STACK_SIZE: u32 = 2048;\n\n\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/user_ta_header.rs\"));\n", "file_path": "examples/hotp/ta/src/main.rs", "rank": 50, "score": 295105.8188964631 }, { "content": "fn generate_key(dh: &mut DiffieHellman, params: &mut Parameters) -> Result<()> {\n\n let p0 = unsafe { params.0.as_value().unwrap() };\n\n let mut p1 = unsafe { params.1.as_value().unwrap() };\n\n let mut p2 = unsafe { params.2.as_memref().unwrap() };\n\n let mut p3 = unsafe { params.3.as_memref().unwrap() };\n\n\n\n // Extract prime and base from parameters\n\n let prime_u32 = p0.a();\n\n let base_u32 = p0.b();\n\n let mut key_prime = BigInt::new(64);\n\n let mut key_base = BigInt::new(64);\n\n key_prime.convert_from_s32(prime_u32 as i32);\n\n key_base.convert_from_s32(base_u32 as i32);\n\n\n\n let prime_vec = key_prime.convert_to_octet_string().unwrap();\n\n let attr_prime = AttributeMemref::from_ref(AttributeId::DhPrime, 
prime_vec.as_slice());\n\n let base_slice = key_base.convert_to_octet_string().unwrap();\n\n let attr_base = AttributeMemref::from_ref(AttributeId::DhBase, base_slice.as_slice());\n\n\n\n // Generate key pair\n", "file_path": "examples/diffie_hellman/ta/src/main.rs", "rank": 51, "score": 292252.1379821325 }, { "content": "fn derive_key(dh: &mut DiffieHellman, params: &mut Parameters) -> Result<()> {\n\n let mut p0 = unsafe { params.0.as_memref().unwrap() };\n\n let mut p1 = unsafe { params.1.as_memref().unwrap() };\n\n let mut p2 = unsafe { params.2.as_value().unwrap() };\n\n\n\n let received_public = AttributeMemref::from_ref(AttributeId::DhPublicValue, p0.buffer());\n\n\n\n match DeriveKey::allocate(AlgorithmId::DhDeriveSharedSecret, KEY_SIZE) {\n\n Err(e) => Err(e),\n\n Ok(operation) => {\n\n operation.set_key(&dh.key)?;\n\n let mut derived_key =\n\n TransientObject::allocate(TransientObjectType::GenericSecret, KEY_SIZE).unwrap();\n\n operation.derive(&[received_public.into()], &mut derived_key);\n\n let key_size = derived_key\n\n .ref_attribute(AttributeId::SecretValue, p1.buffer())\n\n .unwrap();\n\n p2.set_a(key_size as u32);\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/diffie_hellman/ta/src/main.rs", "rank": 52, "score": 292252.1379821325 }, { "content": "fn derive_key(key0_pub: &Vec<u8>, session: &mut Session) -> Result<()> {\n\n let p0 = ParamTmpRef::new_input(key0_pub.as_slice());\n\n let mut shared_key = [0u8; KEY_SIZE];\n\n let p1 = ParamTmpRef::new_output(&mut shared_key);\n\n let p2 = ParamValue::new(0, 0, ParamType::ValueOutput);\n\n let mut operation = Operation::new(0, p0, p1, p2, ParamNone);\n\n\n\n session.invoke_command(Command::DeriveKey as u32, &mut operation)?;\n\n\n\n let key_size = operation.parameters().2.a() as usize;\n\n let mut derive_res = vec![0u8; key_size];\n\n derive_res.copy_from_slice(&shared_key[..key_size]);\n\n println!(\"Derived share key as {:?}\", derive_res);\n\n Ok(())\n\n}\n\n\n", "file_path": 
"examples/diffie_hellman/host/src/main.rs", "rank": 53, "score": 288235.95274555613 }, { "content": "pub fn hmac_sha1(hotp: &mut HmacOtp, out: &mut [u8]) -> Result<usize> {\n\n if hotp.key_len < MIN_KEY_SIZE || hotp.key_len > MAX_KEY_SIZE {\n\n return Err(Error::new(ErrorKind::BadParameters));\n\n }\n\n\n\n match Mac::allocate(AlgorithmId::HmacSha1, hotp.key_len * 8) {\n\n Err(e) => return Err(e),\n\n Ok(mac) => {\n\n match TransientObject::allocate(TransientObjectType::HmacSha1, hotp.key_len * 8) {\n\n Err(e) => return Err(e),\n\n Ok(mut key_object) => {\n\n //KEY size can be larger than hotp.key_len\n\n let mut tmp_key = hotp.key.to_vec();\n\n tmp_key.truncate(hotp.key_len);\n\n let attr = AttributeMemref::from_ref(AttributeId::SecretValue, &tmp_key);\n\n key_object.populate(&[attr.into()])?;\n\n mac.set_key(&key_object)?;\n\n }\n\n }\n\n mac.init(&[0u8; 0]);\n\n mac.update(&hotp.counter);\n\n let out_len = mac.compute_final(&[0u8; 0], out).unwrap();\n\n Ok(out_len)\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/hotp/ta/src/main.rs", "rank": 54, "score": 279643.1998384661 }, { "content": "#[ta_close_session]\n\nfn close_session(_sess_ctx: &mut RsaCipher) {\n\n trace_println!(\"[+] TA close session\");\n\n}\n\n\n", "file_path": "examples/acipher/ta/src/main.rs", "rank": 56, "score": 275761.26405564364 }, { "content": "fn get_hotp(session: &mut Session) -> optee_teec::Result<()> {\n\n let p0 = ParamValue::new(0, 0, ParamType::ValueOutput);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n\n\n for i in 0..TEST_SIZE {\n\n session.invoke_command(Command::GetHOTP as u32, &mut operation)?;\n\n let (p0, _, _, _) = operation.parameters();\n\n let hotp_value = p0.a();\n\n\n\n println!(\"Get HOTP: {}\", hotp_value);\n\n\n\n if hotp_value != RFC4226_TEST_VALUES[i] {\n\n println!(\n\n \"Wrong value get! 
Expected value: {}\",\n\n RFC4226_TEST_VALUES[i]\n\n );\n\n return Err(Error::new(ErrorKind::Generic));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/hotp/host/src/main.rs", "rank": 57, "score": 273480.9562247049 }, { "content": "fn register_shared_key(session: &mut Session) -> optee_teec::Result<()> {\n\n let k: [u8; SIZE_K] = [\n\n 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35,\n\n 0x36, 0x37, 0x38, 0x39, 0x30,\n\n ];\n\n\n\n let p0 = ParamTmpRef::new_input(&k);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::RegisterSharedKey as u32, &mut operation)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/hotp/host/src/main.rs", "rank": 58, "score": 269964.6461216675 }, { "content": "fn do_final(session: &mut Session, src: &[u8], res: &mut [u8]) -> optee_teec::Result<usize> {\n\n let p0 = ParamTmpRef::new_input(src);\n\n let p1 = ParamTmpRef::new_output(res);\n\n let p2 = ParamValue::new(0, 0, ParamType::ValueOutput);\n\n let mut operation = Operation::new(0, p0, p1, p2, ParamNone);\n\n\n\n session.invoke_command(Command::DoFinal as u32, &mut operation)?;\n\n\n\n Ok(operation.parameters().2.a() as usize)\n\n}\n\n\n", "file_path": "examples/digest/host/src/main.rs", "rank": 59, "score": 258473.83912439036 }, { "content": "fn generate_key(session: &mut Session) -> Result<(Vec<u8>, Vec<u8>)> {\n\n // Pass in the prime and base\n\n let p0 = ParamValue::new(23, 5, ParamType::ValueInput);\n\n // Save public and private key size\n\n let p1 = ParamValue::new(0, 0, ParamType::ValueOutput);\n\n // Vector for generated keys\n\n let mut public_key = [0u8; KEY_SIZE];\n\n let mut private_key = [0u8; KEY_SIZE];\n\n let p2 = ParamTmpRef::new_output(&mut public_key);\n\n let p3 = ParamTmpRef::new_output(&mut private_key);\n\n\n\n let mut operation = Operation::new(0, p0, p1, p2, p3);\n\n session.invoke_command(Command::GenerateKey as u32, &mut 
operation)?;\n\n\n\n let public_size = operation.parameters().1.a() as usize;\n\n let private_size = operation.parameters().1.b() as usize;\n\n let mut public_res = vec![0u8; public_size];\n\n let mut private_res = vec![0u8; private_size];\n\n public_res.copy_from_slice(&public_key[..public_size]);\n\n private_res.copy_from_slice(&private_key[..private_size]);\n\n\n\n Ok((public_res, private_res))\n\n}\n\n\n", "file_path": "examples/diffie_hellman/host/src/main.rs", "rank": 60, "score": 253448.79766261805 }, { "content": "fn serde(session: &mut Session) -> optee_teec::Result<()> {\n\n let mut buffer = [0u8; 128];\n\n let p0 = ParamTmpRef::new_output(&mut buffer);\n\n let mut operation = Operation::new(0, p0 , ParamNone, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::DefaultOp as u32, &mut operation)?;\n\n let updated_size = operation.parameters().0.updated_size();\n\n\n\n let p: Point = serde_json::from_slice(&buffer[..updated_size]).unwrap();\n\n println!(\"{:?}\", p);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/serde/host/src/main.rs", "rank": 62, "score": 242339.09738565795 }, { "content": "fn random(session: &mut Session) -> optee_teec::Result<()> {\n\n let mut random_uuid = [0u8; 16];\n\n\n\n let p0 = ParamTmpRef::new_output(&mut random_uuid);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n\n\n println!(\"Invoking TA to generate random UUID...\");\n\n session.invoke_command(Command::RandomGenerator as u32, &mut operation)?;\n\n\n\n let generate_uuid = Uuid::from_slice(&random_uuid).unwrap();\n\n\n\n println!(\"Generate random UUID: {}\", generate_uuid);\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/random/host/src/main.rs", "rank": 63, "score": 242339.09738565795 }, { "content": "fn time(session: &mut Session) -> optee_teec::Result<()> {\n\n let mut operation = Operation::new(0, ParamNone, ParamNone, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::Test as u32, &mut operation)?;\n\n\n\n 
Ok(())\n\n}\n\n\n", "file_path": "examples/time/host/src/main.rs", "rank": 64, "score": 242339.0973856579 }, { "content": "#[ta_close_session]\n\nfn close_session(_sess_ctx: &mut HmacOtp) {\n\n trace_println!(\"[+] TA close session\");\n\n}\n\n\n", "file_path": "examples/hotp/ta/src/main.rs", "rank": 65, "score": 241109.32420297843 }, { "content": "#[ta_close_session]\n\nfn close_session(_sess_ctx: &mut DigestOp) {\n\n trace_println!(\"[+] TA close session\");\n\n}\n\n\n", "file_path": "examples/digest/ta/src/main.rs", "rank": 66, "score": 241109.32420297843 }, { "content": "#[ta_close_session]\n\nfn close_session(_sess_ctx: &mut AEOp) {\n\n trace_println!(\"[+] TA close session\");\n\n}\n\n\n", "file_path": "examples/authentication/ta/src/main.rs", "rank": 67, "score": 241109.3242029784 }, { "content": "#[ta_close_session]\n\nfn close_session(_sess_ctx: &mut DiffieHellman) {\n\n trace_println!(\"[+] TA close session\");\n\n}\n\n\n", "file_path": "examples/diffie_hellman/ta/src/main.rs", "rank": 68, "score": 237430.74921848177 }, { "content": "fn hello_world(session: &mut Session) -> optee_teec::Result<()> {\n\n let p0 = ParamValue::new(29, 0, ParamType::ValueInout);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n\n\n println!(\"original value is {:?}\", operation.parameters().0.a());\n\n\n\n session.invoke_command(Command::IncValue as u32, &mut operation)?;\n\n println!(\"inc value is {:?}\", operation.parameters().0.a());\n\n\n\n session.invoke_command(Command::DecValue as u32, &mut operation)?;\n\n println!(\"dec value is {:?}\", operation.parameters().0.a());\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/hello_world/host/src/main.rs", "rank": 69, "score": 236758.90271133994 }, { "content": "fn big_int(session: &mut Session) -> optee_teec::Result<()> {\n\n let number0 = [\n\n 0x01u8, 0x23u8, 0x45u8, 0x67u8, 0x89u8, 0xabu8, 0xcdu8, 0xefu8,\n\n ];\n\n let number1: u32 = 2;\n\n\n\n let p0 = ParamTmpRef::new_input(&number0);\n\n 
let p1 = ParamValue::new(number1, 0, ParamType::ValueInput);\n\n let mut operation = Operation::new(0, p0, p1, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::Compare as u32, &mut operation)?;\n\n session.invoke_command(Command::Convert as u32, &mut operation)?;\n\n session.invoke_command(Command::Add as u32, &mut operation)?;\n\n session.invoke_command(Command::Sub as u32, &mut operation)?;\n\n session.invoke_command(Command::Multiply as u32, &mut operation)?;\n\n session.invoke_command(Command::Divide as u32, &mut operation)?;\n\n session.invoke_command(Command::Module as u32, &mut operation)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/big_int/host/src/main.rs", "rank": 70, "score": 236758.90271133994 }, { "content": "fn update(session: &mut Session, src: &[u8], res: &mut [u8]) -> optee_teec::Result<()> {\n\n let p0 = ParamTmpRef::new_input(src);\n\n let p1 = ParamTmpRef::new_output(res);\n\n let mut operation = Operation::new(0, p0, p1, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::Update as u32, &mut operation)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/authentication/host/src/main.rs", "rank": 71, "score": 232621.53707388105 }, { "content": "fn update(session: &mut Session, src: &[u8]) -> optee_teec::Result<()> {\n\n let p0 = ParamTmpRef::new_input(src);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::Update as u32, &mut operation)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/digest/host/src/main.rs", "rank": 72, "score": 229844.10862050165 }, { "content": "fn prepare_aes(session: &mut Session, encode: i8) -> optee_teec::Result<()> {\n\n let p2_value = if encode == ENCODE {\n\n Mode::Encode as u32\n\n } else {\n\n Mode::Decode as u32\n\n };\n\n let p0 = ParamValue::new(Algo::CTR as u32, 0, ParamType::ValueInput);\n\n let p1 = ParamValue::new(KeySize::Bit128 as u32, 0, ParamType::ValueInput);\n\n let p2 = ParamValue::new(p2_value, 0, 
ParamType::ValueInput);\n\n let mut operation = Operation::new(0, p0, p1, p2, ParamNone);\n\n\n\n session.invoke_command(Command::Prepare as u32, &mut operation)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/aes/host/src/main.rs", "rank": 73, "score": 227215.32700544718 }, { "content": "fn enc_dec(session: &mut Session, plain_text: &[u8]) -> optee_teec::Result<()> {\n\n let p0 = ParamValue::new(0, 0, ParamType::ValueOutput);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::GetSize as u32, &mut operation)?;\n\n\n\n let mut cipher_text = vec![0u8; operation.parameters().0.a() as usize];\n\n let p0 = ParamTmpRef::new_input(plain_text);\n\n let p1 = ParamTmpRef::new_output(&mut cipher_text);\n\n let mut operation2 = Operation::new(0, p0, p1, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::Encrypt as u32, &mut operation2)?;\n\n println!(\n\n \"Success encrypt input text \\\"{}\\\" as {} bytes cipher text: {:?}\",\n\n str::from_utf8(plain_text).unwrap(),\n\n cipher_text.len(),\n\n cipher_text\n\n );\n\n\n\n let p0 = ParamTmpRef::new_input(&cipher_text);\n", "file_path": "examples/acipher/host/src/main.rs", "rank": 74, "score": 224685.77753860923 }, { "content": "fn handle_invoke(command: Command, input: proto::EnclaveInput) -> Result<proto::EnclaveOutput> {\n\n match command {\n\n Command::Hello => {\n\n let output = proto::EnclaveOutput {\n\n message: format!(\"Hello, {}\", input.message)\n\n };\n\n Ok(output)\n\n },\n\n Command::Bye => {\n\n let output = proto::EnclaveOutput {\n\n message: format!(\"Bye, {}\", input.message)\n\n };\n\n Ok(output)\n\n },\n\n _ => Err(Error::new(ErrorKind::BadParameters)),\n\n }\n\n}\n\n\n", "file_path": "examples/message_passing_interface/ta/src/main.rs", "rank": 75, "score": 222022.7750873977 }, { "content": "fn delete_secure_object(session: &mut Session, obj_id: &[u8]) -> optee_teec::Result<()> {\n\n let p0 = 
ParamTmpRef::new_input(obj_id);\n\n let mut operation = Operation::new(0, p0, ParamNone, ParamNone, ParamNone);\n\n\n\n session.invoke_command(Command::Delete as u32, &mut operation)?;\n\n\n\n println!(\"- Delete the object\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/secure_storage/host/src/main.rs", "rank": 76, "score": 219902.26597474542 }, { "content": "#[proc_macro_attribute]\n\npub fn ta_invoke_command(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n let f = parse_macro_input!(input as syn::ItemFn);\n\n let ident = &f.ident;\n\n\n\n // check the function signature\n\n let valid_signature = f.constness.is_none()\n\n && match f.vis {\n\n syn::Visibility::Inherited => true,\n\n _ => false,\n\n }\n\n && f.abi.is_none()\n\n && (f.decl.inputs.len() == 2 || f.decl.inputs.len() == 3)\n\n && f.decl.generics.where_clause.is_none()\n\n && f.decl.variadic.is_none();\n\n\n\n if !valid_signature {\n\n return syn::parse::Error::new(\n\n f.span(),\n\n \"`#[ta_invoke_command]` function must have signature `fn(&mut T, u32, &mut Parameters) -> Result<()>` or `fn(u32, &mut Parameters) -> Result<()>`\",\n\n )\n", "file_path": "optee-utee/macros/src/lib.rs", "rank": 77, "score": 217419.89229924761 }, { "content": "pub fn TEE_PARAM_TYPES(t0: u32, t1: u32, t2: u32, t3: u32) -> u32 {\n\n t0 | t1 << 4 | t2 << 8 | t3 << 12\n\n}\n\n\n\npub const TEE_NUM_PARAMS: u32 = 4;\n", "file_path": "optee-utee/optee-utee-sys/src/tee_api_defines.rs", "rank": 78, "score": 208614.788401327 }, { "content": "pub fn TEEC_PARAM_TYPES(p0:u32, p1:u32, p2:u32, p3:u32) -> u32 {\n\n let tmp = p1 << 4 | p2 << 8 | p3 << 12;\n\n return p0 | tmp;\n\n}\n\n\n\npub const TEEC_CONFIG_PAYLOAD_REF_COUNT: u32 = 4;\n\n\n\npub const TEEC_CONFIG_SHAREDMEM_MAX_SIZE: c_ulong = -1 as c_long as c_ulong;\n\n\n\npub const TEEC_NONE: u32 = 0x00000000;\n\npub const TEEC_VALUE_INPUT: u32 = 0x00000001;\n\npub const TEEC_VALUE_OUTPUT: u32 = 0x00000002;\n\npub const TEEC_VALUE_INOUT: u32 = 0x00000003;\n\npub const 
TEEC_MEMREF_TEMP_INPUT: u32 = 0x00000005;\n\npub const TEEC_MEMREF_TEMP_OUTPUT: u32 = 0x00000006;\n\npub const TEEC_MEMREF_TEMP_INOUT: u32 = 0x00000007;\n\npub const TEEC_MEMREF_WHOLE: u32 = 0x0000000C;\n\npub const TEEC_MEMREF_PARTIAL_INPUT: u32 = 0x0000000D;\n\npub const TEEC_MEMREF_PARTIAL_OUTPUT: u32 = 0x0000000E;\n\npub const TEEC_MEMREF_PARTIAL_INOUT: u32 = 0x0000000F;\n", "file_path": "optee-teec/optee-teec-sys/src/tee_client_api.rs", "rank": 79, "score": 208614.788401327 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/random/ta/src/main.rs", "rank": 80, "score": 194429.0613178669 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/digest/ta/src/main.rs", "rank": 81, "score": 194429.0613178669 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/time/ta/src/main.rs", "rank": 83, "score": 194429.0613178669 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/acipher/ta/src/main.rs", "rank": 84, "score": 194429.0613178669 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/authentication/ta/src/main.rs", "rank": 85, "score": 194429.0613178669 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/hotp/ta/src/main.rs", "rank": 86, "score": 194429.0613178669 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/serde/ta/src/main.rs", "rank": 87, "score": 194429.0613178669 }, { "content": "#[ta_create]\n\nfn 
create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/big_int/ta/src/main.rs", "rank": 88, "score": 190740.20084151623 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/diffie_hellman/ta/src/main.rs", "rank": 89, "score": 190740.20084151623 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/hello_world/ta/src/main.rs", "rank": 90, "score": 190740.20084151623 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/secure_storage/ta/src/main.rs", "rank": 91, "score": 190740.20084151623 }, { "content": "#[proc_macro_attribute]\n\npub fn ta_open_session(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n let f = parse_macro_input!(input as syn::ItemFn);\n\n let ident = &f.ident;\n\n\n\n // check the function signature\n\n let valid_signature = f.constness.is_none()\n\n && match f.vis {\n\n syn::Visibility::Inherited => true,\n\n _ => false,\n\n }\n\n && f.abi.is_none()\n\n && (f.decl.inputs.len() == 1 || f.decl.inputs.len() == 2)\n\n && f.decl.generics.where_clause.is_none()\n\n && f.decl.variadic.is_none();\n\n\n\n if !valid_signature {\n\n return syn::parse::Error::new(\n\n f.span(),\n\n \"`#[ta_open_session]` function must have signature `fn(&mut Parameters) -> Result<()>` or `fn(&mut Parameters, &mut T) -> Result<()>`\",\n\n )\n", "file_path": "optee-utee/macros/src/lib.rs", "rank": 92, "score": 190676.4564536145 }, { "content": "#[proc_macro_attribute]\n\npub fn ta_close_session(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n let f = parse_macro_input!(input as syn::ItemFn);\n\n let ident = &f.ident;\n\n\n\n // check the function signature\n\n let valid_signature = f.constness.is_none()\n\n && match f.vis 
{\n\n syn::Visibility::Inherited => true,\n\n _ => false,\n\n }\n\n && f.abi.is_none()\n\n && (f.decl.inputs.len() == 0 || f.decl.inputs.len() == 1)\n\n && f.decl.generics.where_clause.is_none()\n\n && f.decl.variadic.is_none()\n\n && match f.decl.output {\n\n syn::ReturnType::Default => true,\n\n _ => false,\n\n };\n\n\n\n if !valid_signature {\n", "file_path": "optee-utee/macros/src/lib.rs", "rank": 93, "score": 190676.45645361452 }, { "content": "#[proc_macro_attribute]\n\npub fn ta_destroy(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n let f = parse_macro_input!(input as syn::ItemFn);\n\n let ident = &f.ident;\n\n\n\n // check the function signature\n\n let valid_signature = f.constness.is_none()\n\n && match f.vis {\n\n syn::Visibility::Inherited => true,\n\n _ => false,\n\n }\n\n && f.abi.is_none()\n\n && f.decl.inputs.is_empty()\n\n && f.decl.generics.where_clause.is_none()\n\n && f.decl.variadic.is_none();\n\n &&match f.decl.output {\n\n syn::ReturnType::Default => true,\n\n _ => false,\n\n };\n\n\n\n if !valid_signature {\n", "file_path": "optee-utee/macros/src/lib.rs", "rank": 94, "score": 188724.24680265077 }, { "content": "#[ta_create]\n\nfn create() -> Result<()> {\n\n trace_println!(\"[+] TA create\");\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/message_passing_interface/ta/src/main.rs", "rank": 95, "score": 187227.12273025964 }, { "content": "pub trait Param {\n\n fn into_raw(&mut self) -> raw::TEEC_Parameter;\n\n fn param_type(&self) -> ParamType;\n\n fn from_raw(raw: raw::TEEC_Parameter, param_type: ParamType) -> Self;\n\n}\n\n\n\n/// This type defines a parameter that is not referencing shared memory, but\n\n/// carries instead small raw data passed by value. 
It is used as a `Operation`\n\n/// parameter when the corresponding parameter type is one of `ValueInput`,\n\n/// `ValueOutput`, or `ValueInout`.\n\npub struct ParamValue {\n\n raw: raw::TEEC_Value,\n\n param_type: ParamType,\n\n}\n\n\n\nimpl ParamValue {\n\n /// Creates a value parameter with two `u32` integer and `ParamType` for\n\n /// operation.\n\n pub fn new(a: u32, b: u32, param_type: ParamType) -> Self {\n\n let raw = raw::TEEC_Value { a, b };\n", "file_path": "optee-teec/src/parameter.rs", "rank": 96, "score": 185172.64378373144 }, { "content": "#[ta_close_session]\n\nfn close_session() {\n\n trace_println!(\"[+] TA close session\");\n\n}\n\n\n", "file_path": "examples/serde/ta/src/main.rs", "rank": 97, "score": 168734.9003284693 }, { "content": "#[ta_close_session]\n\nfn close_session() {\n\n trace_println!(\"[+] TA close session\");\n\n}\n\n\n", "file_path": "examples/time/ta/src/main.rs", "rank": 98, "score": 168734.9003284693 }, { "content": "#[ta_close_session]\n\nfn close_session() {\n\n trace_println!(\"[+] TA close session\");\n\n}\n\n\n", "file_path": "examples/random/ta/src/main.rs", "rank": 99, "score": 168734.9003284693 } ]
Rust
crates/prost/tests/src/build.rs
Zha0Chan/crates-sgx
73dc6d9e130757d9e585ee757b3d94d5078512a9
#[macro_use] extern crate cfg_if; cfg_if! { if #[cfg(feature = "edition-2015")] { extern crate env_logger; extern crate prost_build; } } use std::env; use std::fs; use std::path::PathBuf; fn main() { env_logger::init(); let src = PathBuf::from("../tests/src"); let includes = &[src.clone()]; let mut config = prost_build::Config::new(); config.btree_map(&["."]); config.type_attribute("Foo.Bar_Baz.Foo_barBaz", "#[derive(Eq, PartialOrd, Ord)]"); config.type_attribute( "Foo.Bar_Baz.Foo_barBaz.fuzz_buster", "#[derive(Eq, PartialOrd, Ord)]", ); config.type_attribute("Foo.Custom.Attrs.Msg", "#[allow(missing_docs)]"); config.type_attribute("Foo.Custom.Attrs.Msg.field", "/// Oneof docs"); config.type_attribute("Foo.Custom.Attrs.AnEnum", "#[allow(missing_docs)]"); config.type_attribute("Foo.Custom.Attrs.AnotherEnum", "/// Oneof docs"); config.type_attribute( "Foo.Custom.OneOfAttrs.Msg.field", "#[derive(Eq, PartialOrd, Ord)]", ); config.field_attribute("Foo.Custom.Attrs.AnotherEnum.C", "/// The C docs"); config.field_attribute("Foo.Custom.Attrs.AnotherEnum.D", "/// The D docs"); config.field_attribute("Foo.Custom.Attrs.Msg.field.a", "/// Oneof A docs"); config.field_attribute("Foo.Custom.Attrs.Msg.field.b", "/// Oneof B docs"); config .compile_protos(&[src.join("ident_conversion.proto")], includes) .unwrap(); config .compile_protos(&[src.join("nesting.proto")], includes) .unwrap(); config .compile_protos(&[src.join("recursive_oneof.proto")], includes) .unwrap(); config .compile_protos(&[src.join("custom_attributes.proto")], includes) .unwrap(); config .compile_protos(&[src.join("oneof_attributes.proto")], includes) .unwrap(); config .compile_protos(&[src.join("no_unused_results.proto")], includes) .unwrap(); config .compile_protos(&[src.join("default_enum_value.proto")], includes) .unwrap(); config .compile_protos(&[src.join("groups.proto")], includes) .unwrap(); config .compile_protos(&[src.join("deprecated_field.proto")], includes) .unwrap(); config 
.compile_protos(&[src.join("well_known_types.proto")], includes) .unwrap(); config .compile_protos( &[src.join("packages/widget_factory.proto")], &[src.join("packages")], ) .unwrap(); config .compile_protos(&[src.join("no_package.proto")], includes) .err() .unwrap(); let out_dir = &PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set")) .join("extern_paths"); fs::create_dir_all(out_dir).expect("failed to create prefix directory"); config.out_dir(out_dir); cfg_if! { if #[cfg(feature = "edition-2015")] { const EXTERN_PATH: &str = "::packages::gizmo"; } else { const EXTERN_PATH: &str = "crate::packages::gizmo"; } }; config.extern_path(".packages.gizmo", EXTERN_PATH); config .compile_protos( &[src.join("packages").join("widget_factory.proto")], &[src.join("packages")], ) .unwrap(); }
#[macro_use] extern crate cfg_if; cfg_if! { if #[cfg(feature = "edition-2015")] { extern crate env_logger; extern crate prost_build; } } use std::env; use std::fs; use std::path::PathBuf;
fn main() { env_logger::init(); let src = PathBuf::from("../tests/src"); let includes = &[src.clone()]; let mut config = prost_build::Config::new(); config.btree_map(&["."]); config.type_attribute("Foo.Bar_Baz.Foo_barBaz", "#[derive(Eq, PartialOrd, Ord)]"); config.type_attribute( "Foo.Bar_Baz.Foo_barBaz.fuzz_buster", "#[derive(Eq, PartialOrd, Ord)]", ); config.type_attribute("Foo.Custom.Attrs.Msg", "#[allow(missing_docs)]"); config.type_attribute("Foo.Custom.Attrs.Msg.field", "/// Oneof docs"); config.type_attribute("Foo.Custom.Attrs.AnEnum", "#[allow(missing_docs)]"); config.type_attribute("Foo.Custom.Attrs.AnotherEnum", "/// Oneof docs"); config.type_attribute( "Foo.Custom.OneOfAttrs.Msg.field", "#[derive(Eq, PartialOrd, Ord)]", ); config.field_attribute("Foo.Custom.Attrs.AnotherEnum.C", "/// The C docs"); config.field_attribute("Foo.Custom.Attrs.AnotherEnum.D", "/// The D docs"); config.field_attribute("Foo.Custom.Attrs.Msg.field.a", "/// Oneof A docs"); config.field_attribute("Foo.Custom.Attrs.Msg.field.b", "/// Oneof B docs"); config .compile_protos(&[src.join("ident_conversion.proto")], includes) .unwrap(); config .compile_protos(&[src.join("nesting.proto")], includes) .unwrap(); config .compile_protos(&[src.join("recursive_oneof.proto")], includes) .unwrap(); config .compile_protos(&[src.join("custom_attributes.proto")], includes) .unwrap(); config .compile_protos(&[src.join("oneof_attributes.proto")], includes) .unwrap(); config .compile_protos(&[src.join("no_unused_results.proto")], includes) .unwrap(); config .compile_protos(&[src.join("default_enum_value.proto")], includes) .unwrap(); config .compile_protos(&[src.join("groups.proto")], includes) .unwrap(); config .compile_protos(&[src.join("deprecated_field.proto")], includes) .unwrap(); config .compile_protos(&[src.join("well_known_types.proto")], includes) .unwrap(); config .compile_protos( &[src.join("packages/widget_factory.proto")], &[src.join("packages")], ) .unwrap(); config 
.compile_protos(&[src.join("no_package.proto")], includes) .err() .unwrap(); let out_dir = &PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set")) .join("extern_paths"); fs::create_dir_all(out_dir).expect("failed to create prefix directory"); config.out_dir(out_dir); cfg_if! { if #[cfg(feature = "edition-2015")] { const EXTERN_PATH: &str = "::packages::gizmo"; } else { const EXTERN_PATH: &str = "crate::packages::gizmo"; } }; config.extern_path(".packages.gizmo", EXTERN_PATH); config .compile_protos( &[src.join("packages").join("widget_factory.proto")], &[src.join("packages")], ) .unwrap(); }
function_block-full_function
[]
Rust
src/double/div.rs
pwnorbitals/qd
2e4b3234d80adc90199bfd6496778e81568318a3
use crate::common::primitive as p; use crate::common::utils as u; use crate::double::Double; use std::ops::{Div, DivAssign}; #[inline] fn mul_f64(a: Double, b: f64) -> Double { let (p, e) = p::two_prod(a.0, b); let (a, b) = u::renorm2(p, e + a.1 * b); Double(a, b) } #[allow(clippy::suspicious_arithmetic_impl)] impl Div for Double { type Output = Double; fn div(self, other: Double) -> Double { match self.pre_div(&other) { Some(r) => r, None => { let q1 = self.0 / other.0; let mut r = self - mul_f64(other, q1); let q2 = r.0 / other.0; r -= mul_f64(other, q2); let q3 = r.0 / other.0; let (a, b) = u::renorm3(q1, q2, q3); Double(a, b) } } } } impl Div for &Double { type Output = Double; fn div(self, other: &Double) -> Double { (*self).div(*other) } } impl Div<&Double> for Double { type Output = Double; #[inline] fn div(self, other: &Double) -> Double { self.div(*other) } } impl Div<Double> for &Double { type Output = Double; #[inline] fn div(self, other: Double) -> Double { (*self).div(other) } } impl DivAssign for Double { #[inline] fn div_assign(&mut self, other: Double) { let r = self.div(other); self.0 = r.0; self.1 = r.1; } } impl DivAssign<&Double> for Double { #[inline] fn div_assign(&mut self, other: &Double) { let r = self.div(*other); self.0 = r.0; self.1 = r.1; } } impl Double { #[inline] fn pre_div(&self, other: &Double) -> Option<Double> { if self.is_nan() || other.is_nan() { Some(Double::NAN) } else if other.is_zero() { if self.is_zero() { Some(Double::NAN) } else if self.is_sign_negative() == other.is_sign_positive() { Some(Double::NEG_INFINITY) } else { Some(Double::INFINITY) } } else if self.is_infinite() { if other.is_infinite() { Some(Double::NAN) } else if self.is_sign_positive() == other.is_sign_positive() { Some(Double::INFINITY) } else { Some(Double::NEG_INFINITY) } } else if other.is_infinite() { if self.is_sign_positive() == other.is_sign_positive() { Some(Double::ZERO) } else { Some(Double::NEG_ZERO) } } else { None } } } #[cfg(test)] mod tests 
{ use super::*; test_all_near!( num_num: dd!("1.1557273497909217179100931833126961"), Double::PI / Double::E; num_ref: dd!("1.1557273497909217179100931833126961"), Double::PI / &Double::E; ref_num: dd!("1.1557273497909217179100931833126961"), &Double::PI / Double::E; ref_ref: dd!("1.1557273497909217179100931833126961"), &Double::PI / &Double::E; num_neg_num: dd!("-1.1557273497909217179100931833126961"), Double::PI / -Double::E; num_neg_ref: dd!("-1.1557273497909217179100931833126961"), Double::PI / -&Double::E; ref_neg_num: dd!("-1.1557273497909217179100931833126961"), &Double::PI / -Double::E; ref_neg_ref: dd!("-1.1557273497909217179100931833126961"), &Double::PI / -&Double::E; num_id: Double::PI, Double::PI / Double::ONE; id_num: Double::FRAC_1_PI, Double::ONE / Double::PI; num_small: dd!("3141592653589793238462643383279.5039"), Double::PI / dd!("1e-30"); small_num: dd!("3.1830988618379067153776752674502853e-31"), dd!("1e-30") / Double::PI; three_nums: dd!("1.6673621161631071223063639072253465"), Double::PI / Double::E / Double::LN_2; lassoc: dd!("1.6673621161631071223063639072253465"), (Double::PI / Double::E) / Double::LN_2; rassoc: dd!("12.320232213560921976987672083576714"), Double::PI / (Double::LN_2 / Double::E); ); test_all_exact!( zero_inf: Double::ZERO, Double::ZERO / Double::INFINITY; zero_neg_inf: Double::NEG_ZERO, Double::ZERO / Double::NEG_INFINITY; inf_zero: Double::INFINITY, Double::INFINITY / Double::ZERO; neg_inf_zero: Double::NEG_INFINITY, Double::NEG_INFINITY / Double::ZERO; nan_zero: Double::NAN, Double::NAN / Double::ZERO; zero_nan: Double::NAN, Double::ZERO / Double::NAN; zero_zero: Double::NAN, Double::ZERO / Double::ZERO; one_inf: Double::ZERO, Double::ONE / Double::INFINITY; one_neg_inf: Double::NEG_ZERO, Double::ONE / Double::NEG_INFINITY; inf_one: Double::INFINITY, Double::INFINITY / Double::ONE; neg_inf_one: Double::NEG_INFINITY, Double::NEG_INFINITY / Double::ONE; inf_inf: Double::NAN, Double::INFINITY / Double::INFINITY; inf_neg_inf: 
Double::NAN, Double::INFINITY / Double::NEG_INFINITY; neg_inf_inf: Double::NAN, Double::NEG_INFINITY / Double::INFINITY; neg_inf_neg_inf: Double::NAN, Double::NEG_INFINITY / Double::NEG_INFINITY; one_zero: Double::INFINITY, Double::ONE / Double::ZERO; neg_one_zero: Double::NEG_INFINITY, Double::NEG_ONE / Double::ZERO; nan_one: Double::NAN, Double::NAN / Double::ONE; one_nan: Double::NAN, Double::ONE / Double::NAN; ); test_all!( assign_num: { let mut a = Double::PI; a /= Double::E; near!(dd!("1.1557273497909217179100931833126961"), a); } assign_ref: { let mut b = Double::PI; b /= &Double::E; near!(dd!("1.1557273497909217179100931833126961"), b); } ); test!(chain_tens: { let mut value = Double::LN_2; let ten = dd!(10); near!("6.9314718055994530941723212145818e-1", value); value /= ten; near!("6.9314718055994530941723212145818e-2", value); value /= ten; near!("6.9314718055994530941723212145818e-3", value); value /= ten; near!("6.9314718055994530941723212145818e-4", value); value /= ten; near!("6.9314718055994530941723212145818e-5", value); value /= ten; near!("6.9314718055994530941723212145818e-6", value); value /= ten; near!("6.9314718055994530941723212145818e-7", value); value /= ten; near!("6.9314718055994530941723212145818e-8", value); value /= ten; near!("6.9314718055994530941723212145818e-9", value); value /= ten; near!("6.9314718055994530941723212145818e-10", value); value /= ten; near!("6.9314718055994530941723212145818e-11", value); value /= ten; near!("6.9314718055994530941723212145818e-12", value); value /= ten; near!("6.9314718055994530941723212145818e-13", value); value /= ten; near!("6.9314718055994530941723212145818e-14", value); value /= ten; near!("6.9314718055994530941723212145818e-15", value); value /= ten; near!("6.9314718055994530941723212145818e-16", value); value /= ten; near!("6.9314718055994530941723212145818e-17", value); value /= ten; near!("6.9314718055994530941723212145818e-18", value); value /= ten; 
near!("6.9314718055994530941723212145818e-19", value); value /= ten; near!("6.9314718055994530941723212145818e-20", value); value /= ten; near!("6.9314718055994530941723212145818e-21", value); value /= ten; near!("6.9314718055994530941723212145818e-22", value); value /= ten; near!("6.9314718055994530941723212145818e-23", value); value /= ten; near!("6.9314718055994530941723212145818e-24", value); value /= ten; near!("6.9314718055994530941723212145818e-25", value); value /= ten; near!("6.9314718055994530941723212145818e-26", value); value /= ten; near!("6.9314718055994530941723212145818e-27", value); value /= ten; near!("6.9314718055994530941723212145818e-28", value); value /= ten; near!("6.9314718055994530941723212145818e-29", value); value /= ten; near!("6.9314718055994530941723212145818e-30", value); }); }
use crate::common::primitive as p; use crate::common::utils as u; use crate::double::Double; use std::ops::{Div, DivAssign}; #[inline] fn mul_f64(a: Double, b: f64) -> Double { let (p, e) = p::two_prod(a.0, b); let (a, b) = u::renorm2(p, e + a.1 * b); Double(a, b) } #[allow(clippy::suspicious_arithmetic_impl)] impl Div for Double { type Output = Double; fn div(self, other: Double) -> Double { match self.pre_div(&other) { Some(r) => r,
} impl Div for &Double { type Output = Double; fn div(self, other: &Double) -> Double { (*self).div(*other) } } impl Div<&Double> for Double { type Output = Double; #[inline] fn div(self, other: &Double) -> Double { self.div(*other) } } impl Div<Double> for &Double { type Output = Double; #[inline] fn div(self, other: Double) -> Double { (*self).div(other) } } impl DivAssign for Double { #[inline] fn div_assign(&mut self, other: Double) { let r = self.div(other); self.0 = r.0; self.1 = r.1; } } impl DivAssign<&Double> for Double { #[inline] fn div_assign(&mut self, other: &Double) { let r = self.div(*other); self.0 = r.0; self.1 = r.1; } } impl Double { #[inline] fn pre_div(&self, other: &Double) -> Option<Double> { if self.is_nan() || other.is_nan() { Some(Double::NAN) } else if other.is_zero() { if self.is_zero() { Some(Double::NAN) } else if self.is_sign_negative() == other.is_sign_positive() { Some(Double::NEG_INFINITY) } else { Some(Double::INFINITY) } } else if self.is_infinite() { if other.is_infinite() { Some(Double::NAN) } else if self.is_sign_positive() == other.is_sign_positive() { Some(Double::INFINITY) } else { Some(Double::NEG_INFINITY) } } else if other.is_infinite() { if self.is_sign_positive() == other.is_sign_positive() { Some(Double::ZERO) } else { Some(Double::NEG_ZERO) } } else { None } } } #[cfg(test)] mod tests { use super::*; test_all_near!( num_num: dd!("1.1557273497909217179100931833126961"), Double::PI / Double::E; num_ref: dd!("1.1557273497909217179100931833126961"), Double::PI / &Double::E; ref_num: dd!("1.1557273497909217179100931833126961"), &Double::PI / Double::E; ref_ref: dd!("1.1557273497909217179100931833126961"), &Double::PI / &Double::E; num_neg_num: dd!("-1.1557273497909217179100931833126961"), Double::PI / -Double::E; num_neg_ref: dd!("-1.1557273497909217179100931833126961"), Double::PI / -&Double::E; ref_neg_num: dd!("-1.1557273497909217179100931833126961"), &Double::PI / -Double::E; ref_neg_ref: 
dd!("-1.1557273497909217179100931833126961"), &Double::PI / -&Double::E; num_id: Double::PI, Double::PI / Double::ONE; id_num: Double::FRAC_1_PI, Double::ONE / Double::PI; num_small: dd!("3141592653589793238462643383279.5039"), Double::PI / dd!("1e-30"); small_num: dd!("3.1830988618379067153776752674502853e-31"), dd!("1e-30") / Double::PI; three_nums: dd!("1.6673621161631071223063639072253465"), Double::PI / Double::E / Double::LN_2; lassoc: dd!("1.6673621161631071223063639072253465"), (Double::PI / Double::E) / Double::LN_2; rassoc: dd!("12.320232213560921976987672083576714"), Double::PI / (Double::LN_2 / Double::E); ); test_all_exact!( zero_inf: Double::ZERO, Double::ZERO / Double::INFINITY; zero_neg_inf: Double::NEG_ZERO, Double::ZERO / Double::NEG_INFINITY; inf_zero: Double::INFINITY, Double::INFINITY / Double::ZERO; neg_inf_zero: Double::NEG_INFINITY, Double::NEG_INFINITY / Double::ZERO; nan_zero: Double::NAN, Double::NAN / Double::ZERO; zero_nan: Double::NAN, Double::ZERO / Double::NAN; zero_zero: Double::NAN, Double::ZERO / Double::ZERO; one_inf: Double::ZERO, Double::ONE / Double::INFINITY; one_neg_inf: Double::NEG_ZERO, Double::ONE / Double::NEG_INFINITY; inf_one: Double::INFINITY, Double::INFINITY / Double::ONE; neg_inf_one: Double::NEG_INFINITY, Double::NEG_INFINITY / Double::ONE; inf_inf: Double::NAN, Double::INFINITY / Double::INFINITY; inf_neg_inf: Double::NAN, Double::INFINITY / Double::NEG_INFINITY; neg_inf_inf: Double::NAN, Double::NEG_INFINITY / Double::INFINITY; neg_inf_neg_inf: Double::NAN, Double::NEG_INFINITY / Double::NEG_INFINITY; one_zero: Double::INFINITY, Double::ONE / Double::ZERO; neg_one_zero: Double::NEG_INFINITY, Double::NEG_ONE / Double::ZERO; nan_one: Double::NAN, Double::NAN / Double::ONE; one_nan: Double::NAN, Double::ONE / Double::NAN; ); test_all!( assign_num: { let mut a = Double::PI; a /= Double::E; near!(dd!("1.1557273497909217179100931833126961"), a); } assign_ref: { let mut b = Double::PI; b /= &Double::E; 
near!(dd!("1.1557273497909217179100931833126961"), b); } ); test!(chain_tens: { let mut value = Double::LN_2; let ten = dd!(10); near!("6.9314718055994530941723212145818e-1", value); value /= ten; near!("6.9314718055994530941723212145818e-2", value); value /= ten; near!("6.9314718055994530941723212145818e-3", value); value /= ten; near!("6.9314718055994530941723212145818e-4", value); value /= ten; near!("6.9314718055994530941723212145818e-5", value); value /= ten; near!("6.9314718055994530941723212145818e-6", value); value /= ten; near!("6.9314718055994530941723212145818e-7", value); value /= ten; near!("6.9314718055994530941723212145818e-8", value); value /= ten; near!("6.9314718055994530941723212145818e-9", value); value /= ten; near!("6.9314718055994530941723212145818e-10", value); value /= ten; near!("6.9314718055994530941723212145818e-11", value); value /= ten; near!("6.9314718055994530941723212145818e-12", value); value /= ten; near!("6.9314718055994530941723212145818e-13", value); value /= ten; near!("6.9314718055994530941723212145818e-14", value); value /= ten; near!("6.9314718055994530941723212145818e-15", value); value /= ten; near!("6.9314718055994530941723212145818e-16", value); value /= ten; near!("6.9314718055994530941723212145818e-17", value); value /= ten; near!("6.9314718055994530941723212145818e-18", value); value /= ten; near!("6.9314718055994530941723212145818e-19", value); value /= ten; near!("6.9314718055994530941723212145818e-20", value); value /= ten; near!("6.9314718055994530941723212145818e-21", value); value /= ten; near!("6.9314718055994530941723212145818e-22", value); value /= ten; near!("6.9314718055994530941723212145818e-23", value); value /= ten; near!("6.9314718055994530941723212145818e-24", value); value /= ten; near!("6.9314718055994530941723212145818e-25", value); value /= ten; near!("6.9314718055994530941723212145818e-26", value); value /= ten; near!("6.9314718055994530941723212145818e-27", value); value /= ten; 
near!("6.9314718055994530941723212145818e-28", value); value /= ten; near!("6.9314718055994530941723212145818e-29", value); value /= ten; near!("6.9314718055994530941723212145818e-30", value); }); }
None => { let q1 = self.0 / other.0; let mut r = self - mul_f64(other, q1); let q2 = r.0 / other.0; r -= mul_f64(other, q2); let q3 = r.0 / other.0; let (a, b) = u::renorm3(q1, q2, q3); Double(a, b) } } }
function_block-function_prefix_line
[ { "content": "#[inline]\n\nfn mul_f64(a: Quad, b: f64) -> Quad {\n\n let (h0, l0) = p::two_prod(a.0, b);\n\n let (h1, l1) = p::two_prod(a.1, b);\n\n let (h2, l2) = p::two_prod(a.2, b);\n\n let h3 = a.3 * b;\n\n\n\n let s0 = h0;\n\n let (s1, t0) = p::two_sum(h1, l0);\n\n let (s2, t1, t2) = u::three_three_sum(t0, h2, l1);\n\n let (s3, t3) = u::three_two_sum(t1, h3, l2);\n\n let s4 = t2 * t3;\n\n\n\n let (a, b, c, d) = u::renorm5(s0, s1, s2, s3, s4);\n\n Quad(a, b, c, d)\n\n}\n\n\n\nimpl Div for Quad {\n\n type Output = Quad;\n\n\n\n /// Divides this `Quad` by another, producing a new `Quad` as a result.\n", "file_path": "src/quad/div.rs", "rank": 1, "score": 143524.73048399895 }, { "content": "#[inline]\n\npub fn mul_pwr2(a: Double, b: f64) -> Double {\n\n Double(a.0 * b, a.1 * b)\n\n}\n\n\n\n/// Table of the reciprocals of factorials. This starts with 1/3!, as the inverse factorials\n\n/// before that are trivial (1/1! is 1 and 1/2! is 1/2). These are used in Taylor series\n\n/// calculations for exp, sin, and cos. 
\n\npub const INV_FACTS: [Double; 15] = [\n\n Double(1.6666666666666666e-1, 9.25185853854297e-18),\n\n Double(4.1666666666666664e-2, 2.3129646346357427e-18),\n\n Double(8.333333333333333e-3, 1.1564823173178714e-19),\n\n Double(1.388888888888889e-3, -5.300543954373577e-20),\n\n Double(1.984126984126984e-4, 1.7209558293420705e-22),\n\n Double(2.48015873015873e-5, 2.1511947866775882e-23),\n\n Double(2.7557319223985893e-6, -1.858393274046472e-22),\n\n Double(2.755731922398589e-7, 2.3767714622250297e-23),\n\n Double(2.505210838544172e-8, -1.448814070935912e-24),\n\n Double(2.08767569878681e-9, -1.20734505911326e-25),\n\n Double(1.6059043836821613e-10, 1.2585294588752098e-26),\n\n Double(1.1470745597729725e-11, 2.0655512752830745e-28),\n", "file_path": "src/double/common.rs", "rank": 2, "score": 139726.1936104436 }, { "content": "#[inline]\n\npub fn renorm5(a: f64, b: f64, c: f64, d: f64, e: f64) -> (f64, f64, f64, f64) {\n\n let (x, s4) = p::quick_two_sum(d, e);\n\n let (x, s3) = p::quick_two_sum(c, x);\n\n let (x, s2) = p::quick_two_sum(b, x);\n\n let (s0, s1) = p::quick_two_sum(a, x);\n\n\n\n if s1 != 0.0 {\n\n let (s1, s2) = p::quick_two_sum(s1, s2);\n\n if s2 != 0.0 {\n\n let (s2, s3) = p::quick_two_sum(s2, s3);\n\n if s3 != 0.0 {\n\n (s0, s1, s2, s3 + s4)\n\n } else {\n\n let (s2, s3) = p::quick_two_sum(s2, s4);\n\n (s0, s1, s2, s3)\n\n }\n\n } else {\n\n let (s1, s2) = p::quick_two_sum(s1, s3);\n\n if s2 != 0.0 {\n\n let (s2, s3) = p::quick_two_sum(s2, s4);\n", "file_path": "src/common/utils.rs", "rank": 3, "score": 133480.57464980366 }, { "content": "#[inline]\n\npub fn renorm2(a: f64, b: f64) -> (f64, f64) {\n\n p::quick_two_sum(a, b)\n\n}\n\n\n\n/// Renormalizes three components into a two-component value.\n\n///\n\n/// Renormalization ensures that the components of the returned tuple are arranged in such a\n\n/// way that the absolute value of the last component is no more than half the ULP of the\n\n/// first.\n", "file_path": "src/common/utils.rs", "rank": 
4, "score": 131484.81872024157 }, { "content": "#[inline]\n\npub fn accumulate(a: f64, b: f64, c: f64) -> (f64, f64, f64) {\n\n let (s, b) = p::two_sum(b, c);\n\n let (s, a) = p::two_sum(a, s);\n\n\n\n let za = a == 0.0;\n\n let zb = b == 0.0;\n\n\n\n if !(za || zb) {\n\n (s, a, b)\n\n } else {\n\n (0.0, s, if zb { a } else { b })\n\n }\n\n}\n\n\n\n/// Renormalizes two components into a two-component value.\n\n///\n\n/// Renormalization ensures that the components of the returned tuple are arranged in such a\n\n/// way that the absolute value of the last component is no more than half the ULP of the\n\n/// first.\n", "file_path": "src/common/utils.rs", "rank": 5, "score": 130210.2964769311 }, { "content": "#[inline]\n\npub fn renorm3(a: f64, b: f64, c: f64) -> (f64, f64) {\n\n let (u, v) = p::quick_two_sum(a, b);\n\n let (s, w) = p::quick_two_sum(c, u);\n\n p::quick_two_sum(s, v + w)\n\n}\n\n\n\n/// Renormalizes four components into a four-component value.\n\n///\n\n/// Renormalization ensures that the components of the returned tuple are arranged in such a\n\n/// way that the absolute value of each component is no more than half of the ULP of the\n\n/// prior component.\n", "file_path": "src/common/utils.rs", "rank": 6, "score": 129716.90983015936 }, { "content": "#[inline]\n\npub fn two_sum(a: f64, b: f64) -> (f64, f64) {\n\n let s = a + b;\n\n let v = s - a;\n\n let e = (a - (s - v)) + (b - v);\n\n (s, e)\n\n}\n\n\n", "file_path": "src/common/primitive.rs", "rank": 7, "score": 129351.76634511503 }, { "content": "#[cfg(no_fma)]\n\n#[inline]\n\npub fn two_prod(a: f64, b: f64) -> (f64, f64) {\n\n let p = a * b;\n\n let (ahi, alo) = split(a);\n\n let (bhi, blo) = split(b);\n\n let e = ahi * bhi - p + ahi * blo + alo * bhi + alo * blo;\n\n (p, e)\n\n}\n\n\n\n/// Calculates fl(a * a) and err(a * a).\n\n///\n\n/// This implementation uses FMA and therefore requires 2 floating-point instructions, the\n\n/// same as for multiplication. 
If FMA is not available but the `no_fma` feature is not\n\n/// enabled, it will use considerably more operations.\n", "file_path": "src/common/primitive.rs", "rank": 8, "score": 129351.52494711874 }, { "content": "/// Calculates fl(a - b) and err(a - b).\n\n///\n\n/// This calculation performs 6 floating-point operations.\n\npub fn two_diff(a: f64, b: f64) -> (f64, f64) {\n\n let s = a - b;\n\n let v = s - a;\n\n let e = (a - (s - v)) - (b + v);\n\n (s, e)\n\n}\n\n\n\n/// Splits a number into equal-length high and low components.\n\n///\n\n/// This is a helper function for use in multiplication functions when FMA is not enabled\n\n/// (i.e., when the `no_fma` feature is enabled). It performs 4 floating-point operations,\n\n/// unless the number's absolute value is greater than\n\n/// [`SPLIT_THRESHOLD`](constant.SPLIT_THRESHOLD.html). In this case it performs 7\n\n/// floating-point operations to increase precision in the large number.\n", "file_path": "src/common/primitive.rs", "rank": 9, "score": 129346.6293155961 }, { "content": "#[inline]\n\npub fn six_three_sum(a: f64, b: f64, c: f64, d: f64, e: f64, f: f64) -> (f64, f64, f64) {\n\n let (p0, p1, p2) = three_three_sum(a, b, c);\n\n let (q0, q1, q2) = three_three_sum(d, e, f);\n\n let (r0, r1) = p::two_sum(p0, q0);\n\n let (s0, s1) = p::two_sum(p1, q1);\n\n let (t0, t1) = p::two_sum(s0, r1);\n\n let u0 = p2 + q2 + s1 + t1;\n\n (r0, t0, u0)\n\n}\n\n\n\n/// Calculates the sum of nine `f64`s in double-double precision.\n", "file_path": "src/common/utils.rs", "rank": 10, "score": 129147.39834814146 }, { "content": "#[inline]\n\npub fn renorm4(a: f64, b: f64, c: f64, d: f64) -> (f64, f64, f64, f64) {\n\n let (x, s3) = p::quick_two_sum(c, d);\n\n let (x, s2) = p::quick_two_sum(b, x);\n\n let (s0, s1) = p::quick_two_sum(a, x);\n\n\n\n if s1 != 0.0 {\n\n let (s1, s2) = p::quick_two_sum(s1, s2);\n\n if s2 != 0.0 {\n\n let (s2, s3) = p::quick_two_sum(s2, s3);\n\n (s0, s1, s2, s3)\n\n } else {\n\n let (s1, s2) = 
p::quick_two_sum(s1, s3);\n\n (s0, s1, s2, 0.0)\n\n }\n\n } else {\n\n let (s0, s1) = p::quick_two_sum(s0, s2);\n\n if s1 != 0.0 {\n\n let (s1, s2) = p::quick_two_sum(s1, s3);\n\n (s0, s1, s2, 0.0)\n\n } else {\n", "file_path": "src/common/utils.rs", "rank": 11, "score": 128301.00220731486 }, { "content": "#[inline]\n\npub fn quick_two_sum(a: f64, b: f64) -> (f64, f64) {\n\n let s = a + b;\n\n let e = b - (s - a);\n\n (s, e)\n\n}\n\n\n\n// #[inline]\n\n// pub fn quick_two_diff(a: f64, b: f64) -> (f64, f64) {\n\n// let s = a - b;\n\n// let e = (a - s) - b;\n\n// (s, e)\n\n// }\n\n\n\n/// Calculates fl(a + b) and err(a + b).\n\n///\n\n/// This calculation performs 6 floating-point operations. It is less efficient than\n\n/// [`quick_two_sum`](#fn.quick_two_sum) but it carries no restrictions on its input values.\n", "file_path": "src/common/primitive.rs", "rank": 12, "score": 127344.20545224796 }, { "content": "#[inline]\n\npub fn three_three_sum(a: f64, b: f64, c: f64) -> (f64, f64, f64) {\n\n let (u, v) = p::two_sum(a, b);\n\n let (s, w) = p::two_sum(c, u);\n\n let (e1, e2) = p::two_sum(v, w);\n\n (s, e1, e2)\n\n}\n\n\n\n/// Calculates the sum of four `f64`s in double-double precision.\n", "file_path": "src/common/utils.rs", "rank": 13, "score": 126835.04257332998 }, { "content": "#[inline]\n\npub fn three_two_sum(a: f64, b: f64, c: f64) -> (f64, f64) {\n\n let (u, v) = p::two_sum(a, b);\n\n let (s, w) = p::two_sum(c, u);\n\n (s, v + w)\n\n}\n\n\n\n/// Calculates the sum of three `f64`s in triple-double precision.\n", "file_path": "src/common/utils.rs", "rank": 14, "score": 125995.73526516245 }, { "content": "#[cfg(no_fma)]\n\n#[inline]\n\nfn split(a: f64) -> (f64, f64) {\n\n if a > SPLIT_THRESHOLD || a < -SPLIT_THRESHOLD {\n\n let s = a * SPLIT_SHIFT_DOWN;\n\n let t = SPLIT_FACTOR * s;\n\n let hi = t - (t - s);\n\n let lo = s - hi;\n\n (hi * SPLIT_SHIFT_UP, lo * SPLIT_SHIFT_UP)\n\n } else {\n\n let t = SPLIT_FACTOR * a;\n\n let hi = t - (t - a);\n\n let lo = a - 
hi;\n\n (hi, lo)\n\n }\n\n}\n\n\n\n/// Calculates fl(a * b) and err(a * b).\n\n///\n\n/// This implementation uses FMA and requires 2 floating-point operations because of it. If\n\n/// FMA is not available but the `no_fma` feature is not enabled, it will use considerably\n\n/// more operations.\n", "file_path": "src/common/primitive.rs", "rank": 15, "score": 125682.3013465503 }, { "content": "#[inline]\n\npub fn four_two_sum(a: f64, b: f64, c: f64, d: f64) -> (f64, f64) {\n\n let (s0, s1) = p::two_sum(a, c);\n\n (s0, s1 + b + d)\n\n}\n\n\n\n/// Calculates the sum of six `f64`s in triple-double precision.\n", "file_path": "src/common/utils.rs", "rank": 16, "score": 124692.05667307122 }, { "content": "#[cfg(no_fma)]\n\n#[inline]\n\npub fn two_sqr(a: f64) -> (f64, f64) {\n\n let p = a * a;\n\n let (hi, lo) = split(a);\n\n let e = hi * hi - p + 2.0 * hi * lo + lo * lo;\n\n (p, e)\n\n}\n", "file_path": "src/common/primitive.rs", "rank": 17, "score": 117366.64906045372 }, { "content": "fn from_i64(a: i64) -> Double {\n\n let sign = a.signum();\n\n // The first part prevents a.abs() from failing with overflow because the absolute\n\n // value of i64::MIN is i64::MAX + 1\n\n let a = if a == i64::MIN {\n\n i64::MAX as u64 + 1\n\n } else {\n\n a.abs() as u64\n\n };\n\n let (x, y) = split_u64(a);\n\n let (a, b) = u::renorm2(x as f64 * 2f64.powi(32), y as f64);\n\n if sign == -1 {\n\n Double(-a, -b)\n\n } else {\n\n Double(a, b)\n\n }\n\n}\n\n\n\n// FROM INTEGER IMPLEMENTATIONS\n\n//\n", "file_path": "src/double/from.rs", "rank": 18, "score": 95179.52278973206 }, { "content": "fn from_u64(a: u64) -> Double {\n\n let (x, y) = split_u64(a);\n\n let (a, b) = u::renorm2(x as f64 * 2f64.powi(32), y as f64);\n\n Double(a, b)\n\n}\n\n\n", "file_path": "src/double/from.rs", "rank": 19, "score": 95179.52278973206 }, { "content": "#[allow(clippy::many_single_char_names)]\n\nfn cos_taylor(a: Double) -> Double {\n\n if a.is_zero() {\n\n Double::ONE\n\n } else {\n\n let threshold = 
c::mul_pwr2(Double::EPSILON, 0.5);\n\n let x = -a.sqr();\n\n let mut r = x;\n\n let mut s = Double::ONE + c::mul_pwr2(r, 0.5);\n\n let mut i = 1;\n\n\n\n loop {\n\n r *= x;\n\n let t = r * c::INV_FACTS[i];\n\n s += t;\n\n i += 2;\n\n if i >= c::INV_FACTS.len() || t.abs() <= threshold {\n\n break;\n\n }\n\n }\n\n s\n\n }\n\n}\n\n\n", "file_path": "src/double/trig.rs", "rank": 20, "score": 95120.93058759707 }, { "content": "#[allow(clippy::many_single_char_names)]\n\nfn sin_taylor(a: Double) -> Double {\n\n if a.is_zero() {\n\n Double::ZERO\n\n } else {\n\n let threshold = c::mul_pwr2(a.abs() * Double::EPSILON, 0.5);\n\n let x = -a.sqr();\n\n let mut s = a;\n\n let mut r = a;\n\n let mut i = 0;\n\n\n\n loop {\n\n r *= x;\n\n let t = r * c::INV_FACTS[i];\n\n s += t;\n\n i += 2;\n\n if i >= c::INV_FACTS.len() || t.abs() <= threshold {\n\n break;\n\n }\n\n }\n\n s\n\n }\n\n}\n\n\n\n// Compute cos a using the Taylor series. This assumes that |a| <= π/32.\n", "file_path": "src/double/trig.rs", "rank": 21, "score": 95120.93058759707 }, { "content": "// Computes both the sine and cosine of a using the Taylor series. This is a bit quicker\n\n// than calling the two functions above separately, since if you have one of them you can\n\n// calculate the other more efficiently.\n\nfn sincos_taylor(a: Double) -> (Double, Double) {\n\n if a.is_zero() {\n\n (Double::ZERO, Double::ONE)\n\n } else {\n\n let sin_a = sin_taylor(a);\n\n (sin_a, (Double::ONE - sin_a.sqr()).sqrt())\n\n }\n\n}\n\n\n\n// Helper function to reduce the input to a value whose sin/cos can be calculated via Taylor\n\n// series. It firsts reduces modulo 2π, then π/2, then π/16. 
Aside from returning the\n\n// reduced value (`t`), it also returns the group within the next higher modulo in which the\n\n// value fell (`j` and `k`, this is the quadrant for `j`).\n", "file_path": "src/double/trig.rs", "rank": 22, "score": 94651.7657354733 }, { "content": "/// Determines whether a number is exact (true) or has floating-point error (false).\n\n///\n\n/// A number is exactly representable in binary if it can be rendered as a fraction with a\n\n/// power of two as an exponent. If so, then floating-point error doesn't exist and the\n\n/// number can be turned into a quad- or double-double much more efficiently.\n\npub fn is_dyadic(n: f64) -> bool {\n\n let f = n.fract();\n\n if f == 0.0 {\n\n true\n\n } else {\n\n let len = f.to_string().len() - 2; // ignore the leading \"0.\"\n\n let base = 2f64.powi(-(len as i32));\n\n f % base == 0.0\n\n }\n\n}\n", "file_path": "src/common/utils.rs", "rank": 23, "score": 93113.7878748071 }, { "content": "#[inline]\n\n#[allow(clippy::many_single_char_names)]\n\nfn reduce(a: Double) -> (i32, i32, Double) {\n\n // reduce modulo 2π\n\n let z = (a / Double::TAU).round();\n\n let r = a - z * Double::TAU;\n\n\n\n // reduce modulo π/2\n\n let mut q = (r.0 / Double::FRAC_PI_2.0 + 0.5).floor();\n\n let mut t = r - Double(q, 0.0) * Double::FRAC_PI_2;\n\n let j = q as i32;\n\n\n\n // reduce modulo π/16\n\n q = (t.0 / Double::FRAC_PI_16.0 + 0.5).floor();\n\n t -= Double(q, 0.0) * Double::FRAC_PI_16;\n\n let k = q as i32;\n\n\n\n (j, k, t)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/double/trig.rs", "rank": 24, "score": 89448.14256625314 }, { "content": "#[inline]\n\npub fn mul_pwr2(a: Quad, n: f64) -> Quad {\n\n Quad(a.0 * n, a.1 * n, a.2 * n, a.3 * n)\n\n}\n\n\n\n/// Reciprocals of factorials, rendered as Quads. 
These are used in Taylor series\n\n/// calculations.\n\npub const INV_FACTS: [Quad; 15] = [\n\n Quad(\n\n 1.6666666666666666e-1,\n\n 9.25185853854297e-18,\n\n 5.135813185032629e-34,\n\n 2.8509490240983423e-50,\n\n ),\n\n Quad(\n\n 4.1666666666666664e-2,\n\n 2.3129646346357427e-18,\n\n 1.2839532962581572e-34,\n\n 7.127372560245855e-51,\n\n ),\n\n Quad(\n", "file_path": "src/quad/common.rs", "rank": 25, "score": 82528.49348212738 }, { "content": "#[inline]\n\nfn index_and_inc(a: Quad, i: &mut usize) -> f64 {\n\n let r = a[*i];\n\n *i += 1;\n\n r\n\n}\n\n\n\nimpl Add for Quad {\n\n type Output = Quad;\n\n\n\n // This function is the real reason indexing was added to quads. Unlike multiplication,\n\n // where every component has a specific function and appears in a specific place in the\n\n // algorithm, addition is just a repeated iteration over each successive component.\n\n\n\n /// Adds this `Quad` to another, producing a new `Quad` as a result.\n\n ///\n\n /// This implements the `+` operator between two `Quad`s.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::{qd, Quad};\n", "file_path": "src/quad/add.rs", "rank": 26, "score": 82528.49348212738 }, { "content": "#[inline]\n\nfn pre_from_str(s: &str) -> Option<Result<Double, ParseDoubleError>> {\n\n if s.is_empty() {\n\n Some(Err(ParseDoubleError {\n\n kind: ErrorKind::Empty,\n\n }))\n\n } else if s == \"nan\" {\n\n Some(Ok(Double::NAN))\n\n } else if s == \"inf\" || s == \"infinity\" {\n\n Some(Ok(Double::INFINITY))\n\n } else if s == \"-inf\" || s == \"-infinity\" {\n\n Some(Ok(Double::NEG_INFINITY))\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/double/from_str.rs", "rank": 27, "score": 79800.45881397172 }, { "content": "#[inline]\n\nfn split_u64(a: u64) -> (u32, u32) {\n\n let x = (a >> 32) as u32;\n\n let y = a as u32;\n\n (x, y)\n\n}\n\n\n", "file_path": "src/double/from.rs", "rank": 28, "score": 76165.76911133763 }, { "content": "// 
Extracts the decimal digits of `value` into an array of unsigned integers.\n\n//\n\n// This function assumes that `value` is positive. Zero and non-finite values are handled\n\n// before we get to this function, and the sign is already pushed to the output vector. With\n\n// that assumption, this function will return a vector of numbers from 0-9 - digits.\n\nfn extract_digits(value: &Double, exp: i32) -> Vec<u8> {\n\n // Normalize the number to have an exponent of 0 (i.e., one digit before the decimal\n\n // point). We don't actually otherwise need the exponent in this function, as all we're\n\n // doing is parsing digits from the mantissa. This normalization makes the math involved\n\n // much faster. It also ensures that really large numbers don't overflow on\n\n // multiplication by ten.\n\n let divisor = TEN.powi(exp);\n\n\n\n let mut value = value / divisor;\n\n let mut digits = vec![];\n\n\n\n for _ in 0..(MAX_ACCURACY + 1) {\n\n let digit = value.0.trunc();\n\n\n\n value -= Double(digit, 0.0);\n\n value *= TEN;\n\n\n\n digits.push(digit as u8);\n\n }\n\n\n", "file_path": "src/double/display.rs", "rank": 49, "score": 75321.6623178491 }, { "content": "fn push_sign(chars: &mut Vec<char>, value: &Double, f: &Formatter) -> bool {\n\n if value.is_sign_negative() {\n\n chars.push('-');\n\n true\n\n } else if f.sign_plus() {\n\n chars.push('+');\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/double/display.rs", "rank": 50, "score": 68939.0640155334 }, { "content": "fn push_digits_exp(chars: &mut Vec<char>, value: &Double, f: &mut Formatter) {\n\n let value = value.abs();\n\n let exp = value.0.log10().floor() as i32;\n\n let prec = f.precision();\n\n\n\n let mut digits = extract_digits(&value, exp);\n\n d::adjust_zeros(&mut digits, 0);\n\n d::adjust_prec(&mut digits, 0, prec);\n\n\n\n chars.append(&mut d::place_decimal(digits, 0));\n\n}\n\n\n", "file_path": "src/double/display.rs", "rank": 51, "score": 67179.07958367188 }, { "content": "fn 
push_digits_fixed(chars: &mut Vec<char>, value: &Double, f: &mut Formatter) {\n\n let value = value.abs();\n\n let exp = value.0.log10().floor() as i32;\n\n let prec = f.precision();\n\n\n\n let mut digits = extract_digits(&value, exp);\n\n d::adjust_zeros(&mut digits, exp);\n\n d::adjust_prec(&mut digits, exp, prec);\n\n\n\n chars.append(&mut d::place_decimal(digits, exp));\n\n}\n\n\n", "file_path": "src/double/display.rs", "rank": 52, "score": 67179.07958367188 }, { "content": "#[allow(clippy::too_many_arguments)]\n\n#[inline]\n\npub fn nine_two_sum(\n\n a: f64,\n\n b: f64,\n\n c: f64,\n\n d: f64,\n\n e: f64,\n\n f: f64,\n\n g: f64,\n\n h: f64,\n\n i: f64,\n\n) -> (f64, f64) {\n\n let (p0, p1) = p::two_sum(a, b);\n\n let (q0, q1) = p::two_sum(c, d);\n\n let (r0, r1) = p::two_sum(e, f);\n\n let (s0, s1) = p::two_sum(g, h);\n\n let (t0, t1) = four_two_sum(p0, p1, q0, q1);\n\n let (u0, u1) = four_two_sum(r0, r1, s0, s1);\n\n let (v0, v1) = four_two_sum(t0, t1, u0, u1);\n\n let (w0, w1) = p::two_sum(v0, i);\n\n (w0, w1 + v1)\n\n}\n\n\n\n/// Adds a float to an value/error pair.\n\n///\n\n/// If the result of this addition doesn't fit in two `f64`s, the sum is output as the first\n\n/// tuple component and the second and third contain the remainder. 
Otherwise, the first\n\n/// tuple component is `0.0` and the sum is in the other two components.\n", "file_path": "src/common/utils.rs", "rank": 53, "score": 53264.582452248826 }, { "content": "#[allow(clippy::many_single_char_names)]\n\nfn from_i128(a: i128) -> Quad {\n\n let sign = a.signum();\n\n // The first part prevents a.abs() from failing with overflow because the absolute\n\n // value of i128::MIN is i128::MAX + 1\n\n let a = if a == i128::MIN {\n\n i128::MAX as u128 + 1\n\n } else {\n\n a.abs() as u128\n\n };\n\n let (w, x, y, z) = split_u128(a);\n\n let (a, b, c, d) = u::renorm4(\n\n w as f64 * 2f64.powi(96),\n\n x as f64 * 2f64.powi(64),\n\n y as f64 * 2f64.powi(32),\n\n z as f64,\n\n );\n\n if sign == -1 {\n\n Quad(-a, -b, -c, -d)\n\n } else {\n\n Quad(a, b, c, d)\n", "file_path": "src/quad/from.rs", "rank": 54, "score": 51244.16096108341 }, { "content": "fn from_u64(a: u64) -> Quad {\n\n let (x, y) = split_u64(a);\n\n let (a, b, c, d) = u::renorm4(x as f64 * 2f64.powi(32), y as f64, 0.0, 0.0);\n\n Quad(a, b, c, d)\n\n}\n\n\n", "file_path": "src/quad/from.rs", "rank": 55, "score": 51244.16096108341 }, { "content": "fn from_i64(a: i64) -> Quad {\n\n let sign = a.signum();\n\n // The first part prevents a.abs() from failing with overflow because the absolute\n\n // value of i64::MIN is i64::MAX + 1\n\n let a = if a == i64::MIN {\n\n i64::MAX as u64 + 1\n\n } else {\n\n a.abs() as u64\n\n };\n\n let (x, y) = split_u64(a);\n\n let (a, b, c, d) = u::renorm4(x as f64 * 2f64.powi(32), y as f64, 0.0, 0.0);\n\n if sign == -1 {\n\n Quad(-a, -b, -c, -d)\n\n } else {\n\n Quad(a, b, c, d)\n\n }\n\n}\n\n\n", "file_path": "src/quad/from.rs", "rank": 56, "score": 51244.16096108341 }, { "content": "#[allow(clippy::many_single_char_names)]\n\nfn from_u128(a: u128) -> Quad {\n\n let (w, x, y, z) = split_u128(a);\n\n let (a, b, c, d) = u::renorm4(\n\n w as f64 * 2f64.powi(96),\n\n x as f64 * 2f64.powi(64),\n\n y as f64 * 2f64.powi(32),\n\n z as f64,\n\n );\n\n Quad(a, b, 
c, d)\n\n}\n\n\n", "file_path": "src/quad/from.rs", "rank": 57, "score": 51244.16096108341 }, { "content": "#[allow(clippy::many_single_char_names)]\n\nfn cos_taylor(a: Quad) -> Quad {\n\n if a.is_zero() {\n\n Quad::ONE\n\n } else {\n\n let threshold = c::mul_pwr2(Quad::EPSILON, 0.5);\n\n let x = -a.sqr();\n\n let mut r = x;\n\n let mut s = Quad::ONE + c::mul_pwr2(r, 0.5);\n\n let mut i = 1;\n\n\n\n loop {\n\n r *= x;\n\n let t = r * c::INV_FACTS[i];\n\n s += t;\n\n i += 2;\n\n if i >= c::INV_FACTS.len() || t.abs() <= threshold {\n\n break;\n\n }\n\n }\n\n s\n\n }\n\n}\n\n\n", "file_path": "src/quad/trig.rs", "rank": 58, "score": 48035.471218427425 }, { "content": "#[allow(clippy::many_single_char_names)]\n\nfn sin_taylor(a: Quad) -> Quad {\n\n if a.is_zero() {\n\n Quad::ZERO\n\n } else {\n\n let threshold = c::mul_pwr2(Quad::EPSILON * a.abs(), 0.5);\n\n let x = -a.sqr();\n\n let mut s = a;\n\n let mut r = a;\n\n let mut i = 0;\n\n\n\n loop {\n\n r *= x;\n\n let t = r * c::INV_FACTS[i];\n\n s += t;\n\n i += 2;\n\n if i >= c::INV_FACTS.len() || t.abs() <= threshold {\n\n break;\n\n }\n\n }\n\n s\n\n }\n\n}\n\n\n\n// Compute cos a using the Taylor series. This assumes that |a| <= π/2048.\n", "file_path": "src/quad/trig.rs", "rank": 59, "score": 48035.471218427425 }, { "content": "#[inline]\n\nfn split_u64(a: u64) -> (u32, u32) {\n\n let x = (a >> 32) as u32;\n\n let y = a as u32;\n\n (x, y)\n\n}\n\n\n", "file_path": "src/quad/from.rs", "rank": 60, "score": 46279.28722142968 }, { "content": "// Computes both the sine and cosine of a using the Taylor series. 
This is a bit quicker\n\n// than calling the two functions above separately, since if you have one of them you can\n\n// calculate the other more efficiently.\n\nfn sincos_taylor(a: Quad) -> (Quad, Quad) {\n\n if a.is_zero() {\n\n (Quad::ZERO, Quad::ONE)\n\n } else {\n\n let sin_a = sin_taylor(a);\n\n (sin_a, (Quad::ONE - sin_a.sqr()).sqrt())\n\n }\n\n}\n\n\n\n// Helper function to reduce the input to a value whose sin/cos can be calculated via Taylor\n\n// series. It firsts reduces modulo 2π, then π/2, then π/1024. Aside from returning the\n\n// reduced value (`t`), it also returns the group within the next higher modulo in which the\n\n// value fell (`j` and `k`, this is the quadrant for `j`).\n", "file_path": "src/quad/trig.rs", "rank": 61, "score": 44884.23906381233 }, { "content": "#[inline]\n\n#[allow(clippy::many_single_char_names)]\n\nfn reduce(a: Quad) -> (i32, i32, Quad) {\n\n // reduce modulo 2π\n\n let z = (a / Quad::TAU).round();\n\n let r = a - z * Quad::TAU;\n\n\n\n // reduce modulo π/2\n\n let mut q = (r.0 / Quad::FRAC_PI_2.0 + 0.5).floor();\n\n let mut t = r - Quad(q, 0.0, 0.0, 0.0) * Quad::FRAC_PI_2;\n\n let j = q as i32;\n\n\n\n // reduce modulo π/1024\n\n q = (t.0 / FRAC_PI_1024.0 + 0.5).floor();\n\n t -= Quad(q, 0.0, 0.0, 0.0) * FRAC_PI_1024;\n\n let k = q as i32;\n\n\n\n (j, k, t)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/quad/trig.rs", "rank": 62, "score": 43468.80430366777 }, { "content": "#[inline]\n\n#[allow(clippy::many_single_char_names)]\n\nfn split_u128(a: u128) -> (u32, u32, u32, u32) {\n\n let w = (a >> 96) as u32;\n\n let x = (a >> 64) as u32;\n\n let y = (a >> 32) as u32;\n\n let z = a as u32;\n\n (w, x, y, z)\n\n}\n\n\n", "file_path": "src/quad/from.rs", "rank": 63, "score": 41028.626967259086 }, { "content": "// Add an \"infinity\" representation to the input vector.\n\npub fn push_inf(chars: &mut Vec<char>) {\n\n chars.append(&mut \"inf\".chars().collect());\n\n}\n\n\n", "file_path": "src/common/display.rs", 
"rank": 64, "score": 41014.02076020196 }, { "content": "// Add a \"not-a-number\" representation to the input vector.\n\npub fn push_nan(chars: &mut Vec<char>) {\n\n chars.append(&mut \"NaN\".chars().collect());\n\n}\n\n\n", "file_path": "src/common/display.rs", "rank": 65, "score": 41014.02076020196 }, { "content": "\n\n #[inline]\n\n fn div(self, other: &Quad) -> Quad {\n\n (*self).div(*other)\n\n }\n\n}\n\n\n\nimpl Div<&Quad> for Quad {\n\n type Output = Quad;\n\n\n\n /// Divides this `Quad` by a reference to another, producing a new `Quad` as a result.\n\n ///\n\n /// This implements the `/` operator between a `Quad` and a reference to a `Quad`.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::{qd, Quad};\n\n /// let x = Quad::E / &Quad::PI;\n\n /// let expected = qd!(\"0.8652559794322650872177747896460896174287446239085155394543302889\");\n\n ///\n", "file_path": "src/quad/div.rs", "rank": 66, "score": 39056.71821243899 }, { "content": " /// let diff = (x - expected).abs();\n\n /// assert!(diff < qd!(1e-60));\n\n /// ```\n\n #[inline]\n\n fn div(self, other: &Quad) -> Quad {\n\n self.div(*other)\n\n }\n\n}\n\n\n\nimpl Div<Quad> for &Quad {\n\n type Output = Quad;\n\n\n\n /// Divides a reference to this `Quad` by another `Quad`, producing a new `Quad` as a\n\n /// result.\n\n ///\n\n /// This implements the `/` operator between a reference to a `Quad` and a `Quad`.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::{qd, Quad};\n", "file_path": "src/quad/div.rs", "rank": 67, "score": 39053.77189634945 }, { "content": " /// let x = &Quad::E / Quad::PI;\n\n /// let expected = qd!(\"0.8652559794322650872177747896460896174287446239085155394543302889\");\n\n ///\n\n /// let diff = (x - expected).abs();\n\n /// assert!(diff < qd!(1e-60));\n\n /// ```\n\n #[inline]\n\n fn div(self, other: Quad) -> Quad {\n\n (*self).div(other)\n\n }\n\n}\n\n\n\nimpl DivAssign for Quad {\n\n /// Divides this `Quad` by another, modifying this one to equal the result.\n\n 
///\n\n /// This implements the `/=` operator between two `Quad`s.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::{qd, Quad};\n", "file_path": "src/quad/div.rs", "rank": 68, "score": 39053.50725934293 }, { "content": " ///\n\n /// This implements the `/` operator between two `Quad`s.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::{qd, Quad};\n\n /// let x = Quad::E / Quad::PI;\n\n /// let expected = qd!(\"0.8652559794322650872177747896460896174287446239085155394543302889\");\n\n ///\n\n /// let diff = (x - expected).abs();\n\n /// assert!(diff < qd!(1e-60));\n\n /// ```\n\n #[allow(clippy::suspicious_arithmetic_impl)]\n\n fn div(self, other: Quad) -> Quad {\n\n match self.pre_div(&other) {\n\n Some(r) => r,\n\n None => {\n\n // Strategy:\n\n //\n\n // Divide the first component of `self` by the first component of `other`.\n", "file_path": "src/quad/div.rs", "rank": 69, "score": 39052.53674039402 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n/// Divides a reference to this `Quad` by another, producing a new `Quad` as a result.\n\n///\n\n/// This implements the `/` operator between two references to `Quad`s.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// # use qd::{qd, Quad};\n\n/// let x = &Quad::E / &Quad::PI;\n\n/// let expected = qd!(\"0.8652559794322650872177747896460896174287446239085155394543302889\");\n\n///\n\n/// let diff = (x - expected).abs();\n\n/// assert!(diff < qd!(1e-60));\n\n/// ```\n\nimpl Div for &Quad {\n\n type Output = Quad;\n", "file_path": "src/quad/div.rs", "rank": 70, "score": 39052.06065841706 }, { "content": " /// let mut x = Quad::E;\n\n /// x /= Quad::PI;\n\n /// let expected = qd!(\"0.8652559794322650872177747896460896174287446239085155394543302889\");\n\n ///\n\n /// let diff = (x - expected).abs();\n\n /// assert!(diff < qd!(1e-60));\n\n /// ```\n\n #[inline]\n\n fn div_assign(&mut self, other: Quad) {\n\n let r = self.div(other);\n\n self.0 = r.0;\n\n self.1 = r.1;\n\n self.2 = r.2;\n\n self.3 = r.3;\n\n 
}\n\n}\n\n\n\nimpl DivAssign<&Quad> for Quad {\n\n /// Divides this `Quad` by a reference to another, modifying this one to equal the\n\n /// result.\n", "file_path": "src/quad/div.rs", "rank": 71, "score": 39051.584552424676 }, { "content": " ///\n\n /// This implements the `/=` operator between two `Quad`s.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::{qd, Quad};\n\n /// let mut x = Quad::E;\n\n /// x /= &Quad::PI;\n\n /// let expected = qd!(\"0.8652559794322650872177747896460896174287446239085155394543302889\");\n\n ///\n\n /// let diff = (x - expected).abs();\n\n /// assert!(diff < qd!(1e-60));\n\n /// ```\n\n #[inline]\n\n fn div_assign(&mut self, other: &Quad) {\n\n let r = self.div(*other);\n\n self.0 = r.0;\n\n self.1 = r.1;\n\n self.2 = r.2;\n\n self.3 = r.3;\n", "file_path": "src/quad/div.rs", "rank": 72, "score": 39051.53769116169 }, { "content": "// Copyright (c) 2021 Thomas Otterson\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse crate::common::primitive as p;\n\nuse crate::common::utils as u;\n\nuse crate::quad::Quad;\n\nuse std::ops::{Div, DivAssign};\n\n\n\n// Quad x f64 analogue of full quad x quad multiplication above. This is here because we\n\n// don't want to depend on any Quad::from(x), where x is a single f64 (i.e., a non-tuple),\n\n// in arithmetic. Doing so will create infinite loops because arithmetic is used to parse\n\n// the f64s into quads in the first place. 
Multiplying the f64s directly into Quads bypasses\n\n// this.\n\n//\n\n// Division is the only place where this is necessary, so this multiplication function is\n\n// dropped nearby.\n\n#[inline]\n", "file_path": "src/quad/div.rs", "rank": 73, "score": 39050.493254074216 }, { "content": " None\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n // div tests\n\n test_all_near!(\n\n num_num:\n\n qd!(\"1.1557273497909217179100931833126962991208510231644158204997065353273\"),\n\n Quad::PI / Quad::E;\n\n num_ref:\n\n qd!(\"1.1557273497909217179100931833126962991208510231644158204997065353273\"),\n\n Quad::PI / &Quad::E;\n\n ref_num:\n\n qd!(\"1.1557273497909217179100931833126962991208510231644158204997065353273\"),\n\n &Quad::PI / Quad::E;\n", "file_path": "src/quad/div.rs", "rank": 74, "score": 39048.53099740267 }, { "content": " }\n\n}\n\n\n\nimpl Quad {\n\n // Precalc functions\n\n //\n\n // This series of functions returns `Some` with a value that is to be returned, if it\n\n // turns out that the function doesn't have to be calculated because a shortcut result\n\n // is known. 
They return `None` if the value has to be calculated normally.\n\n //\n\n // This keeps the public functions from being mucked up with code that does validation\n\n // rather than calculation.\n\n\n\n #[inline]\n\n fn pre_div(&self, other: &Quad) -> Option<Quad> {\n\n if self.is_nan() || other.is_nan() {\n\n Some(Quad::NAN)\n\n } else if other.is_zero() {\n\n if self.is_zero() {\n\n Some(Quad::NAN)\n", "file_path": "src/quad/div.rs", "rank": 75, "score": 39045.86918803761 }, { "content": " // Then divide the first component of the remainder by the first component\n\n // of `other`, then the first component of -that- remainder by the first\n\n // component of `other`, and so on until we have five terms we can\n\n // renormalize.\n\n let q0 = self.0 / other.0;\n\n let mut r = self - mul_f64(other, q0);\n\n\n\n let q1 = r.0 / other.0;\n\n r -= mul_f64(other, q1);\n\n\n\n let q2 = r.0 / other.0;\n\n r -= mul_f64(other, q2);\n\n\n\n let q3 = r.0 / other.0;\n\n r -= mul_f64(other, q3);\n\n\n\n let q4 = r.0 / other.0;\n\n\n\n let (a, b, c, d) = u::renorm5(q0, q1, q2, q3, q4);\n\n Quad(a, b, c, d)\n", "file_path": "src/quad/div.rs", "rank": 76, "score": 39042.978680890614 }, { "content": " ref_ref:\n\n qd!(\"1.1557273497909217179100931833126962991208510231644158204997065353273\"),\n\n &Quad::PI / &Quad::E;\n\n num_neg_num:\n\n qd!(\"-1.1557273497909217179100931833126962991208510231644158204997065353273\"),\n\n Quad::PI / -Quad::E;\n\n num_neg_ref:\n\n qd!(\"-1.1557273497909217179100931833126962991208510231644158204997065353273\"),\n\n Quad::PI / -&Quad::E;\n\n ref_neg_num:\n\n qd!(\"-1.1557273497909217179100931833126962991208510231644158204997065353273\"),\n\n &Quad::PI / -Quad::E;\n\n ref_neg_ref:\n\n qd!(\"-1.1557273497909217179100931833126962991208510231644158204997065353273\"),\n\n &Quad::PI / -&Quad::E;\n\n num_id:\n\n Quad::PI,\n\n Quad::PI / Quad::ONE;\n\n id_num:\n\n Quad::FRAC_1_PI,\n", "file_path": "src/quad/div.rs", "rank": 77, "score": 39042.860225990225 }, { 
"content": " // of the cases above.\n\n test_all!(\n\n assign_num: {\n\n let mut a = Quad::PI;\n\n a /= Quad::E;\n\n near!(qd!(\"1.1557273497909217179100931833126962991208510231644158204997065353273\"), a);\n\n }\n\n assign_ref: {\n\n let mut b = Quad::PI;\n\n b /= &Quad::E;\n\n near!(qd!(\"1.1557273497909217179100931833126962991208510231644158204997065353273\"), b);\n\n }\n\n );\n\n}\n", "file_path": "src/quad/div.rs", "rank": 78, "score": 39042.60915875874 }, { "content": " Quad::ONE / Quad::PI;\n\n num_small:\n\n qd!(\"3141592653589793238462643383279502884197169399375105820974944.5923061\"),\n\n Quad::PI / qd!(\"1e-60\");\n\n small_num:\n\n qd!(\"3.1830988618379067153776752674502872406891929148091289749533468811804e-61\"),\n\n qd!(\"1e-60\") / Quad::PI;\n\n three_nums:\n\n qd!(\"1.6673621161631071223063639072253467866814381989438981528114006093878\"),\n\n Quad::PI / Quad::E / Quad::LN_2;\n\n lassoc:\n\n qd!(\"1.6673621161631071223063639072253467866814381989438981528114006093878\"),\n\n (Quad::PI / Quad::E) / Quad::LN_2;\n\n rassoc:\n\n qd!(\"12.320232213560921976987672083576725232192678340447553172224165846265\"),\n\n Quad::PI / (Quad::LN_2 / Quad::E);\n\n );\n\n test_all_exact!(\n\n zero_inf:\n\n Quad::ZERO,\n", "file_path": "src/quad/div.rs", "rank": 79, "score": 39042.22290148455 }, { "content": " Quad::NEG_INFINITY / Quad::INFINITY;\n\n neg_inf_neg_inf:\n\n Quad::NAN,\n\n Quad::NEG_INFINITY / Quad::NEG_INFINITY;\n\n one_zero:\n\n Quad::INFINITY,\n\n Quad::ONE / Quad::ZERO;\n\n neg_one_zero:\n\n Quad::NEG_INFINITY,\n\n Quad::NEG_ONE / Quad::ZERO;\n\n\n\n nan_one:\n\n Quad::NAN,\n\n Quad::NAN / Quad::ONE;\n\n one_nan:\n\n Quad::NAN,\n\n Quad::ONE / Quad::NAN;\n\n );\n\n\n\n // Assign tests. 
Assign code delegates to div code, so there's no need to re-test all\n", "file_path": "src/quad/div.rs", "rank": 80, "score": 39040.12864700858 }, { "content": " one_inf:\n\n Quad::ZERO,\n\n Quad::ONE / Quad::INFINITY;\n\n one_neg_inf:\n\n Quad::NEG_ZERO,\n\n Quad::ONE / Quad::NEG_INFINITY;\n\n inf_one:\n\n Quad::INFINITY,\n\n Quad::INFINITY / Quad::ONE;\n\n neg_inf_one:\n\n Quad::NEG_INFINITY,\n\n Quad::NEG_INFINITY / Quad::ONE;\n\n inf_inf:\n\n Quad::NAN,\n\n Quad::INFINITY / Quad::INFINITY;\n\n inf_neg_inf:\n\n Quad::NAN,\n\n Quad::INFINITY / Quad::NEG_INFINITY;\n\n neg_inf_inf:\n\n Quad::NAN,\n", "file_path": "src/quad/div.rs", "rank": 81, "score": 39038.02999651966 }, { "content": " } else if self.is_sign_negative() == other.is_sign_positive() {\n\n Some(Quad::NEG_INFINITY)\n\n } else {\n\n Some(Quad::INFINITY)\n\n }\n\n } else if self.is_infinite() {\n\n if other.is_infinite() {\n\n Some(Quad::NAN)\n\n } else if self.is_sign_positive() == other.is_sign_positive() {\n\n Some(Quad::INFINITY)\n\n } else {\n\n Some(Quad::NEG_INFINITY)\n\n }\n\n } else if other.is_infinite() {\n\n if self.is_sign_positive() == other.is_sign_positive() {\n\n Some(Quad::ZERO)\n\n } else {\n\n Some(Quad::NEG_ZERO)\n\n }\n\n } else {\n", "file_path": "src/quad/div.rs", "rank": 82, "score": 39038.02999651966 }, { "content": " Quad::ZERO / Quad::INFINITY;\n\n zero_neg_inf:\n\n Quad::NEG_ZERO,\n\n Quad::ZERO / Quad::NEG_INFINITY;\n\n inf_zero:\n\n Quad::INFINITY,\n\n Quad::INFINITY / Quad::ZERO;\n\n neg_inf_zero:\n\n Quad::NEG_INFINITY,\n\n Quad::NEG_INFINITY / Quad::ZERO;\n\n nan_zero:\n\n Quad::NAN,\n\n Quad::NAN / Quad::ZERO;\n\n zero_nan:\n\n Quad::NAN,\n\n Quad::ZERO / Quad::NAN;\n\n zero_zero:\n\n Quad::NAN,\n\n Quad::ZERO / Quad::ZERO;\n\n\n", "file_path": "src/quad/div.rs", "rank": 83, "score": 39038.02999651966 }, { "content": " #[inline]\n\n fn from(a: Double) -> f64 {\n\n a.0\n\n }\n\n}\n\n\n\nimpl From<Double> for (f64, f64) {\n\n /// Converts a `Double` into a tuple of 
`f64`s.\n\n ///\n\n /// The components of the double become the components of the returned tuple. Note that,\n\n /// while the value of the first component is simply the `f64` cast of the `Double`\n\n /// itself, the second component encodes the next digits of the `Double` *plus* the\n\n /// rounding error in the first component. For that reason, it's not likely to be very\n\n /// useful outside of a `Double` context.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::Double;\n\n /// let (a, b) = <(f64, f64)>::from(Double::PI);\n\n /// assert!(a == 3.141592653589793e0);\n", "file_path": "src/double/from.rs", "rank": 84, "score": 38784.04977286927 }, { "content": "impl From<Double> for f64 {\n\n /// Converts a `Double` into an `f64`.\n\n ///\n\n /// This will lose precision if the second component of the `Double` is not 0, but it\n\n /// will not lose range.\n\n ///\n\n /// No other conversions from `Double` to numeric types are provided, as every other one\n\n /// has the capability of losing range (for example, no other type could be used to\n\n /// represent `dd!(1e308)`). Casts can be made from the `f64` provided by this function\n\n /// to other numeric types as needed.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::Double;\n\n /// let a = Double::PI;\n\n /// let x = f64::from(a);\n\n ///\n\n /// let diff = (x - std::f64::consts::PI).abs();\n\n /// assert!(diff < 1e-15);\n\n /// ```\n", "file_path": "src/double/from.rs", "rank": 85, "score": 38781.099972825505 }, { "content": "// These are simple enough - since integers are inherently dyadic (as long as they fit into\n\n// `f64`s - see below), they can just be cast to `f64`s and sent directly into the `Double`\n\n// constructor.\n\n//\n\n// The exceptions are `i64` and `u64`, which don't fit into `f64`s. They get their own\n\n// separate (non-macro) functions that split them into two 32-bit parts which are then\n\n// renormalized into a proper `Double`.\n\n\n\nmacro_rules! 
from_int_impl {\n\n ($(\n\n $(#[$m:meta])*\n\n $t:ty\n\n )*) => ($(\n\n $(#[$m])*\n\n impl From<$t> for Double {\n\n #[inline]\n\n fn from(a: $t) -> Double {\n\n Double(a.into(), 0.0)\n\n }\n\n }\n", "file_path": "src/double/from.rs", "rank": 86, "score": 38780.92201721527 }, { "content": " /// assert!(b == 1.2246467991473532e-16); // *not* the next 16 digits of π\n\n /// ```\n\n #[inline]\n\n fn from(a: Double) -> (f64, f64) {\n\n (a.0, a.1)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n // f32 tests\n\n test_all_exact!(\n\n f32_int:\n\n Double(1.0, 0.0),\n\n dd!(1.0f32);\n\n f32_float:\n\n Double(1.203125, 0.0),\n\n dd!(1.203125f32);\n", "file_path": "src/double/from.rs", "rank": 87, "score": 38780.74868200747 }, { "content": " /// ```\n\n /// # use qd::Double;\n\n /// let d = Double::ONE;\n\n /// assert!(d[0] == 1.0);\n\n /// assert!(d[1] == 0.0);\n\n /// ```\n\n fn index(&self, idx: usize) -> &f64 {\n\n match idx {\n\n 0 => &self.0,\n\n 1 => &self.1,\n\n _ => panic!(\n\n \"Index of double-double out of range (must be in range [0, 1]): {}\",\n\n idx\n\n ),\n\n }\n\n }\n\n}\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/double.rs", "rank": 88, "score": 38780.18075168609 }, { "content": "// Copyright (c) 2021 Thomas Otterson\n\n//\n\n// This software is released under the MIT License.\n\n// https://opensource.org/licenses/MIT\n\n\n\nuse crate::common::utils as u;\n\nuse crate::double::Double;\n\nuse std::f64;\n\n\n\n#[inline]\n", "file_path": "src/double/from.rs", "rank": 89, "score": 38780.16065353304 }, { "content": "}\n\n\n\nimpl Index<usize> for Double {\n\n type Output = f64;\n\n\n\n /// Returns one of the components of the `Double`.\n\n ///\n\n /// Using index `0` will return the first component and using index `1` will return the\n\n /// second.\n\n ///\n\n /// One capability that is *not* provided is mutable indexing; ensuring that a `Double`\n\n /// is normalized would be impossible if they could be 
individually changed at will.\n\n /// `Double`s are immutable like any other number; if you need a new value for a\n\n /// `Double`, you should simply create a new `Double`.\n\n ///\n\n /// This is primarily provided for making certain mathematical algorithms easier to\n\n /// implement. There isn't a lot meaning to an individual component of a `Double` other\n\n /// than the first.\n\n ///\n\n /// # Examples\n", "file_path": "src/double.rs", "rank": 90, "score": 38780.06127182421 }, { "content": " /// ```\n\n ///\n\n /// [`new`]: #method.new\n\n #[inline]\n\n fn from((a, b): (f64, f64)) -> Double {\n\n Double(a, b)\n\n }\n\n}\n\n\n\nimpl From<&str> for Double {\n\n /// Parses a string to create a `Double`.\n\n ///\n\n /// The parser works pretty similarly to parsers for `f32` and `f64`. It will fail if\n\n /// characters are present that are not digits, decimal points, signs, or exponent\n\n /// markers. It will also fail if there are multiples of these or if they're in the\n\n /// wrong places; two decimal points or a negative sign after the number will both be\n\n /// rejected, for instance.\n\n ///\n\n /// Failure will return [`NAN`]. This can be an issue because parsing the string `\"nan\"`\n\n /// *also* produces [`NAN`]. For this reason it's suggested to use [`from_str`] (or its\n", "file_path": "src/double/from.rs", "rank": 91, "score": 38779.5612730086 }, { "content": "}\n\n\n\nimpl From<(f64, f64)> for Double {\n\n /// Generates a `Double` from a 2-tuple of `f64`s.\n\n ///\n\n /// This conversion acts like [`new`] does: it assumes that if you're creating a\n\n /// `Double` out of a pair of numbers, you already know what you want those numbers to\n\n /// be. Therefore it neither renormalizes or accounts for rounding error.\n\n ///\n\n /// No other `From` implementations are provided for tuples. 
There is no way to provide\n\n /// a pre-normalized pair of integers, and since tuple conversion doesn't adjust for\n\n /// rounding error, it's better to make the user explicity cast `f32`s first in the\n\n /// manner of their choosing.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::Double;\n\n /// // These are the components used to define Double::PI\n\n /// let d = Double::from((3.141592653589793e0, 1.2246467991473532e-16));\n\n /// assert!(d == Double::PI);\n", "file_path": "src/double/from.rs", "rank": 92, "score": 38779.064324196464 }, { "content": " )*);\n\n}\n\n\n\nfrom_int_impl! {\n\n /// Generates a `Double` from an `i8`.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::Double;\n\n /// let x = -128i8;\n\n /// let a = Double::from(x);\n\n /// assert!(a.to_string() == \"-128\");\n\n /// ```\n\n i8\n\n /// Generates a `Double` from a `u8`.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::Double;\n\n /// let x = 255u8;\n", "file_path": "src/double/from.rs", "rank": 93, "score": 38778.78940511895 }, { "content": " from_u64(a)\n\n }\n\n}\n\n\n\nimpl From<i64> for Double {\n\n /// Generates a `Double` from an `i64`.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::Double;\n\n /// let x = -9_223_372_036_854_775_808i64;\n\n /// let a = Double::from(x);\n\n /// assert!(a.to_string() == \"-9223372036854775808\");\n\n /// ```\n\n fn from(a: i64) -> Double {\n\n from_i64(a)\n\n }\n\n}\n\n\n\n// FROM FLOAT IMPLEMENTATIONS\n", "file_path": "src/double/from.rs", "rank": 94, "score": 38777.95254788096 }, { "content": " /// # Examples\n\n /// ```\n\n /// # use qd::Double;\n\n /// // Exactly representable in binary\n\n /// let x = 0.9921875f32;\n\n /// let a = Double::from(x);\n\n /// assert!(a.to_string() == \"0.9921875\");\n\n ///\n\n /// // Xot exactly representable in binary\n\n /// let x = 0.9921876f32;\n\n /// let a = Double::from(x);\n\n /// assert!(a.to_string() == \"0.9921876\");\n\n /// ```\n\n f32\n\n /// Generates a `Double` 
from an `f64`.\n\n ///\n\n /// This function *does* account for floating point rounding error. Even though the\n\n /// first component of a `Double` is enough to fit an `f64`, if that `f64` is not\n\n /// exactly representable in binary, then the second component of the `Double` will\n\n /// account for the rounding error.\n", "file_path": "src/double/from.rs", "rank": 95, "score": 38777.09261801574 }, { "content": " ///\n\n /// Note that in order to do this, the `f64` needs to be parsed digit by digit. While\n\n /// the parser does work quite fast with integers or any `f64` that is represented\n\n /// perfectly in binary (any number that can be represented as a fraction with a power\n\n /// of 2 in the denominator), it's not a particularly fast operation otherwise.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::Double;\n\n /// // Exactly representable in binary\n\n /// let x = 0.999969482421875f64;\n\n /// let a = Double::from(x);\n\n /// assert!(a.to_string() == \"0.999969482421875\");\n\n ///\n\n /// // Not exactly representable in binary\n\n /// let x = 0.999969482421876f64;\n\n /// let a = Double::from(x);\n\n /// assert!(a.to_string() == \"0.999969482421876\");\n\n /// ```\n\n f64\n", "file_path": "src/double/from.rs", "rank": 96, "score": 38777.0669307817 }, { "content": " /// let a = Double::from(x);\n\n /// assert!(a.to_string() == \"4294967295\");\n\n /// ```\n\n u32\n\n}\n\n\n\n// Separate implementations for the 64-bit integers because they require splitting to fit\n\n// into 53-bit mantissas, so their code is different.\n\n\n\nimpl From<u64> for Double {\n\n /// Generates a `Double` from a `u64`.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::Double;\n\n /// let x = 18_446_744_073_709_551_615u64;\n\n /// let a = Double::from(x);\n\n /// assert!(a.to_string() == \"18446744073709551615\");\n\n /// ```\n\n fn from(a: u64) -> Double {\n", "file_path": "src/double/from.rs", "rank": 97, "score": 38776.84888590842 }, { "content": " /// 
```\n\n #[inline]\n\n fn rem(self, other: &Double) -> Double {\n\n let n = self.div(*other).floor();\n\n *self - *other * n\n\n }\n\n}\n\n\n\nimpl Rem<&Double> for Double {\n\n type Output = Double;\n\n\n\n /// Divides this `Double` by a reference to another, producing a new `Double` of the\n\n /// remainder as a result. This operation uses floored division.\n\n ///\n\n /// This implements the `%` operator between a `Double` and a reference to a `Double`.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::{dd, Double};\n\n /// let x = Double::PI % &Double::E;\n", "file_path": "src/double/rem.rs", "rank": 98, "score": 22.912531758501547 }, { "content": " /// ```\n\n #[inline]\n\n fn rem(self, other: Double) -> Double {\n\n let n = self.div(other).floor();\n\n *self - other * n\n\n }\n\n}\n\n\n\nimpl RemAssign for Double {\n\n /// Divides this `Double` by another, modifying this one to equal the remainder. This\n\n /// operation uses floored division.\n\n ///\n\n /// This implements the `%=` operator between two `Double`s.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// # use qd::{dd, Double};\n\n /// let mut x = Double::PI;\n\n /// x %= Double::E;\n\n /// let xpected = dd!(\"0.4233108251307480031023559119268\");\n", "file_path": "src/double/rem.rs", "rank": 99, "score": 20.2733716929986 } ]
Rust
rs/orchestrator/src/error.rs
Deland-Labs/ic
047172b01e0afc0e61448669d4ec98b2425c6853
use ic_http_utils::file_downloader::FileDownloadError; use ic_types::replica_version::ReplicaVersionParseError; use ic_types::{registry::RegistryClientError, NodeId, RegistryVersion, ReplicaVersion, SubnetId}; use std::error::Error; use std::fmt; use std::io; use std::path::{Path, PathBuf}; use std::process::Command; pub type OrchestratorResult<T> = Result<T, OrchestratorError>; #[derive(Debug)] #[allow(clippy::enum_variant_names)] pub enum OrchestratorError { NodeUnassignedError(NodeId, RegistryVersion), SubnetMissingError(SubnetId, RegistryVersion), RegistryClientError(RegistryClientError), MakeRegistryCupError(SubnetId, RegistryVersion), ReplicaVersionMissingError(ReplicaVersion, RegistryVersion), ReplicaVersionParseError(ReplicaVersionParseError), IoError(String, io::Error), FileDownloadError(FileDownloadError), ExecError(PathBuf, exec::Error), InvalidConfigurationError(String), UpgradeError(String), } impl OrchestratorError { pub(crate) fn file_write_error(file_path: &Path, e: io::Error) -> Self { OrchestratorError::IoError(format!("Failed to write to file: {:?}", file_path), e) } pub(crate) fn dir_create_error(dir: &Path, e: io::Error) -> Self { OrchestratorError::IoError(format!("Failed to create dir: {:?}", dir), e) } pub(crate) fn invalid_configuration_error(msg: impl ToString) -> Self { OrchestratorError::InvalidConfigurationError(msg.to_string()) } pub(crate) fn file_command_error(e: io::Error, cmd: &Command) -> Self { OrchestratorError::IoError(format!("Failed to executing command: {:?}", cmd), e) } } impl fmt::Display for OrchestratorError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { OrchestratorError::NodeUnassignedError(node_id, registry_version) => write!( f, "Node {:?} is not found in any subnet at registry version {:?}", node_id, registry_version ), OrchestratorError::RegistryClientError(e) => write!(f, "{:?}", e), OrchestratorError::ReplicaVersionMissingError(replica_version, registry_version) => { write!( f, "Replica 
version {} was not found in the Registry at registry version {:?}", replica_version, registry_version ) } OrchestratorError::IoError(msg, e) => { write!(f, "IO error, message: {:?}, error: {:?}", msg, e) } OrchestratorError::FileDownloadError(e) => write!(f, "File download error: {:?}", e), OrchestratorError::ExecError(path, e) => write!( f, "Failed to exec new Orchestrator process: {:?}, error: {:?}", path, e ), OrchestratorError::InvalidConfigurationError(msg) => { write!(f, "Invalid configuration: {}", msg) } OrchestratorError::SubnetMissingError(subnet_id, registry_version) => write!( f, "Subnet ID {:?} does not exist in the Registry at registry version {:?}", subnet_id, registry_version ), OrchestratorError::ReplicaVersionParseError(e) => { write!(f, "Failed to parse replica version: {}", e) } OrchestratorError::MakeRegistryCupError(subnet_id, registry_version) => write!( f, "Failed to construct the genesis/recovery CUP, subnet_id: {}, registry_version: {}", subnet_id, registry_version, ), OrchestratorError::UpgradeError(msg) => write!(f, "Failed to upgrade: {}", msg), } } } impl From<FileDownloadError> for OrchestratorError { fn from(e: FileDownloadError) -> Self { OrchestratorError::FileDownloadError(e) } } impl Error for OrchestratorError {}
use ic_http_utils::file_downloader::FileDownloadError; use ic_types::replica_version::ReplicaVersionParseError; use ic_types::{registry::RegistryClientError, NodeId, RegistryVersion, ReplicaVersion, SubnetId}; use std::error::Error; use std::fmt; use std::io; use std::path::{Path, PathBuf}; use std::process::Command; pub type OrchestratorResult<T> = Result<T, OrchestratorError>; #[derive(Debug)] #[allow(clippy::enum_variant_names)] pub enum OrchestratorError { NodeUnassignedError(NodeId, RegistryVersion), SubnetMissingError(SubnetId, RegistryVersion), RegistryClientError(RegistryClientError), MakeRegistryCupError(SubnetId, RegistryVersion), ReplicaVersionMissingError(ReplicaVersion, RegistryVersion), ReplicaVersionParseError(ReplicaVersionParseError), IoError(String, io::Error), FileDownloadError(FileDownloadError), ExecError(PathBuf, exec::Error), InvalidConfigurationError(String), UpgradeError(String), } impl OrchestratorError { pub(crate) fn file_write_error(file_path: &Path, e: io::Error) -> Self { OrchestratorError::IoError(format!("Failed to write to file: {:?}", file_path), e) } pub(crate) fn dir_create_error(dir: &Path, e: io::Error) -> Self { OrchestratorError::IoError(format!("Failed to create dir: {:?}", dir), e) } pub(crate) fn invalid_configuration_error(msg: impl ToString) -> Self { OrchestratorError::InvalidConfigurationError(msg.to_string()) } pub(crate) fn file_command_error(e: io::Error, cmd: &Command) -> Self { OrchestratorError::IoError(format!("Failed to executing command: {:?}", cmd), e) } } impl fmt::Display for OrchestratorError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { OrchestratorError::NodeUnassignedError(node_id, registry_version) => write!( f, "Node {:?} is not found in any subnet at registry version {:?}", node_id, registry_version ), OrchestratorError::RegistryClientError(e) => write!(f, "{:?}", e), OrchestratorError::ReplicaVersionMissingError(replica_version, registry_version) => { write!( f, "Replica 
version {} was not found in the Registry at registry version {:?}", replica_version, registry_version ) } OrchestratorError::IoError(msg, e) => { write!(f, "IO error, message: {:?}, error: {:?}", msg, e) } OrchestratorError::FileDownloadError(e) => write!(f, "File download error: {:?}", e), OrchestratorError::ExecError(path, e) => write!( f, "Failed to exec new Orchestrator process: {:?}, error: {:?}", path, e ), OrchestratorError::InvalidConfigurationError(msg) => { write!(f, "Invalid configuration: {}", msg) } OrchestratorError::SubnetMissingError(subnet_id, registry_version) => write!( f, "Subnet ID {:?} does not exist in the Registry at registry version {:?}", subnet_id, registry_version ),
} impl From<FileDownloadError> for OrchestratorError { fn from(e: FileDownloadError) -> Self { OrchestratorError::FileDownloadError(e) } } impl Error for OrchestratorError {}
OrchestratorError::ReplicaVersionParseError(e) => { write!(f, "Failed to parse replica version: {}", e) } OrchestratorError::MakeRegistryCupError(subnet_id, registry_version) => write!( f, "Failed to construct the genesis/recovery CUP, subnet_id: {}, registry_version: {}", subnet_id, registry_version, ), OrchestratorError::UpgradeError(msg) => write!(f, "Failed to upgrade: {}", msg), } }
function_block-function_prefix_line
[ { "content": "fn some_checkpoint_dir(backup_dir: &Path, subnet_id: &SubnetId) -> Option<PathBuf> {\n\n let dir = backup_dir\n\n .join(\"data\")\n\n .join(subnet_id.to_string())\n\n .join(\"ic_state\");\n\n if !dir.exists() {\n\n return None;\n\n }\n\n let lcp = last_checkpoint(&dir);\n\n if lcp == 0 {\n\n return None;\n\n }\n\n Some(dir.join(format!(\"checkpoints/{:016x}\", lcp)))\n\n}\n\n\n", "file_path": "rs/tests/src/orchestrator/backup_manager.rs", "rank": 0, "score": 560321.202696938 }, { "content": "/// Finds the `file` in the directories listed in the `PATH` environment\n\n/// variable.\n\npub fn find_file_on_path(file: impl AsRef<OsStr>) -> Option<path::PathBuf> {\n\n find_file(file, env::var_os(\"PATH\")?.as_os_str())\n\n}\n\n\n", "file_path": "rs/utils/src/command.rs", "rank": 1, "score": 553189.506444132 }, { "content": "/// Generates a `RegistryError` to handle cases where a record in the registry was unexpectedly\n\n/// absent. This error is always considered persistent.\n\nfn not_found_error(what: &str, subnet_id: Option<SubnetId>) -> ReadRegistryError {\n\n let errmsg = match subnet_id {\n\n Some(subnet_id) => format!(\"'{} for subnet {}' not found\", what, subnet_id),\n\n None => format!(\"'{}' not found\", what),\n\n };\n\n ReadRegistryError::Persistent(errmsg)\n\n}\n\n\n\n/// A mapping from node IDs to public keys.\n\n/// The public key is a DER-encoded Ed25519 key.\n\npub(crate) type NodePublicKeys = BTreeMap<NodeId, Vec<u8>>;\n\n\n\nimpl BatchProcessorImpl {\n\n fn new(\n\n state_manager: Arc<dyn StateManager<State = ReplicatedState>>,\n\n state_machine: Box<dyn StateMachine>,\n\n registry: Arc<dyn RegistryClient>,\n\n bitcoin_config: BitcoinConfig,\n\n metrics: Arc<MessageRoutingMetrics>,\n\n log: ReplicaLogger,\n", "file_path": "rs/messaging/src/message_routing.rs", "rank": 2, "score": 544900.1740210088 }, { "content": "/// Open an existing file for writing to it.\n\npub fn open_existing_file_for_write<P>(path: P) -> 
io::Result<std::fs::File>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n std::fs::OpenOptions::new()\n\n .write(true)\n\n .read(true)\n\n .create(false)\n\n .open(&path)\n\n}\n\n\n\n#[cfg(target_family = \"unix\")]\n", "file_path": "rs/utils/src/fs.rs", "rank": 3, "score": 532202.1732916204 }, { "content": "/// Serialize given protobuf message to file `dest` in a crash-safe manner\n\npub fn write_protobuf_using_tmp_file<P>(dest: P, message: &impl prost::Message) -> io::Result<()>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n write_using_tmp_file(dest, |writer| {\n\n let encoded_message = message.encode_to_vec();\n\n writer.write_all(&encoded_message)?;\n\n Ok(())\n\n })\n\n}\n\n\n\n#[cfg(target_family = \"unix\")]\n", "file_path": "rs/utils/src/fs.rs", "rank": 4, "score": 527051.8043701953 }, { "content": "/// Converts a NodeId into its protobuf definition. Normally, we would use\n\n/// `impl From<NodeId> for pb::NodeId` here however we cannot as both\n\n/// `Id` and `pb::NodeId` are defined in other crates.\n\npub fn node_id_into_protobuf(id: NodeId) -> pb::NodeId {\n\n pb::NodeId {\n\n principal_id: Some(pb::PrincipalId::from(id.get())),\n\n }\n\n}\n\n\n", "file_path": "rs/types/types/src/lib.rs", "rank": 5, "score": 516589.15224868525 }, { "content": "/// Write to file `dest` using `action` in a crash-safe manner\n\n///\n\n/// A new temporary file `dest.tmp` will be created. If it already exists,\n\n/// it will be deleted first. The file will be opened in exclusive mode\n\n/// and `action` executed with the BufWriter to that file.\n\n///\n\n/// The buffer and file will then be fsynced followed by renaming the\n\n/// `dest.tmp` to `dest`. 
Target file `dest` will be overwritten in that\n\n/// process if it already exists.\n\n///\n\n/// After renaming, the parent directory of `dest` will be fsynced.\n\n///\n\n/// The function will fail if `dest` exists but is a directory.\n\npub fn write_using_tmp_file<P, F>(dest: P, action: F) -> io::Result<()>\n\nwhere\n\n P: AsRef<Path>,\n\n F: FnOnce(&mut io::BufWriter<&std::fs::File>) -> io::Result<()>,\n\n{\n\n let dest_tmp = get_tmp_for_path(&dest);\n\n\n\n {\n\n let file = create_file_exclusive_and_open(&dest_tmp)?;\n\n let mut w = io::BufWriter::new(&file);\n\n action(&mut w)?;\n\n w.flush()?;\n\n file.sync_all()?;\n\n }\n\n\n\n let dest = dest.as_ref();\n\n fs::rename(dest_tmp.as_path(), dest)?;\n\n\n\n sync_path(dest.parent().unwrap_or_else(|| Path::new(\"/\")))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rs/utils/src/fs.rs", "rank": 6, "score": 515030.27156969 }, { "content": "pub fn execute_command(cmd: Command) -> Result<Value> {\n\n let res = match cmd {\n\n Command::Snapshot {\n\n registry_spec,\n\n projection,\n\n } => {\n\n let snapshot = registry_spec_to_snapshot(registry_spec)?;\n\n let (normalized_snapshot, _) = normalization::normalize(snapshot.0);\n\n projection::project(normalized_snapshot.0, projection)\n\n }\n\n Command::CanisterToProto {\n\n start_version,\n\n latest_version,\n\n source_spec,\n\n path,\n\n } => {\n\n let (pb, latest_registry_version_in_pb) =\n\n registry_spec_to_delta_pb(source_spec, start_version, latest_version)?;\n\n\n\n let pb_path = if path.is_absolute() {\n", "file_path": "rs/registry/regedit/src/lib.rs", "rank": 7, "score": 514110.50914307975 }, { "content": "/// Converts a SubnetId into its protobuf definition. 
Normally, we would use\n\n/// `impl From<SubnetId> for pb::SubnetId` here however we cannot as both\n\n/// `Id` and `pb::SubnetId` are defined in other crates.\n\npub fn subnet_id_into_protobuf(id: SubnetId) -> pb::SubnetId {\n\n pb::SubnetId {\n\n principal_id: Some(pb::PrincipalId::from(id.get())),\n\n }\n\n}\n\n\n", "file_path": "rs/types/base_types/src/lib.rs", "rank": 8, "score": 511984.09369708854 }, { "content": "/// Converts a [`NodeId`] to a [`u64`].\n\n///\n\n/// This is meant to be used in tests only.\n\npub fn node_id_to_u64(node_id: NodeId) -> u64 {\n\n let id_vec = node_id.get().into_vec();\n\n let mut id_arr: [u8; 8] = [0; 8];\n\n id_arr.copy_from_slice(&id_vec[..8]);\n\n u64::from_le_bytes(id_arr)\n\n}\n\n\n", "file_path": "rs/types/types_test_utils/src/ids.rs", "rank": 9, "score": 505979.90772595606 }, { "content": "/// Returns a [`NodeId`] that can be used in tests.\n\npub fn new_node_test_id(i: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(i))\n\n}\n\n\n", "file_path": "rs/types/types_test_utils/src/ids.rs", "rank": 10, "score": 504205.6201615195 }, { "content": "/// Checks whether the given canister ID refers to the subnet (directly or as `IC_00`).\n\npub fn is_subnet_id(canister_id: CanisterId, own_subnet_id: SubnetId) -> bool {\n\n canister_id == IC_00 || canister_id.get_ref() == own_subnet_id.get_ref()\n\n}\n", "file_path": "rs/types/types/src/messages/ingress_messages.rs", "rank": 11, "score": 504112.03387151426 }, { "content": "/// Execute the given system [Command] in a blocking manner. 
Optionally return\n\n/// the commands output if it exists and execution was successful.\n\npub fn exec_cmd(command: &mut Command) -> RecoveryResult<Option<String>> {\n\n let output = command.output().map_err(|e| {\n\n RecoveryError::cmd_error(command, None, format!(\"Could not execute: {:?}\", e))\n\n })?;\n\n\n\n let exit_status = output.status;\n\n if !exit_status.success() {\n\n let mut output_string = String::from_utf8(output.stderr).map_err(|e| {\n\n RecoveryError::cmd_error(\n\n command,\n\n exit_status.code(),\n\n format!(\"Could not get stderr: {:?}\", e),\n\n )\n\n })?;\n\n if let Ok(mut s) = String::from_utf8(output.stdout) {\n\n s.push('\\n');\n\n s.push_str(&output_string);\n\n output_string = s;\n\n }\n\n return Err(RecoveryError::cmd_error(\n", "file_path": "rs/recovery/src/command_helper.rs", "rank": 12, "score": 501094.4744564724 }, { "content": "fn cup_error(message: impl Display, cup_path: &Path, error: impl Display) -> RecoveryError {\n\n RecoveryError::UnexpectedError(format!(\"{} ({}): {}\", message, cup_path.display(), error))\n\n}\n\n\n\npub(crate) fn canister_id_range_to_string(canister_id_range: &CanisterIdRange) -> String {\n\n format!(\"{}:{}\", canister_id_range.start, canister_id_range.end)\n\n}\n\n\n\npub(crate) fn canister_id_ranges_to_strings(canister_id_ranges: &[CanisterIdRange]) -> Vec<String> {\n\n canister_id_ranges\n\n .iter()\n\n .map(canister_id_range_to_string)\n\n .collect::<Vec<_>>()\n\n}\n\n\n\n/// Computes the state hash of the given checkpoint.\n\npub(crate) fn get_state_hash(checkpoint_dir: impl AsRef<Path>) -> RecoveryResult<String> {\n\n let manifest = manifest_from_path(checkpoint_dir.as_ref()).map_err(|e| {\n\n RecoveryError::CheckpointError(\n\n format!(\n", "file_path": "rs/recovery/subnet_splitting/src/utils.rs", "rank": 13, "score": 499956.2880068678 }, { "content": "/// Tests whether `file` exists in any of the directories listed in the `PATH`\n\n/// environment variable.\n\npub fn is_file_on_path(file: impl 
AsRef<OsStr>) -> bool {\n\n find_file_on_path(file).is_some()\n\n}\n\n\n", "file_path": "rs/utils/src/command.rs", "rank": 14, "score": 497762.40533709404 }, { "content": "/// Write a slice of slices to a file\n\n/// Replacement for std::io::Write::write_all_vectored as long as it's nightly rust only\n\npub fn write_all_vectored(file: &mut fs::File, bufs: &[&[u8]]) -> std::io::Result<()> {\n\n use io::ErrorKind;\n\n use io::IoSlice;\n\n\n\n let mut slices: Vec<IoSlice> = bufs.iter().map(|s| IoSlice::new(s)).collect();\n\n let mut front = 0;\n\n // Guarantee that bufs is empty if it contains no data,\n\n // to avoid calling write_vectored if there is no data to be written.\n\n while front < slices.len() && slices[front].is_empty() {\n\n front += 1;\n\n }\n\n while front < slices.len() {\n\n match file.write_vectored(&slices[front..]) {\n\n Ok(0) => {\n\n return Err(io::Error::new(\n\n ErrorKind::WriteZero,\n\n \"failed to write whole buffer\",\n\n ));\n\n }\n\n Ok(n) => {\n\n // drop n bytes from the front of the data\n\n advance_slices(&mut slices, &mut front, n, bufs);\n\n }\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => {}\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rs/utils/src/fs.rs", "rank": 15, "score": 496273.6737171307 }, { "content": "fn node_id(id: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(id))\n\n}\n\n\n", "file_path": "rs/types/types/src/crypto/threshold_sig/ni_dkg/config/errors/tests.rs", "rank": 16, "score": 494693.4661571331 }, { "content": "/// Extract the contents of a given .tar.gz file into `target_dir`\n\npub fn extract_tar_gz_into_dir(tar_gz_path: &Path, target_dir: &Path) -> FileDownloadResult<()> {\n\n let map_to_untar_error = |e| FileDownloadError::untar_error(tar_gz_path, e);\n\n\n\n let tar_gz_file =\n\n File::open(tar_gz_path).map_err(|e| FileDownloadError::file_open_error(tar_gz_path, e))?;\n\n\n\n let tar = GzDecoder::new(tar_gz_file);\n\n let mut archive = 
Archive::new(tar);\n\n archive.unpack(target_dir).map_err(map_to_untar_error)?;\n\n Ok(())\n\n}\n\n\n\npub type FileDownloadResult<T> = Result<T, FileDownloadError>;\n\n\n\n/// Enumerates the possible errors that Orchestrator may encounter\n\n#[derive(Debug)]\n\npub enum FileDownloadError {\n\n /// An IO error occurred\n\n IoError(String, io::Error),\n\n\n", "file_path": "rs/http_utils/src/file_downloader.rs", "rank": 17, "score": 490871.3386332244 }, { "content": "pub fn create_dir(path: &Path) -> RecoveryResult<()> {\n\n fs::create_dir_all(path).map_err(|e| RecoveryError::dir_error(path, e))\n\n}\n\n\n", "file_path": "rs/recovery/src/file_sync_helper.rs", "rank": 18, "score": 489657.7165481881 }, { "content": "pub fn ecdsa_msg_id(msg: &EcdsaMessage) -> EcdsaArtifactId {\n\n match msg {\n\n EcdsaMessage::EcdsaSignedDealing(object) => object.message_id(),\n\n EcdsaMessage::EcdsaDealingSupport(object) => object.message_id(),\n\n EcdsaMessage::EcdsaSigShare(object) => object.message_id(),\n\n EcdsaMessage::EcdsaComplaint(object) => object.message_id(),\n\n EcdsaMessage::EcdsaOpening(object) => object.message_id(),\n\n }\n\n}\n", "file_path": "rs/types/types/src/consensus/ecdsa.rs", "rank": 19, "score": 488857.20560851705 }, { "content": "/// A helper to read information from stable memory and write to a writer, using\n\n/// a buffer. 
Expects the input to be length-prefix encoded.\n\npub fn read(input: &mut impl Read, output: impl Write) -> std::io::Result<u32> {\n\n let prefix = input.read_u32::<byteorder::LittleEndian>()?;\n\n let mut piped_bytes = 0_u32;\n\n let trimmed = input.take(prefix as u64);\n\n let mut buffered_trimmed_reader = BufReader::new(trimmed);\n\n let mut buffered_writer = BufWriter::new(output);\n\n loop {\n\n let buf = buffered_trimmed_reader.fill_buf()?;\n\n let len = buf.len(); // Must be stored in variable to avoid borrow error\n\n if len == 0 {\n\n // no bytes read indicate EOF.\n\n break;\n\n }\n\n piped_bytes += len as u32;\n\n buffered_writer.write_all(buf)?;\n\n buffered_trimmed_reader.consume(len);\n\n }\n\n buffered_writer.flush()?;\n\n match prefix.cmp(&piped_bytes) {\n\n Ordering::Equal => Ok(prefix),\n", "file_path": "rs/rust_canisters/stable_reader/src/lib.rs", "rank": 20, "score": 487653.0765913598 }, { "content": "// NOTE: the secret keys corresponding to the public keys returned by this helper are lost\n\n// when the helper completes (they are erased when `_temp_dir` goes out of scope).\n\n// This is intended, as this helper is for creating a valid registry.\n\npub fn new_node_keys_and_node_id() -> (ValidNodePublicKeys, NodeId) {\n\n let (config, _temp_dir) = CryptoConfig::new_in_temp_dir();\n\n let npks = generate_node_keys_once(&config, None).unwrap_or_else(|_| {\n\n panic!(\n\n \"Generation of new node keys with CryptoConfig {:?} failed\",\n\n &config\n\n )\n\n });\n\n let node_id = npks.node_id();\n\n (npks, node_id)\n\n}\n\n\n", "file_path": "rs/nns/test_utils/src/registry.rs", "rank": 21, "score": 482862.7702419847 }, { "content": "fn get_subnet_record(snapshot: &RegistrySnapshot, subnet_id: SubnetId) -> SubnetRecord {\n\n get_value_from_snapshot(snapshot, make_subnet_record_key(subnet_id))\n\n .unwrap_or_else(|| panic!(\"Could not get subnet record for subnet: {subnet_id}\"))\n\n}\n\n\n", "file_path": 
"rs/registry/canister/src/invariants/replica_version.rs", "rank": 22, "score": 480139.51899690216 }, { "content": "/// Makes a key for a SubnetRecord registry entry.\n\npub fn make_subnet_record_key(subnet_id: SubnetId) -> String {\n\n format!(\"{}{}\", SUBNET_RECORD_KEY_PREFIX, subnet_id)\n\n}\n\n\n", "file_path": "rs/registry/keys/src/lib.rs", "rank": 23, "score": 478246.61757399724 }, { "content": "/// Makes a key for a NodeRecord registry entry.\n\npub fn make_node_record_key(node_id: NodeId) -> String {\n\n format!(\"{}{}\", NODE_RECORD_KEY_PREFIX, node_id.get())\n\n}\n\n\n", "file_path": "rs/registry/keys/src/lib.rs", "rank": 24, "score": 478028.23550336435 }, { "content": "// Loads the replica version from the file specified as argument on\n\n// orchestrator's start.\n\nfn load_version_from_file(logger: &ReplicaLogger, path: &Path) -> Result<ReplicaVersion, ()> {\n\n let contents = std::fs::read_to_string(path).map_err(|err| {\n\n error!(\n\n logger,\n\n \"Couldn't open the version file {:?}: {:?}\", path, err\n\n );\n\n })?;\n\n ReplicaVersion::try_from(contents.trim()).map_err(|err| {\n\n error!(\n\n logger,\n\n \"Couldn't parse the contents of {:?}: {:?}\", path, err\n\n );\n\n })\n\n}\n\n\n\nimpl Orchestrator {\n\n pub async fn new(args: OrchestratorArgs) -> Result<Self, OrchestratorInstantiationError> {\n\n args.create_dirs();\n\n let metrics_addr = args.get_metrics_addr();\n\n let config = args.get_ic_config();\n", "file_path": "rs/orchestrator/src/orchestrator.rs", "rank": 25, "score": 477334.2634746862 }, { "content": "/// From its protobuf definition convert to a SubnetId. 
Normally, we would\n\n/// use `impl TryFrom<pb::SubnetId> for SubnetId` here however we cannot as\n\n/// both `Id` and `pb::SubnetId` are defined in other crates.\n\npub fn subnet_id_try_from_protobuf(value: pb::SubnetId) -> Result<SubnetId, ProxyDecodeError> {\n\n let principal_id = PrincipalId::try_from(\n\n value\n\n .principal_id\n\n .ok_or(ProxyDecodeError::MissingField(\"SubnetId::principal_id\"))?,\n\n )?;\n\n Ok(SubnetId::from(principal_id))\n\n}\n\n\n\nimpl From<PrincipalIdError> for ProxyDecodeError {\n\n fn from(err: PrincipalIdError) -> Self {\n\n Self::InvalidPrincipalId(Box::new(err))\n\n }\n\n}\n\n\n\nimpl From<CanisterIdError> for ProxyDecodeError {\n\n fn from(err: CanisterIdError) -> Self {\n\n Self::InvalidCanisterId(Box::new(err))\n\n }\n\n}\n", "file_path": "rs/types/base_types/src/lib.rs", "rank": 26, "score": 475397.26576637244 }, { "content": "pub fn id_of_file(file: PathBuf) -> Result<FileId> {\n\n let mut reader = std::fs::File::open(file)?;\n\n let mut sha256_hasher = Sha256::new();\n\n std::io::copy(&mut reader, &mut sha256_hasher).unwrap();\n\n let digest = sha256_hasher.finish();\n\n Ok(FileId(hex::encode(digest)))\n\n}\n\n\n", "file_path": "rs/tests/src/driver/farm.rs", "rank": 27, "score": 473336.3815254605 }, { "content": "/// Formats all Rust files at the specified path.\n\n///\n\n/// If the path is a with .rs suffix, the function formats this file. If the\n\n/// path is a directory, this function formats all Rust files under this\n\n/// directory recursively.\n\npub fn rustfmt(path: impl AsRef<Path>) -> std::io::Result<()> {\n\n let path = path.as_ref();\n\n if path.is_dir() {\n\n for entry in std::fs::read_dir(path)? 
{\n\n let entry = entry?;\n\n rustfmt(entry.path())?;\n\n }\n\n Ok(())\n\n } else if path.extension() == Some(\"rs\".as_ref()) {\n\n rustfmt_file(path)\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "rs/utils/rustfmt/src/lib.rs", "rank": 28, "score": 472410.9473856597 }, { "content": "// Start helper functions\n\nfn make_create_subnet_payload(node_ids: Vec<NodeId>) -> CreateSubnetPayload {\n\n // create payload message\n\n CreateSubnetPayload {\n\n node_ids,\n\n subnet_id_override: None,\n\n ingress_bytes_per_block_soft_cap: 2 * 1024 * 1024,\n\n max_ingress_bytes_per_message: 60 * 1024 * 1024,\n\n max_ingress_messages_per_block: 1000,\n\n max_block_payload_size: 4 * 1024 * 1024,\n\n unit_delay_millis: 500,\n\n initial_notary_delay_millis: 1500,\n\n replica_version_id: ReplicaVersion::default().into(),\n\n dkg_interval_length: 0,\n\n dkg_dealings_per_block: 1,\n\n gossip_max_artifact_streams_per_peer: MAX_ARTIFACT_STREAMS_PER_PEER,\n\n gossip_max_chunk_wait_ms: MAX_CHUNK_WAIT_MS,\n\n gossip_max_duplicity: MAX_DUPLICITY,\n\n gossip_max_chunk_size: MAX_CHUNK_SIZE,\n\n gossip_receive_check_cache_size: RECEIVE_CHECK_PEER_SET_SIZE,\n\n gossip_pfn_evaluation_period_ms: PFN_EVALUATION_PERIOD_MS,\n", "file_path": "rs/registry/canister/tests/create_subnet.rs", "rank": 29, "score": 471059.85302436224 }, { "content": "/// From its protobuf definition convert to a NodeId. 
Normally, we would\n\n/// use `impl TryFrom<Option<pb::NodeId>> for NodeId` here however we cannot\n\n/// as both `Id` and `pb::NodeId` are defined in other crates.\n\npub fn node_id_try_from_option(value: Option<pb::NodeId>) -> Result<NodeId, ProxyDecodeError> {\n\n let value: pb::NodeId = try_from_option_field(value, \"NodeId missing\")?;\n\n let inner: pb::PrincipalId = try_from_option_field(value.principal_id, \"PrincipalId missing\")?;\n\n\n\n let principal_id = PrincipalId::try_from(inner)\n\n .map_err(|e| ProxyDecodeError::InvalidPrincipalId(Box::new(e)))?;\n\n Ok(NodeId::from(principal_id))\n\n}\n\n\n\npub struct NumInstructionsTag;\n\n/// Represents an amount of weighted instructions that can be used as the\n\n/// execution cutoff point for messages. This amount can be used to charge the\n\n/// respective amount of `Cycles` on a canister's balance for message execution.\n\npub type NumInstructions = AmountOf<NumInstructionsTag, u64>;\n\n\n\npub struct NumMessagesTag;\n\n/// Represents the number of messages.\n\npub type NumMessages = AmountOf<NumMessagesTag, u64>;\n\n\n\npub struct NumSlicesTag;\n", "file_path": "rs/types/types/src/lib.rs", "rank": 30, "score": 469533.54944623925 }, { "content": "fn list_files(dir: &Path) -> std::io::Result<Vec<PathBuf>> {\n\n fn go(dir: &Path, files: &mut Vec<PathBuf>) -> std::io::Result<()> {\n\n if dir.is_dir() {\n\n for entry in std::fs::read_dir(dir)? 
{\n\n let entry = entry?;\n\n let path = entry.path();\n\n if path.is_dir() {\n\n go(&path, files)?;\n\n } else {\n\n files.push(path.to_path_buf());\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n let mut buf = vec![];\n\n go(dir, &mut buf)?;\n\n for path in buf.iter_mut() {\n\n *path = path\n\n .strip_prefix(dir)\n\n .expect(\"failed to strip path prefix\")\n\n .to_path_buf();\n\n }\n\n buf.sort();\n\n Ok(buf)\n\n}\n\n\n", "file_path": "rs/test_utilities/compare_dirs/src/lib.rs", "rank": 31, "score": 468705.7428450885 }, { "content": "pub fn node_id(id: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(id))\n\n}\n\n\n", "file_path": "rs/crypto/test_utils/canister_threshold_sigs/src/lib.rs", "rank": 32, "score": 466060.51587220724 }, { "content": "// Traverses the given path and returns a list of all leaf directories.\n\nfn get_leaves(dir: &Path, leaves: &mut Vec<PathBuf>) -> std::io::Result<()> {\n\n if !dir.is_dir() {\n\n return Ok(());\n\n }\n\n let mut sub_directory_found = false;\n\n for entry in fs::read_dir(dir)? 
{\n\n let path = entry?.path();\n\n if path.is_dir() {\n\n sub_directory_found = true;\n\n get_leaves(&path, leaves)?;\n\n }\n\n }\n\n if !sub_directory_found {\n\n if let Some(path_name) = dir.to_str() {\n\n // We skip the folder lost+found, which is currently present on the backup\n\n // volume.\n\n if !path_name.contains(\"lost+found\") {\n\n leaves.push(dir.to_path_buf());\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rs/artifact_pool/src/backup.rs", "rank": 33, "score": 465623.6494296051 }, { "content": "// get_peers\n\n//\n\n// Get peer node ids for a replica\n\n//\n\n// Parameters\n\n// registry_node_list List of node belonging to a subnet\n\n// node_id node whose peers are to be looked up\n\npub fn get_peers(registry_node_list: &[(NodeId, NodeRecord)], node_id: NodeId) -> Vec<NodeId> {\n\n let mut node_ids: Vec<_> = registry_node_list.iter().map(|(id, _)| *id).collect();\n\n node_ids.retain(|n| (*n).get() != node_id.get());\n\n node_ids\n\n}\n\n\n\n// P2PTestContext\n\n//\n\n// Create a context for individual replicas participating in a test\n\npub struct P2PTestContext {\n\n pub node_num: u64, // u64 from which the replica id is derived\n\n pub node_id: NodeId, // replica id\n\n pub subnet_id: SubnetId, // Dummy test subnet ID\n\n pub metrics_registry: MetricsRegistry, // monitor metrics from various ICP layers\n\n pub test_synchronizer: P2PTestSynchronizer, // Provide basic inter-test synchronization\n\n pub _p2p_join_guard: Vec<Box<dyn JoinGuard>>, // p2p object to drive the ICP stack\n\n}\n\n\n\nimpl P2PTestContext {\n\n pub fn new(\n", "file_path": "rs/test_utilities/src/p2p.rs", "rank": 34, "score": 465149.65793992765 }, { "content": "pub fn get_node_ids_from_subnet_record(\n\n subnet: &SubnetRecord,\n\n) -> Result<Vec<NodeId>, PrincipalIdBlobParseError> {\n\n subnet\n\n .membership\n\n .iter()\n\n .map(|n| PrincipalId::try_from(&n[..]).map(NodeId::from))\n\n .collect::<Result<Vec<_>, _>>()\n\n}\n\n\n", "file_path": 
"rs/registry/helpers/src/subnet.rs", "rank": 35, "score": 463909.11407456244 }, { "content": "/// Makes a key for a record for the catch up package contents.\n\npub fn make_catch_up_package_contents_key(subnet_id: SubnetId) -> String {\n\n format!(\"catch_up_package_contents_{}\", subnet_id)\n\n}\n\n\n", "file_path": "rs/registry/keys/src/lib.rs", "rank": 36, "score": 463685.6951814807 }, { "content": "/// Makes a key for a TLS certificate registry entry for a node.\n\npub fn make_crypto_tls_cert_key(node_id: NodeId) -> String {\n\n format!(\"{}{}\", CRYPTO_TLS_CERT_KEY_PREFIX, node_id.get())\n\n}\n\n\n", "file_path": "rs/registry/keys/src/lib.rs", "rank": 37, "score": 463508.48755566357 }, { "content": "pub fn mainnet_nns_subnet() -> SubnetId {\n\n SubnetId::new(\n\n PrincipalId::from_str(\"tdb26-jop6k-aogll-7ltgs-eruif-6kk7m-qpktf-gdiqx-mxtrf-vb5e6-eqe\")\n\n .unwrap(),\n\n )\n\n}\n\n\n", "file_path": "rs/messaging/src/message_routing/tests.rs", "rank": 38, "score": 461408.57749112323 }, { "content": "pub fn mainnet_app_subnet() -> SubnetId {\n\n SubnetId::new(\n\n PrincipalId::from_str(\"6pbhf-qzpdk-kuqbr-pklfa-5ehhf-jfjps-zsj6q-57nrl-kzhpd-mu7hc-vae\")\n\n .unwrap(),\n\n )\n\n}\n\n\n\n/// Tests `BatchProcessorImpl::try_to_read_registry()` successfully reads a snapshot of the mainnet\n\n/// registry.\n", "file_path": "rs/messaging/src/message_routing/tests.rs", "rank": 39, "score": 461408.57749112323 }, { "content": "pub fn set_of(node_ids: &[NodeId]) -> BTreeSet<NodeId> {\n\n let mut dealers = BTreeSet::new();\n\n node_ids.iter().for_each(|node_id| {\n\n dealers.insert(*node_id);\n\n });\n\n dealers\n\n}\n\n\n", "file_path": "rs/types/types/src/crypto/tests.rs", "rank": 40, "score": 459558.6507473853 }, { "content": "/// Makes a key for a threshold signature public key entry for a subnet.\n\npub fn make_crypto_threshold_signing_pubkey_key(subnet_id: SubnetId) -> String {\n\n format!(\"{}{}\", CRYPTO_THRESHOLD_SIGNING_KEY_PREFIX, subnet_id)\n\n}\n\n\n", 
"file_path": "rs/registry/keys/src/lib.rs", "rank": 41, "score": 458547.6533557454 }, { "content": "/// Get a SubnetRecord that does not fail invariant checks in Registry\n\npub fn get_invariant_compliant_subnet_record(node_ids: Vec<NodeId>) -> SubnetRecord {\n\n CreateSubnetPayload {\n\n unit_delay_millis: 10,\n\n gossip_retransmission_request_ms: 10_000,\n\n gossip_registry_poll_period_ms: 2000,\n\n gossip_pfn_evaluation_period_ms: 50,\n\n gossip_receive_check_cache_size: 1,\n\n gossip_max_duplicity: 1,\n\n gossip_max_chunk_wait_ms: 200,\n\n gossip_max_artifact_streams_per_peer: 1,\n\n replica_version_id: ReplicaVersion::default().into(),\n\n node_ids,\n\n ..Default::default()\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "rs/registry/canister/src/common/test_helpers.rs", "rank": 42, "score": 457512.17535882315 }, { "content": "/// Returns a [`SubnetId`] that can be used in tests.\n\npub fn subnet_test_id(i: u64) -> SubnetId {\n\n SubnetId::from(PrincipalId::new_subnet_test_id(i))\n\n}\n\n\n", "file_path": "rs/types/types_test_utils/src/ids.rs", "rank": 43, "score": 457042.95547617174 }, { "content": "/// Returns a [`NodeId`] that can be used in tests.\n\npub fn node_test_id(i: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(i))\n\n}\n\n\n", "file_path": "rs/types/types_test_utils/src/ids.rs", "rank": 44, "score": 456848.16232396686 }, { "content": "pub fn node(i: u64, subnet_id: Principal) -> Node {\n\n Node {\n\n id: node_test_id(1001 + i).get().0,\n\n subnet_id,\n\n addr: IpAddr::V4(Ipv4Addr::new(192, 168, 0, i as u8)),\n\n port: 8080,\n\n tls_certificate: valid_tls_certificate_and_validation_time()\n\n .0\n\n .certificate_der,\n\n replica_version: \"7742d96ddd30aa6b607c9d2d4093a7b714f5b25b\".to_string(),\n\n }\n\n}\n\n\n", "file_path": "rs/boundary_node/ic_boundary/src/persist/test.rs", "rank": 45, "score": 456411.43739993806 }, { "content": "fn modify_byte_in_file(file_path: PathBuf) -> std::io::Result<()> {\n\n let mut perms = 
fs::metadata(&file_path)?.permissions();\n\n #[allow(clippy::permissions_set_readonly_false)]\n\n perms.set_readonly(false);\n\n fs::set_permissions(&file_path, perms)?;\n\n let mut file = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create(true)\n\n .open(file_path)?;\n\n file.seek(SeekFrom::Start(0))?;\n\n let mut byte: [u8; 1] = [0];\n\n assert!(file.read(&mut byte)? == 1);\n\n byte[0] ^= 0x01;\n\n file.seek(SeekFrom::Start(0))?;\n\n file.write_all(&byte)\n\n}\n\n\n", "file_path": "rs/tests/src/orchestrator/backup_manager.rs", "rank": 46, "score": 454605.75213471486 }, { "content": "pub fn temp_crypto_component_with_fake_registry(node_id: NodeId) -> TempCryptoComponent {\n\n TempCryptoComponent::builder()\n\n .with_registry(empty_fake_registry())\n\n .with_node_id(node_id)\n\n .build()\n\n}\n\n\n", "file_path": "rs/test_utilities/src/crypto.rs", "rank": 47, "score": 453391.2584062981 }, { "content": "fn dir_exists_and_have_file(log: &Logger, dir: &PathBuf) -> bool {\n\n debug!(log, \"Check directory: {:?}\", dir);\n\n if !dir.exists() {\n\n debug!(log, \"Doesn't exists!\");\n\n return false;\n\n }\n\n debug!(log, \"Directory exists!\");\n\n _ = ls_path(log, dir);\n\n let have_file = fs::read_dir(dir)\n\n .expect(\"Should be able to read existing directory\")\n\n .next()\n\n .is_some();\n\n debug!(log, \"Check does it contain file(s): {}\", have_file);\n\n have_file\n\n}\n\n\n", "file_path": "rs/tests/src/orchestrator/backup_manager.rs", "rank": 48, "score": 452722.61158854346 }, { "content": "fn subnet_id(id: u64) -> SubnetId {\n\n SubnetId::from(PrincipalId::new_subnet_test_id(id))\n\n}\n", "file_path": "rs/registry/helpers/src/crypto/tests.rs", "rank": 49, "score": 452296.5117347301 }, { "content": "fn node_id(id: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(id))\n\n}\n\n\n", "file_path": "rs/registry/helpers/src/crypto/tests.rs", "rank": 50, "score": 452092.2927283505 }, { "content": "fn create_if_not_exists(dir: PathBuf) -> 
PathBuf {\n\n if !dir.exists() {\n\n create_dir_all(&dir).unwrap_or_else(|e| panic!(\"Failure creating directory {dir:?}: {e}\"));\n\n }\n\n dir\n\n}\n\n\n", "file_path": "rs/backup/src/backup_helper.rs", "rank": 51, "score": 451021.93569473136 }, { "content": "fn subnet_id(id: u64) -> SubnetId {\n\n SubnetId::from(PrincipalId::new_subnet_test_id(id))\n\n}\n\n\n", "file_path": "rs/types/types/src/crypto/canister_threshold_sig/idkg/tests/index.rs", "rank": 52, "score": 449269.2226654714 }, { "content": "fn node_id(id: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(id))\n\n}\n\n\n", "file_path": "rs/types/types/src/crypto/canister_threshold_sig/idkg/tests/receivers.rs", "rank": 53, "score": 449078.3158817168 }, { "content": "fn node_id(id: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(id))\n\n}\n\n\n", "file_path": "rs/types/types/src/crypto/threshold_sig/ni_dkg/config/tests.rs", "rank": 54, "score": 449078.3158817168 }, { "content": "fn node_id(id: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(id))\n\n}\n\n\n", "file_path": "rs/types/types/src/crypto/canister_threshold_sig/idkg/tests/dealers.rs", "rank": 55, "score": 449078.3158817168 }, { "content": "fn node_id(id: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(id))\n\n}\n", "file_path": "rs/types/types/src/crypto/canister_threshold_sig/idkg/tests/index.rs", "rank": 56, "score": 449078.3158817168 }, { "content": "/// Compute the SHA256 of a file and return a hex-encoded string of the hash\n\npub fn compute_sha256_hex(path: &Path) -> FileDownloadResult<String> {\n\n let mut binary_file =\n\n fs::File::open(path).map_err(|e| FileDownloadError::file_open_error(path, e))?;\n\n\n\n let mut hasher = Sha256::new();\n\n std::io::copy(&mut binary_file, &mut hasher)\n\n .map_err(|e| FileDownloadError::compute_hash_error(path, e))?;\n\n\n\n Ok(hex::encode(hasher.finish()))\n\n}\n\n\n", "file_path": "rs/http_utils/src/file_downloader.rs", "rank": 57, 
"score": 446459.51389725134 }, { "content": "/// Formats a single Rust source file.\n\nfn rustfmt_file(path: impl AsRef<Path>) -> std::io::Result<()> {\n\n let rustfmt_path = std::env::var(\"RUSTFMT\").unwrap_or_else(|_| \"rustfmt\".to_owned());\n\n Command::new(rustfmt_path)\n\n .arg(\"--emit\")\n\n .arg(\"files\")\n\n .arg(path.as_ref())\n\n .output()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rs/utils/rustfmt/src/lib.rs", "rank": 58, "score": 445940.3619096336 }, { "content": "fn node_id(id: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(id))\n\n}\n\n\n", "file_path": "rs/types/types/src/crypto/threshold_sig/ni_dkg/config/dealers/tests.rs", "rank": 59, "score": 445333.41922526446 }, { "content": "fn node_id(id: u64) -> NodeId {\n\n NodeId::new(PrincipalId::new_node_test_id(id))\n\n}\n\n\n", "file_path": "rs/types/types/src/crypto/threshold_sig/ni_dkg/config/receivers/tests.rs", "rank": 60, "score": 445333.41922526446 }, { "content": "pub fn print_height_info(logger: &Logger, registry_helper: &RegistryHelper, subnet_id: SubnetId) {\n\n info!(logger, \"Collecting node heights from metrics...\");\n\n info!(logger, \"Select a node with highest finalization height:\");\n\n match get_node_heights_from_metrics(logger, registry_helper, subnet_id) {\n\n Ok(heights) => info!(logger, \"{:#?}\", heights),\n\n Err(err) => warn!(logger, \"Failed to query height info: {:?}\", err),\n\n }\n\n}\n\n\n", "file_path": "rs/recovery/src/cli.rs", "rank": 61, "score": 443481.3450361765 }, { "content": "/// Copies only valid regions of file preserving the sparseness\n\n/// of the file. Also utilizes copy_file_range which performs\n\n/// in_kernel copy without the additional cost of transferring data\n\n/// from the kernel to user space and then back into the kernel. 
Also\n\n/// on certain file systems that support COW (btrfs/zfs), copy_file_range\n\n/// is a metadata operation and is extremely efficient \n\npub fn copy_file_sparse(from: &Path, to: &Path) -> io::Result<u64> {\n\n if *ic_sys::IS_WSL {\n\n return copy_file_sparse_portable(from, to);\n\n }\n\n\n\n use cvt::*;\n\n use fs::OpenOptions;\n\n use io::{ErrorKind, Read};\n\n use libc::{ftruncate64, lseek64};\n\n use std::os::unix::{fs::OpenOptionsExt, fs::PermissionsExt, io::AsRawFd};\n\n\n\n unsafe fn copy_file_range(\n\n fd_in: libc::c_int,\n\n off_in: *mut libc::loff_t,\n\n fd_out: libc::c_int,\n\n off_out: *mut libc::loff_t,\n\n len: libc::size_t,\n\n flags: libc::c_uint,\n\n ) -> libc::c_long {\n\n libc::syscall(\n", "file_path": "rs/utils/src/fs.rs", "rank": 62, "score": 443219.2774417084 }, { "content": "fn create_path(subnet_id: SubnetId, label: &[u8]) -> Vec<Label<StorageType>> {\n\n vec![\n\n SUBNET_LABEL.into(),\n\n subnet_id.get().as_slice().into(),\n\n label.into(),\n\n ]\n\n}\n", "file_path": "rs/recovery/subnet_splitting/src/agent_helper.rs", "rank": 63, "score": 442382.5612205724 }, { "content": "fn subnet_id(id: u64) -> SubnetId {\n\n SubnetId::from(PrincipalId::new_subnet_test_id(id))\n\n}\n", "file_path": "rs/types/types/src/crypto/canister_threshold_sig/idkg/tests/verify_transcript_params.rs", "rank": 64, "score": 441884.6679537303 }, { "content": "fn node_id(id: u64) -> NodeId {\n\n NodeId::from(PrincipalId::new_node_test_id(id))\n\n}\n\n\n", "file_path": "rs/types/types/src/crypto/canister_threshold_sig/idkg/tests/verify_transcript_params.rs", "rank": 65, "score": 441698.5730255961 }, { "content": "#[cfg(target_family = \"unix\")]\n\npub fn write_atomically_using_tmp_file<PDst, PTmp, F>(\n\n dst: PDst,\n\n tmp: PTmp,\n\n action: F,\n\n) -> io::Result<()>\n\nwhere\n\n F: FnOnce(&mut io::BufWriter<&std::fs::File>) -> io::Result<()>,\n\n PDst: AsRef<Path>,\n\n PTmp: AsRef<Path>,\n\n{\n\n let mut cleanup = OnScopeExit::new(|| {\n\n let _ = 
fs::remove_file(tmp.as_ref());\n\n });\n\n\n\n let f = fs::OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .truncate(true) // Otherwise we'd overwrite existing content\n\n .open(tmp.as_ref())?;\n\n {\n", "file_path": "rs/utils/src/fs.rs", "rank": 66, "score": 441263.36119745485 }, { "content": "pub fn write_file(file: &Path, content: String) -> RecoveryResult<()> {\n\n let mut f = File::create(file).map_err(|e| RecoveryError::file_error(file, e))?;\n\n write!(f, \"{}\", content).map_err(|e| RecoveryError::file_error(file, e))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rs/recovery/src/file_sync_helper.rs", "rank": 67, "score": 440399.7888582073 }, { "content": "pub fn execute_inspect_message_bench(c: &mut Criterion) {\n\n // List of benchmarks: benchmark id (name), WAT, expected instructions.\n\n let benchmarks: Vec<common::Benchmark> = vec![\n\n common::Benchmark(\n\n \"ic0_msg_method_name_size()\",\n\n Module::InspectMessage.from_ic0(\"msg_method_name_size\", NoParams, Result::I32),\n\n 11_000_005,\n\n ),\n\n common::Benchmark(\n\n \"ic0_msg_method_name_copy()/1B\",\n\n Module::InspectMessage.from_ic0(\"msg_method_name_copy\", Params3(0, 0, 1), Result::No),\n\n 34_000_005,\n\n ),\n\n common::Benchmark(\n\n \"ic0_msg_method_name_copy()/30B\",\n\n Module::InspectMessage.from_ic0(\"msg_method_name_copy\", Params3(0, 0, 20), Result::No),\n\n 53_000_005,\n\n ),\n\n common::Benchmark(\n\n \"ic0_accept_message()*\",\n", "file_path": "rs/execution_environment/benches/execute_inspect_message.rs", "rank": 68, "score": 438863.82455247093 }, { "content": "/// Makes a key for a crypto key registry entry for a node.\n\npub fn make_crypto_node_key(node_id: NodeId, key_purpose: KeyPurpose) -> String {\n\n format!(\n\n \"{}{}_{}\",\n\n CRYPTO_RECORD_KEY_PREFIX,\n\n node_id.get(),\n\n key_purpose as usize\n\n )\n\n}\n\n\n", "file_path": "rs/registry/keys/src/lib.rs", "rank": 69, "score": 438260.12320641894 }, { "content": "// Copied from hyper source code.\n\nfn 
h2_to_io_error(e: h2::Error) -> std::io::Error {\n\n if e.is_io() {\n\n e.into_io().unwrap()\n\n } else {\n\n std::io::Error::new(std::io::ErrorKind::Other, e)\n\n }\n\n}\n\n\n\n/// Our role in a connection\n\n#[derive(Debug, PartialEq, Eq, Copy, Clone, AsRefStr)]\n\n#[strum(serialize_all = \"snake_case\")]\n\npub(crate) enum ConnectionRole {\n\n /// We connect to the peer as a client\n\n Client,\n\n\n\n /// We are the server\n\n Server,\n\n}\n\n\n\n/// State about the server ports we are listening on\n", "file_path": "rs/transport/src/types.rs", "rank": 70, "score": 436407.7939539318 }, { "content": "pub fn remove_dir(path: &Path) -> RecoveryResult<()> {\n\n if path_exists(path)? {\n\n fs::remove_dir_all(path).map_err(|e| RecoveryError::dir_error(path, e))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use tempfile::tempdir;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn path_exists_should_return_true() {\n\n let tmp = tempdir().expect(\"Couldn't create a temp test directory\");\n\n\n\n assert!(path_exists(tmp.path()).unwrap());\n\n }\n", "file_path": "rs/recovery/src/file_sync_helper.rs", "rank": 71, "score": 434687.62078959343 }, { "content": "/// Assert that the given file has the given hash\n\npub fn check_file_hash(path: &Path, expected_sha256_hex: &str) -> FileDownloadResult<()> {\n\n let computed_sha256_hex = compute_sha256_hex(path)?;\n\n\n\n if computed_sha256_hex != expected_sha256_hex {\n\n Err(FileDownloadError::file_hash_mismatch_error(\n\n computed_sha256_hex,\n\n expected_sha256_hex.into(),\n\n path.to_path_buf(),\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "rs/http_utils/src/file_downloader.rs", "rank": 72, "score": 434547.8922142038 }, { "content": "pub fn read_dir(path: &Path) -> RecoveryResult<ReadDir> {\n\n fs::read_dir(path).map_err(|e| RecoveryError::dir_error(path, e))\n\n}\n\n\n", "file_path": "rs/recovery/src/file_sync_helper.rs", "rank": 73, "score": 433535.4681565479 }, { "content": "/// Build 
protos using prost_build.\n\npub fn generate_prost_files(def: &Path, out: &Path) {\n\n let proto_file = def.join(\"ic_base_types/pb/v1/types.proto\");\n\n\n\n let mut config = Config::new();\n\n config.protoc_arg(\"--experimental_allow_proto3_optional\");\n\n std::fs::create_dir_all(out).expect(\"failed to create output directory\");\n\n config.out_dir(out);\n\n\n\n config.type_attribute(\n\n \"ic_base_types.pb.v1.PrincipalId\",\n\n [\"#[derive(candid::CandidType, candid::Deserialize, comparable::Comparable)]\"].join(\" \"),\n\n );\n\n config.compile_protos(&[proto_file], &[def]).unwrap();\n\n ic_utils_rustfmt::rustfmt(out).expect(\"failed to rustfmt protobufs\");\n\n}\n", "file_path": "rs/types/base_types/protobuf_generator/src/lib.rs", "rank": 74, "score": 433063.89183364727 }, { "content": "/// This creates a CatchupPackageContents for nodes that would be part of as subnet\n\n/// which is necessary if the underlying IC test machinery knows about the subnets you added\n\n/// to your registry\n\npub fn dummy_cup_for_subnet(nodes: Vec<NodeId>) -> CatchUpPackageContents {\n\n let low_threshold_transcript_record =\n\n dummy_initial_dkg_transcript(nodes.clone(), NiDkgTag::LowThreshold);\n\n let high_threshold_transcript_record =\n\n dummy_initial_dkg_transcript(nodes, NiDkgTag::HighThreshold);\n\n\n\n return CatchUpPackageContents {\n\n initial_ni_dkg_transcript_low_threshold: Some(low_threshold_transcript_record),\n\n initial_ni_dkg_transcript_high_threshold: Some(high_threshold_transcript_record),\n\n ..Default::default()\n\n };\n\n\n\n // copied from rs/consensus/src/dkg.rs\n\n fn dummy_initial_dkg_transcript(\n\n committee: Vec<NodeId>,\n\n tag: NiDkgTag,\n\n ) -> InitialNiDkgTranscriptRecord {\n\n let threshold = committee.len() as u32 / 3 + 1;\n\n let transcript =\n\n NiDkgTranscript::dummy_transcript_for_tests_with_params(committee, tag, threshold, 0);\n", "file_path": "rs/registry/canister/tests/common/test_helpers.rs", "rank": 75, "score": 431950.4671252655 
}, { "content": "/// Create a pipe between commands by executing the FIRST [Command] blockingly\n\n/// and, if successful, set its output as the input of the second [Command]\n\n/// WITHOUT executing it.\n\npub fn pipe(a: &mut Command, b: &mut Command) -> RecoveryResult<()> {\n\n let mut cmd_a = a\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n .map_err(|e| RecoveryError::cmd_error(a, None, format!(\"Could not spawn: {:?}\", e)))?;\n\n\n\n let b_stdin: Stdio = cmd_a\n\n .stdout\n\n .take()\n\n .ok_or_else(|| {\n\n RecoveryError::cmd_error(a, None, \"Could not create pipe: stdout is None\".to_string())\n\n })?\n\n .try_into()\n\n .map_err(|e| {\n\n RecoveryError::cmd_error(a, None, format!(\"Could not create pipe: {:?}\", e))\n\n })?;\n\n\n\n b.stdin(b_stdin).stdout(Stdio::piped());\n\n\n\n let output = cmd_a.wait_with_output();\n", "file_path": "rs/recovery/src/command_helper.rs", "rank": 76, "score": 431307.85449315514 }, { "content": "/// Removes a file from the filesystem.\n\npub fn remove_file<P>(path: P) -> io::Result<()>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n std::fs::remove_file(path)\n\n}\n\n\n\n#[cfg(target_family = \"unix\")]\n", "file_path": "rs/utils/src/fs.rs", "rank": 77, "score": 431180.1301040078 }, { "content": "pub fn path_exists(path: &Path) -> RecoveryResult<bool> {\n\n path.try_exists()\n\n .map_err(|e| RecoveryError::IoError(String::from(\"Cannot check if the path exists\"), e))\n\n}\n\n\n", "file_path": "rs/recovery/src/file_sync_helper.rs", "rank": 78, "score": 430076.85011519794 }, { "content": "// Parses a manifest in its textual representation as output by manifest\n\n// and recomputes the root hash using the information contained in it.\n\n//\n\n// Note that this means that it doesn't recompute the chunk hashes as\n\n// recomputing these would require to have the respective files at hand.\n\npub fn do_verify_manifest(file: &Path) -> Result<(), String> {\n\n verify_manifest(File::open(file).unwrap())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests 
{\n\n use std::io::{Seek, Write};\n\n\n\n use ic_state_manager::manifest::{\n\n hash::{chunk_hasher, file_hasher},\n\n manifest_hash,\n\n };\n\n use ic_types::state_sync::{\n\n ChunkInfo, FileInfo, Manifest, StateSyncVersion, CURRENT_STATE_SYNC_VERSION,\n\n };\n\n\n\n use super::verify_manifest;\n\n\n\n fn test_manifest_entry(\n\n version: StateSyncVersion,\n", "file_path": "rs/state_tool/src/commands/verify_manifest.rs", "rank": 79, "score": 428221.11069270375 }, { "content": "// Write all backup files to the disk. For the sake of simplicity, we write all\n\n// artifacts sequentially.\n\nfn store_artifacts(artifacts: Vec<ConsensusMessage>, path: &Path) -> Result<(), io::Error> {\n\n use ConsensusMessage::*;\n\n artifacts\n\n .into_iter()\n\n .filter_map(|artifact| match artifact {\n\n Finalization(artifact) => Some(BackupArtifact::Finalization(Box::new(artifact))),\n\n Notarization(artifact) => Some(BackupArtifact::Notarization(Box::new(artifact))),\n\n BlockProposal(artifact) => Some(BackupArtifact::BlockProposal(Box::new(artifact))),\n\n RandomTape(artifact) => Some(BackupArtifact::RandomTape(Box::new(artifact))),\n\n RandomBeacon(artifact) => Some(BackupArtifact::RandomBeacon(Box::new(artifact))),\n\n CatchUpPackage(artifact) => Some(BackupArtifact::CatchUpPackage(Box::new(artifact))),\n\n // Do not replace by a `_` so that we evaluate at this place if we want to\n\n // backup a new artifact!\n\n RandomBeaconShare(_)\n\n | NotarizationShare(_)\n\n | FinalizationShare(_)\n\n | RandomTapeShare(_)\n\n | CatchUpPackageShare(_) => None,\n\n })\n\n .try_for_each(|artifact| artifact.write_to_disk(path))\n\n}\n\n\n", "file_path": "rs/artifact_pool/src/backup.rs", "rank": 80, "score": 427083.7623683148 }, { "content": "/// Build protos using prost_build.\n\npub fn generate_prost_files(def: &Path, out: &Path) {\n\n let proto_files = [\n\n def.join(\"ic_registry_common/pb/local_store/v1/local_store.proto\"),\n\n 
def.join(\"ic_registry_common/pb/proto_registry/v1/proto_registry.proto\"),\n\n def.join(\"ic_registry_common/pb/test_protos/v1/test_protos.proto\"),\n\n ];\n\n\n\n let mut config = Config::new();\n\n std::fs::create_dir_all(out).expect(\"failed to create output directory\");\n\n config.out_dir(out);\n\n config.compile_protos(&proto_files, &[def]).unwrap();\n\n ic_utils_rustfmt::rustfmt(out).expect(\"failed to rustfmt protobufs\");\n\n}\n", "file_path": "rs/registry/proto/generator/src/lib.rs", "rank": 81, "score": 426653.4173936153 }, { "content": "pub fn exec_ssh_command(vm: &dyn SshSession, command: &str) -> Result<(String, i32), Error> {\n\n let mut channel = vm.block_on_ssh_session()?.channel_session()?;\n\n\n\n channel.exec(command)?;\n\n\n\n let mut output = String::new();\n\n channel.read_to_string(&mut output)?;\n\n channel.wait_close()?;\n\n\n\n Ok((output, channel.exit_status()?))\n\n}\n", "file_path": "rs/tests/src/boundary_nodes/helpers.rs", "rank": 82, "score": 424534.38393634016 }, { "content": "fn setup_file_server(env: &TestEnv, file_path: &std::path::PathBuf) -> String {\n\n // Set up Universal VM with HTTP Bin testing service\n\n let activate_script = &env\n\n .read_dependency_to_string(\n\n \"rs/tests/src/orchestrator/orchestrator_universal_vm_activation.sh\",\n\n )\n\n .expect(\"File not found\")[..];\n\n let config_dir = env\n\n .single_activate_script_config_dir(UNIVERSAL_VM_NAME, activate_script)\n\n .unwrap();\n\n\n\n let _ = insert_file_to_config(\n\n config_dir.clone(),\n\n \"registry.tar\",\n\n &fs::read(file_path).expect(\"File not found\")[..],\n\n );\n\n\n\n let path = env.get_dependency_path(\"rs/tests/static-file-server.tar\");\n\n let _ = insert_file_to_config(\n\n config_dir.clone(),\n", "file_path": "rs/tests/src/orchestrator/subnet_recovery_nns_failover.rs", "rank": 83, "score": 422512.0004916959 }, { "content": "pub fn random_subnet_id(rng: &mut ChaCha20Rng) -> SubnetId {\n\n subnet_test_id(rng.gen::<u64>())\n\n}\n", 
"file_path": "rs/crypto/internal/crypto_service_provider/src/threshold/ni_dkg/tests/fixtures.rs", "rank": 84, "score": 420621.72288371035 }, { "content": "/// Construct an IO error of kind Other holding the specified error.\n\nfn io_err(e: impl std::error::Error + Send + Sync + 'static) -> std::io::Error {\n\n std::io::Error::new(std::io::ErrorKind::Other, e)\n\n}\n\n\n\n/// A TLS connection.\n\npub struct TlsConnection(ConnectionState);\n\n\n\nimpl TlsConnection {\n\n /// Returns the identity of the connected peer if the TLS\n\n /// handshake completed successfully. Returns None if the handshake is not\n\n /// completed yet or failed.\n\n pub fn peer(&self) -> Option<&AuthenticatedPeer> {\n\n match &self.0 {\n\n ConnectionState::Ready { peer, .. } => Some(peer),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// If the handshake is completed, applies `f` to the TlsStream.\n\n /// Otherwise, tries to make the progress with the handshake first.\n", "file_path": "rs/xnet/hyper/src/lib.rs", "rank": 85, "score": 420350.5851139531 }, { "content": "/// Makes a copy-on-write copy of file located at src into dst. This is\n\n/// typically a much faster operation than copying file contents, but it's\n\n/// supported not by all file systems. 
Note that cloning is only supported\n\n/// between paths located on the same filesystem.\n\n///\n\n/// # Errors\n\n///\n\n/// * Returns Err(OperationNotSupported) if the underlying filesystem doesn't\n\n/// support clones.\n\n///\n\n/// * Returns low-level Err(IoError(e)) if one of files can't be open or the\n\n/// corresponding syscall fails for some reason.\n\npub fn clone_file(src: &Path, dst: &Path) -> Result<(), FileCloneError> {\n\n if *crate::IS_WSL {\n\n Err(FileCloneError::OperationNotSupported)\n\n } else {\n\n clone_file_impl(src, dst)\n\n }\n\n}\n\n\n", "file_path": "rs/sys/src/fs.rs", "rank": 86, "score": 420298.1295268516 }, { "content": "fn agent_error(message: impl Display, error: impl Display) -> RecoveryError {\n\n RecoveryError::AgentError(format!(\"{}: {}\", message, error))\n\n}\n\n\n", "file_path": "rs/recovery/subnet_splitting/src/agent_helper.rs", "rank": 87, "score": 420089.5308307445 }, { "content": "fn cleanup_fail_scenario(test: &mut ExecutionTest) -> (CanisterId, MessageId) {\n\n let initial_cycles = Cycles::new(1_000_000_000_000);\n\n\n\n // Create two canisters A, B.\n\n let a_id = test.universal_canister_with_cycles(initial_cycles).unwrap();\n\n let b_id = test.universal_canister_with_cycles(initial_cycles).unwrap();\n\n\n\n let transferred_cycles = initial_cycles.get() / 2;\n\n\n\n // Canister B simply replies with the message that was sent to it.\n\n let b = wasm().message_payload().append_and_reply().build();\n\n\n\n // Canister A:\n\n // 1. Calls canister B and transfers cycles.\n\n // 2. Traps in the response callback.\n\n // 3. 
Traps in the cleanup callback.\n\n let a = wasm()\n\n .call_with_cycles(\n\n b_id.get(),\n\n \"update\",\n", "file_path": "rs/execution_environment/src/execution/response/tests.rs", "rank": 88, "score": 419379.9062753212 }, { "content": "fn response_fail_scenario(test: &mut ExecutionTest) -> (CanisterId, MessageId) {\n\n let initial_cycles = Cycles::new(1_000_000_000_000);\n\n\n\n let a_id = test.universal_canister_with_cycles(initial_cycles).unwrap();\n\n let b_id = test.universal_canister_with_cycles(initial_cycles).unwrap();\n\n\n\n let transferred_cycles = initial_cycles.get() / 2;\n\n\n\n // Canister B simply replies with the message that was sent to it.\n\n let b = wasm().message_payload().append_and_reply().build();\n\n\n\n // Canister A:\n\n // 1. Calls canister B and transfers cycles.\n\n // 2. Traps in the response callback.\n\n let a = wasm()\n\n .call_with_cycles(\n\n b_id.get(),\n\n \"update\",\n\n call_args()\n\n .other_side(b)\n", "file_path": "rs/execution_environment/src/execution/response/tests.rs", "rank": 89, "score": 419379.9062753212 }, { "content": "/// Helper function to extract the effective canister id from the payload of an ingress message.\n\npub fn extract_effective_canister_id(\n\n ingress: &SignedIngressContent,\n\n subnet_id: SubnetId,\n\n) -> Result<Option<CanisterId>, ParseIngressError> {\n\n if !ingress.is_addressed_to_subnet(subnet_id) {\n\n return Ok(None);\n\n }\n\n match Method::from_str(ingress.method_name()) {\n\n Ok(Method::ProvisionalCreateCanisterWithCycles) | Ok(Method::ProvisionalTopUpCanister) => {\n\n Ok(None)\n\n }\n\n Ok(Method::StartCanister)\n\n | Ok(Method::CanisterStatus)\n\n | Ok(Method::DeleteCanister)\n\n | Ok(Method::UninstallCode)\n\n | Ok(Method::StopCanister) => match CanisterIdRecord::decode(ingress.arg()) {\n\n Ok(record) => Ok(Some(record.get_canister_id())),\n\n Err(err) => Err(ParseIngressError::InvalidSubnetPayload(err.to_string())),\n\n },\n\n Ok(Method::CanisterInfo) => match 
CanisterInfoRequest::decode(ingress.arg()) {\n", "file_path": "rs/types/types/src/messages/ingress_messages.rs", "rank": 90, "score": 417702.29249960283 }, { "content": "pub fn write_bytes(file: &Path, bytes: Vec<u8>) -> RecoveryResult<()> {\n\n fs::write(file, bytes).map_err(|e| RecoveryError::file_error(file, e))\n\n}\n\n\n", "file_path": "rs/recovery/src/file_sync_helper.rs", "rank": 91, "score": 417362.6545198871 }, { "content": "pub fn random_subnet_id(rng: &mut ChaCha20Rng) -> SubnetId {\n\n subnet_test_id(rng.gen::<u64>())\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/vault/test_utils/ni_dkg/fixtures.rs", "rank": 92, "score": 416574.56914144865 }, { "content": "pub fn local_store_latest_snapshot(path: PathBuf) -> RegistrySpec {\n\n let source = SourceSpec::LocalStore(path);\n\n let version = VersionSpec::RelativeToLatest(0);\n\n\n\n RegistrySpec { version, source }\n\n}\n\n\n", "file_path": "rs/registry/regedit/src/tests.rs", "rank": 93, "score": 416497.66577807255 }, { "content": "pub fn get_subnet_ids_from_subnet_list(subnet_list: SubnetListRecord) -> Vec<SubnetId> {\n\n subnet_list\n\n .subnets\n\n .iter()\n\n .map(|subnet_id_vec| SubnetId::new(PrincipalId::try_from(subnet_id_vec).unwrap()))\n\n .collect()\n\n}\n\n\n\n/// Returns the latest version of the registry\n\npub async fn get_latest_version() -> u64 {\n\n let response: Vec<u8> = call(REGISTRY_CANISTER_ID, \"get_latest_version\", bytes, vec![])\n\n .await\n\n .unwrap();\n\n deserialize_get_latest_version_response(response).unwrap()\n\n}\n", "file_path": "rs/nns/common/src/registry.rs", "rank": 94, "score": 416262.98732149624 }, { "content": "/// Adds the k/v entry to the given data provider.\n\npub fn write_registry_entry<P: AsRef<Path> + fmt::Debug, M: Message>(\n\n data_provider: &ProtoRegistryDataProvider,\n\n _path: P,\n\n key: &str,\n\n registry_version: RegistryVersion,\n\n record: M,\n\n) where\n\n P: AsRef<Path>,\n\n M: Message + std::clone::Clone,\n\n{\n\n 
data_provider\n\n .add(key, registry_version, Some(record))\n\n .expect(\"Could not add key to registry data provider.\");\n\n}\n\n\n\n/// Writes a protobuf registry entry to a file on disk, in the given path.\n\n/// The file name is the key of the entry in the registry.\n\n///\n\n/// # Panics\n\n///\n", "file_path": "rs/prep/src/util.rs", "rank": 95, "score": 414257.0658576745 }, { "content": "#[cfg(target_family = \"unix\")]\n\npub fn write_atomically<PDst, F>(dst: PDst, action: F) -> io::Result<()>\n\nwhere\n\n F: FnOnce(&mut io::BufWriter<&std::fs::File>) -> io::Result<()>,\n\n PDst: AsRef<Path>,\n\n{\n\n // `.parent()` returns `None` for either `/` or a prefix (e.g. 'c:\\\\` on\n\n // windows). `write_atomically` is only available on UNIX, so we default to\n\n // `/` in case `.parent()` returns `None`.\n\n let tmp_path = dst\n\n .as_ref()\n\n .parent()\n\n .unwrap_or_else(|| Path::new(\"/\"))\n\n .join(tmp_name());\n\n\n\n write_atomically_using_tmp_file(dst, tmp_path.as_path(), action)\n\n}\n\n\n", "file_path": "rs/utils/src/fs.rs", "rank": 96, "score": 414110.51814733865 }, { "content": "pub fn state_manager_test<F: FnOnce(&MetricsRegistry, StateManagerImpl)>(f: F) {\n\n state_manager_test_with_verifier_result(true, f)\n\n}\n\n\n", "file_path": "rs/state_manager/tests/common/mod.rs", "rank": 97, "score": 413527.12091884424 }, { "content": "pub fn get_subnet_ids_from_subnet_list(subnet_list: SubnetListRecord) -> Vec<SubnetId> {\n\n subnet_list\n\n .subnets\n\n .iter()\n\n .map(|subnet_id_vec| SubnetId::new(PrincipalId::try_from(subnet_id_vec).unwrap()))\n\n .collect()\n\n}\n\n\n", "file_path": "rs/registry/canister/src/mutations/common.rs", "rank": 98, "score": 411932.8146356278 } ]
Rust
core/src/avm2/object/dictionary_object.rs
threeoh6000/ruffle
7df5370ec1b9519f30df9a67cfafe8b41db6b116
use crate::avm2::activation::Activation; use crate::avm2::object::script_object::ScriptObjectData; use crate::avm2::object::{ClassObject, Object, ObjectPtr, TObject}; use crate::avm2::value::Value; use crate::avm2::Error; use fnv::FnvHashMap; use gc_arena::{Collect, GcCell, MutationContext}; use std::cell::{Ref, RefMut}; pub fn dictionary_allocator<'gc>( class: ClassObject<'gc>, proto: Object<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Object<'gc>, Error> { let base = ScriptObjectData::base_new(Some(proto), Some(class)); Ok(DictionaryObject(GcCell::allocate( activation.context.gc_context, DictionaryObjectData { base, object_space: Default::default(), }, )) .into()) } #[derive(Clone, Collect, Debug, Copy)] #[collect(no_drop)] pub struct DictionaryObject<'gc>(GcCell<'gc, DictionaryObjectData<'gc>>); #[derive(Clone, Collect, Debug)] #[collect(no_drop)] pub struct DictionaryObjectData<'gc> { base: ScriptObjectData<'gc>, object_space: FnvHashMap<Object<'gc>, Value<'gc>>, } impl<'gc> DictionaryObject<'gc> { pub fn get_property_by_object(self, name: Object<'gc>) -> Value<'gc> { self.0 .read() .object_space .get(&name) .cloned() .unwrap_or(Value::Undefined) } pub fn set_property_by_object( self, name: Object<'gc>, value: Value<'gc>, mc: MutationContext<'gc, '_>, ) { self.0.write(mc).object_space.insert(name, value); } pub fn delete_property_by_object(self, name: Object<'gc>, mc: MutationContext<'gc, '_>) { self.0.write(mc).object_space.remove(&name); } pub fn has_property_by_object(self, name: Object<'gc>) -> bool { self.0.read().object_space.get(&name).is_some() } } impl<'gc> TObject<'gc> for DictionaryObject<'gc> { fn base(&self) -> Ref<ScriptObjectData<'gc>> { Ref::map(self.0.read(), |read| &read.base) } fn base_mut(&self, mc: MutationContext<'gc, '_>) -> RefMut<ScriptObjectData<'gc>> { RefMut::map(self.0.write(mc), |write| &mut write.base) } fn as_ptr(&self) -> *const ObjectPtr { self.0.as_ptr() as *const ObjectPtr } fn value_of(&self, _mc: 
MutationContext<'gc, '_>) -> Result<Value<'gc>, Error> { Ok(Object::from(*self).into()) } fn as_dictionary_object(self) -> Option<DictionaryObject<'gc>> { Some(self) } fn get_next_enumerant( self, last_index: u32, _activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Option<u32>, Error> { let read = self.0.read(); let last_enumerant = read.base.get_last_enumerant(); let object_space_length = read.object_space.keys().len() as u32; if last_index < last_enumerant + object_space_length { Ok(Some(last_index.saturating_add(1))) } else { Ok(None) } } fn get_enumerant_name( self, index: u32, _activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Value<'gc>, Error> { let read = self.0.read(); let last_enumerant = read.base.get_last_enumerant(); if index < last_enumerant { Ok(read .base .get_enumerant_name(index) .unwrap_or(Value::Undefined)) } else { let object_space_index = index.saturating_sub(last_enumerant); Ok(read .object_space .keys() .nth(object_space_index as usize) .cloned() .map(|v| v.into()) .unwrap_or(Value::Undefined)) } } }
use crate::avm2::activation::Activation; use crate::avm2::object::script_object::ScriptObjectData; use crate::avm2::object::{ClassObject, Object, ObjectPtr, TObject}; use crate::avm2::value::Value; use crate::avm2::Error; use fnv::FnvHashMap; use gc_arena::{Collect, GcCell, MutationContext}; use std::cell::{Ref, RefMut}; pub fn dictionary_allocator<'gc>( class: ClassObject<'gc>, proto: Object<'gc>, activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Object<'gc>, Error> { let base = ScriptObjectData::base_new(Some(proto), Some(class)); Ok(DictionaryObject(GcCell::allocate( activation.context.gc_context, DictionaryObjectData { base, object_space: Default::default(), }, )) .into()) } #[derive(Clone, Collect, Debug, Copy)] #[collect(no_drop)] pub struct DictionaryObject<'gc>(GcCell<'gc, DictionaryObjectData<'gc>>); #[derive(Clone, Collect, Debug)] #[collect(no_drop)] pub struct DictionaryObjectData<'gc> { base: ScriptObjectData<'gc>, object_space: FnvHashMap<Object<'gc>, Value<'gc>>, } impl<'gc> DictionaryObject<'gc> { pub fn get_property_by_object(self, name: Object<'gc>) -> Value<'gc> { self.
pub fn set_property_by_object( self, name: Object<'gc>, value: Value<'gc>, mc: MutationContext<'gc, '_>, ) { self.0.write(mc).object_space.insert(name, value); } pub fn delete_property_by_object(self, name: Object<'gc>, mc: MutationContext<'gc, '_>) { self.0.write(mc).object_space.remove(&name); } pub fn has_property_by_object(self, name: Object<'gc>) -> bool { self.0.read().object_space.get(&name).is_some() } } impl<'gc> TObject<'gc> for DictionaryObject<'gc> { fn base(&self) -> Ref<ScriptObjectData<'gc>> { Ref::map(self.0.read(), |read| &read.base) } fn base_mut(&self, mc: MutationContext<'gc, '_>) -> RefMut<ScriptObjectData<'gc>> { RefMut::map(self.0.write(mc), |write| &mut write.base) } fn as_ptr(&self) -> *const ObjectPtr { self.0.as_ptr() as *const ObjectPtr } fn value_of(&self, _mc: MutationContext<'gc, '_>) -> Result<Value<'gc>, Error> { Ok(Object::from(*self).into()) } fn as_dictionary_object(self) -> Option<DictionaryObject<'gc>> { Some(self) } fn get_next_enumerant( self, last_index: u32, _activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Option<u32>, Error> { let read = self.0.read(); let last_enumerant = read.base.get_last_enumerant(); let object_space_length = read.object_space.keys().len() as u32; if last_index < last_enumerant + object_space_length { Ok(Some(last_index.saturating_add(1))) } else { Ok(None) } } fn get_enumerant_name( self, index: u32, _activation: &mut Activation<'_, 'gc, '_>, ) -> Result<Value<'gc>, Error> { let read = self.0.read(); let last_enumerant = read.base.get_last_enumerant(); if index < last_enumerant { Ok(read .base .get_enumerant_name(index) .unwrap_or(Value::Undefined)) } else { let object_space_index = index.saturating_sub(last_enumerant); Ok(read .object_space .keys() .nth(object_space_index as usize) .cloned() .map(|v| v.into()) .unwrap_or(Value::Undefined)) } } }
0 .read() .object_space .get(&name) .cloned() .unwrap_or(Value::Undefined) }
function_block-function_prefixed
[ { "content": "pub fn root_error_handler<'gc>(activation: &mut Activation<'_, 'gc, '_>, error: Error<'gc>) {\n\n match &error {\n\n Error::ThrownValue(value) => {\n\n let message = value\n\n .coerce_to_string(activation)\n\n .unwrap_or_else(|_| \"undefined\".into());\n\n activation.context.avm_trace(&message.to_utf8_lossy());\n\n // Continue execution without halting.\n\n return;\n\n }\n\n Error::InvalidSwf(swf_error) => {\n\n log::error!(\"{}: {}\", error, swf_error);\n\n }\n\n _ => {\n\n log::error!(\"{}\", error);\n\n }\n\n }\n\n activation.context.avm1.halt();\n\n}\n\n\n", "file_path": "core/src/avm1.rs", "rank": 0, "score": 496261.8797817598 }, { "content": "pub trait TObject<'gc>: 'gc + Collect + Debug + Into<Object<'gc>> + Clone + Copy {\n\n /// Retrieve a named, non-virtual property from this object exclusively.\n\n ///\n\n /// This function should not inspect prototype chains. Instead, use\n\n /// `get_stored` to do ordinary property look-up and resolution.\n\n fn get_local_stored(\n\n &self,\n\n name: impl Into<AvmString<'gc>>,\n\n activation: &mut Activation<'_, 'gc, '_>,\n\n ) -> Option<Value<'gc>>;\n\n\n\n /// Retrieve a named property from the object, or its prototype.\n\n fn get(\n\n &self,\n\n name: impl Into<AvmString<'gc>>,\n\n activation: &mut Activation<'_, 'gc, '_>,\n\n ) -> Result<Value<'gc>, Error<'gc>> {\n\n // TODO: Extract logic to a `lookup` function.\n\n let (this, proto) = if let Some(super_object) = self.as_super_object() {\n\n (super_object.this(), super_object.proto(activation))\n", "file_path": "core/src/avm1/object.rs", "rank": 1, "score": 467410.0868399502 }, { "content": "pub trait TObject<'gc>: 'gc + Collect + Debug + Into<Object<'gc>> + Clone + Copy {\n\n /// Get the base of this object.\n\n /// Any trait method implementations that were not overrided will forward the call to this instead.\n\n fn base(&self) -> Ref<ScriptObjectData<'gc>>;\n\n fn base_mut(&self, mc: MutationContext<'gc, '_>) -> 
RefMut<ScriptObjectData<'gc>>;\n\n\n\n /// Retrieve a local property of the object. The Multiname should always be public.\n\n ///\n\n /// This skips class field lookups and looks at:\n\n /// - object-specific storage (like arrays)\n\n /// - Object dynamic properties\n\n /// - prototype chain.\n\n fn get_property_local(\n\n self,\n\n name: &Multiname<'gc>,\n\n activation: &mut Activation<'_, 'gc, '_>,\n\n ) -> Result<Value<'gc>, Error> {\n\n self.base().get_property_local(name, activation)\n\n }\n\n\n", "file_path": "core/src/avm2/object.rs", "rank": 2, "score": 467410.0868399502 }, { "content": "/// Partially construct `Function.prototype`.\n\n///\n\n/// `__proto__` and other cross-linked properties of this object will *not*\n\n/// be defined here. The caller of this function is responsible for linking\n\n/// them in order to obtain a valid ECMAScript `Function` prototype. The\n\n/// returned object is also a bare object, which will need to be linked into\n\n/// the prototype of `Object`.\n\npub fn create_proto<'gc>(gc_context: MutationContext<'gc, '_>, proto: Object<'gc>) -> Object<'gc> {\n\n let function_proto = ScriptObject::object_cell(gc_context, Some(proto));\n\n let object = function_proto.as_script_object().unwrap();\n\n define_properties_on(PROTO_DECLS, gc_context, object, function_proto);\n\n function_proto\n\n}\n", "file_path": "core/src/avm1/globals/function.rs", "rank": 3, "score": 455245.45238421066 }, { "content": "pub fn create_proto<'gc>(gc_context: MutationContext<'gc, '_>, proto: Object<'gc>) -> Object<'gc> {\n\n // It's a custom prototype but it's empty.\n\n ScriptObject::object(gc_context, Some(proto)).into()\n\n}\n", "file_path": "core/src/avm1/globals/selection.rs", "rank": 4, "score": 455233.75403558684 }, { "content": "fn name<'gc>(_activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n this.name().into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 5, "score": 452989.8663344622 
}, { "content": "pub fn create_proto<'gc>(gc_context: MutationContext<'gc, '_>, proto: Object<'gc>) -> Object<'gc> {\n\n // It's a custom prototype but it's empty.\n\n ScriptObject::object(gc_context, Some(proto)).into()\n\n}\n", "file_path": "core/src/avm1/globals/external_interface.rs", "rank": 6, "score": 451596.0240478306 }, { "content": "/// Construct `Object`'s class.\n\npub fn create_class<'gc>(gc_context: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let object_class = Class::new(\n\n QName::new(Namespace::public(), \"Object\"),\n\n None,\n\n Method::from_builtin(instance_init, \"<Object instance initializer>\", gc_context),\n\n Method::from_builtin(class_init, \"<Object class initializer>\", gc_context),\n\n gc_context,\n\n );\n\n let mut write = object_class.write(gc_context);\n\n write.set_call_handler(Method::from_builtin(\n\n class_call,\n\n \"<Object call handler>\",\n\n gc_context,\n\n ));\n\n\n\n write.define_class_trait(Trait::from_const(\n\n QName::new(Namespace::public(), \"length\"),\n\n QName::new(Namespace::public(), \"int\").into(),\n\n None,\n\n ));\n", "file_path": "core/src/avm2/globals/object.rs", "rank": 7, "score": 437080.9268293859 }, { "content": "pub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.net\"), \"ObjectEncoding\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<ObjectEncoding instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<ObjectEncoding class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, u32)] = &[(\"AMF0\", 0), (\"AMF3\", 3), (\"DEFAULT\", 3)];\n\n write.define_public_constant_uint_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": 
"core/src/avm2/globals/flash/net/object_encoding.rs", "rank": 8, "score": 427389.4172390859 }, { "content": "pub fn render_base<'gc>(this: DisplayObject<'gc>, context: &mut RenderContext<'_, 'gc>) {\n\n if this.maskee().is_some() {\n\n return;\n\n }\n\n context.transform_stack.push(&*this.base().transform());\n\n\n\n let mask = this.masker();\n\n let mut mask_transform = crate::transform::Transform::default();\n\n if let Some(m) = mask {\n\n mask_transform.matrix = this.global_to_local_matrix();\n\n mask_transform.matrix *= m.local_to_global_matrix();\n\n context.renderer.push_mask();\n\n context.allow_mask = false;\n\n context.transform_stack.push(&mask_transform);\n\n m.render_self(context);\n\n context.transform_stack.pop();\n\n context.allow_mask = true;\n\n context.renderer.activate_mask();\n\n }\n\n this.render_self(context);\n", "file_path": "core/src/display_object.rs", "rank": 9, "score": 420292.3042640188 }, { "content": "/// Construct `Class`'s class.\n\npub fn create_class<'gc>(gc_context: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class_class = Class::new(\n\n QName::new(Namespace::public(), \"Class\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Class instance initializer>\", gc_context),\n\n Method::from_builtin(class_init, \"<Class class initializer>\", gc_context),\n\n gc_context,\n\n );\n\n\n\n let mut write = class_class.write(gc_context);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[(\"prototype\", Some(prototype), None)];\n\n write.define_public_builtin_instance_properties(gc_context, PUBLIC_INSTANCE_PROPERTIES);\n\n\n\n class_class\n\n}\n", "file_path": "core/src/avm2/globals/class.rs", "rank": 10, "score": 408006.25690012355 }, { "content": "/// Construct `QName`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = 
Class::new(\n\n QName::new(Namespace::public(), \"QName\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<QName instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<QName class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(qname_allocator);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"localName\", Some(local_name), None),\n\n (\"uri\", Some(uri), None),\n\n ];\n\n write.define_public_builtin_instance_properties(mc, PUBLIC_INSTANCE_PROPERTIES);\n\n\n\n const AS3_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] =\n\n &[(\"toString\", to_string), (\"valueOf\", value_of)];\n\n write.define_as3_builtin_instance_methods(mc, AS3_INSTANCE_METHODS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/qname.rs", "rank": 11, "score": 400185.3273656063 }, { "content": "/// Construct `Math`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"Math\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Math instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Math class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n\n\n use std::f64::consts::*;\n\n const CONSTANTS: &[(&str, f64)] = &[\n\n (\"E\", E),\n\n (\"LN10\", LN_10),\n\n (\"LN2\", LN_2),\n\n (\"LOG10E\", LOG10_E),\n\n (\"LOG2E\", LOG2_E),\n\n (\"PI\", PI),\n", "file_path": "core/src/avm2/globals/math.rs", "rank": 12, "score": 400179.12060918054 }, { "content": "/// Construct `Namespace`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = 
Class::new(\n\n QName::new(Namespace::public(), \"Namespace\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Namespace instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Namespace class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(namespace_allocator);\n\n write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n\n \"<Namespace native instance initializer>\",\n\n mc,\n\n ));\n\n write.set_call_handler(Method::from_builtin(\n\n class_call,\n\n \"<Namespace call handler>\",\n\n mc,\n\n ));\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/namespace.rs", "rank": 13, "score": 400179.12060918054 }, { "content": "/// Construct `Vector`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(NS_VECTOR), \"Vector\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Vector instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Vector class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::GENERIC | ClassAttributes::FINAL);\n\n write.set_instance_allocator(vector_allocator);\n\n write.set_specialized_init(Method::from_builtin(\n\n specialized_class_init,\n\n \"<Vector specialized class initializer>\",\n\n mc,\n\n ));\n\n write.set_call_handler(Method::from_builtin(\n\n class_call,\n", "file_path": "core/src/avm2/globals/vector.rs", "rank": 14, "score": 400179.12060918054 }, { "content": "/// Construct `Number`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"Number\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n 
Method::from_builtin(instance_init, \"<Number instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Number class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(primitive_allocator);\n\n write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n\n \"<Number native instance initializer>\",\n\n mc,\n\n ));\n\n\n\n const CLASS_CONSTANTS: &[(&str, f64)] = &[\n\n (\"MAX_VALUE\", f64::MAX),\n\n (\"MIN_VALUE\", f64::MIN_POSITIVE),\n", "file_path": "core/src/avm2/globals/number.rs", "rank": 15, "score": 400179.12060918054 }, { "content": "/// Construct `int`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"int\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<int instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<int class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(primitive_allocator);\n\n write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n\n \"<int native instance initializer>\",\n\n mc,\n\n ));\n\n\n\n const CLASS_CONSTANTS: &[(&str, i32)] = &[(\"MAX_VALUE\", i32::MAX), (\"MIN_VALUE\", i32::MIN)];\n\n write.define_public_constant_int_class_traits(CLASS_CONSTANTS);\n\n\n", "file_path": "core/src/avm2/globals/int.rs", "rank": 16, "score": 400179.12060918054 }, { "content": "/// Construct `Boolean`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"Boolean\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Boolean instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Boolean class initializer>\", mc),\n\n 
mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(primitive_allocator);\n\n write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n\n \"<Boolean native instance initializer>\",\n\n mc,\n\n ));\n\n\n\n const AS3_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] =\n\n &[(\"toString\", to_string), (\"valueOf\", value_of)];\n\n write.define_as3_builtin_instance_methods(mc, AS3_INSTANCE_METHODS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/boolean.rs", "rank": 17, "score": 400179.12060918054 }, { "content": "/// Construct `Array`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"Array\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Array instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Array class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_instance_allocator(array_allocator);\n\n\n\n const PUBLIC_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] = &[\n\n (\"toString\", to_string),\n\n (\"toLocaleString\", to_locale_string),\n\n (\"valueOf\", value_of),\n\n ];\n\n write.define_public_builtin_instance_methods(mc, PUBLIC_INSTANCE_METHODS);\n\n\n", "file_path": "core/src/avm2/globals/array.rs", "rank": 18, "score": 400179.12060918054 }, { "content": "/// Construct `JSON`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"JSON\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<JSON instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<JSON class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n const PUBLIC_CLASS_METHODS: &[(&str, NativeMethodImpl)] 
=\n\n &[(\"parse\", parse), (\"stringify\", stringify)];\n\n write.define_public_builtin_class_methods(mc, PUBLIC_CLASS_METHODS);\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/json.rs", "rank": 19, "score": 400179.12060918054 }, { "content": "/// Construct `uint`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"uint\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<uint instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<uint class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(primitive_allocator);\n\n write.set_native_instance_init(Method::from_builtin_and_params(\n\n native_instance_init,\n\n \"<uint native instance initializer>\",\n\n vec![ParamConfig::of_type(\n\n \"num\",\n\n QName::new(Namespace::public(), \"Object\").into(),\n\n )],\n\n false,\n\n mc,\n", "file_path": "core/src/avm2/globals/uint.rs", "rank": 20, "score": 400179.12060918054 }, { "content": "/// Construct `Date`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"Date\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Date instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Date class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(date_allocator);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"time\", Some(time), Some(set_time)),\n\n (\"milliseconds\", Some(milliseconds), Some(set_milliseconds)),\n\n (\"seconds\", Some(seconds), Some(set_seconds)),\n", "file_path": "core/src/avm2/globals/date.rs", "rank": 
21, "score": 400179.12060918054 }, { "content": "/// Construct `String`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"String\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<String instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<String class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n write.set_instance_allocator(primitive_allocator);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[(\"length\", Some(length), None)];\n\n write.define_public_builtin_instance_properties(mc, PUBLIC_INSTANCE_PROPERTIES);\n\n\n", "file_path": "core/src/avm2/globals/string.rs", "rank": 22, "score": 400179.12060918054 }, { "content": "/// Construct `RegExp`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"RegExp\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin_and_params(\n\n instance_init,\n\n \"<RegExp instance initializer>\",\n\n vec![\n\n ParamConfig::optional(\"re\", QName::new(Namespace::public(), \"String\").into(), \"\"),\n\n ParamConfig::optional(\n\n \"flags\",\n\n QName::new(Namespace::public(), \"String\").into(),\n\n \"\",\n\n ),\n\n ],\n\n false,\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<RegExp class initializer>\", mc),\n\n mc,\n", "file_path": "core/src/avm2/globals/regexp.rs", "rank": 23, "score": 400179.04009024234 }, { "content": "pub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"XML\"),\n\n 
Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin_and_params(\n\n instance_init,\n\n \"<XML instance initializer>\",\n\n vec![ParamConfig::optional(\n\n \"value\",\n\n QName::new(Namespace::public(), \"Object\").into(),\n\n Value::Undefined,\n\n )],\n\n false,\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<XML class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(xml_allocator);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/xml.rs", "rank": 24, "score": 400172.70643219224 }, { "content": "/// Construct `Function`'s class.\n\npub fn create_class<'gc>(gc_context: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let function_class = Class::new(\n\n QName::new(Namespace::public(), \"Function\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Function instance initializer>\", gc_context),\n\n Method::from_builtin(class_init, \"<Function class initializer>\", gc_context),\n\n gc_context,\n\n );\n\n\n\n let mut write = function_class.write(gc_context);\n\n\n\n // Fixed traits (in AS3 namespace)\n\n const AS3_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] = &[(\"call\", call), (\"apply\", apply)];\n\n write.define_as3_builtin_instance_methods(gc_context, AS3_INSTANCE_METHODS);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[(\"prototype\", Some(prototype), Some(set_prototype))];\n\n write.define_public_builtin_instance_properties(gc_context, PUBLIC_INSTANCE_PROPERTIES);\n\n\n\n function_class\n\n}\n", "file_path": "core/src/avm2/globals/function.rs", "rank": 25, "score": 399731.81936281326 }, { "content": "/// Construct `global`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n Class::new(\n\n QName::new(Namespace::public(), \"global\"),\n\n 
Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<global instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<global class initializer>\", mc),\n\n mc,\n\n )\n\n}\n", "file_path": "core/src/avm2/globals/global_scope.rs", "rank": 26, "score": 397213.6231345008 }, { "content": "pub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::public(), \"XMLList\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin_and_params(\n\n instance_init,\n\n \"<XMLList instance initializer>\",\n\n vec![ParamConfig::optional(\n\n \"value\",\n\n QName::new(Namespace::public(), \"Object\").into(),\n\n Value::Undefined,\n\n )],\n\n false,\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<XMLList class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(xml_allocator);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/xml_list.rs", "rank": 27, "score": 397207.20895751263 }, { "content": "/// Construct `ActivityEvent`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.events\"), \"ActivityEvent\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"Event\").into()),\n\n Method::from_builtin(instance_init, \"<ActivityEvent instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<ActivityEvent class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[(\"activating\", Some(activating), Some(set_activating))];\n\n write.define_public_builtin_instance_properties(mc, PUBLIC_INSTANCE_PROPERTIES);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n 
const CONSTANTS: &[(&str, &str)] = &[(\"ACTIVITY\", \"activity\")];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/events/activityevent.rs", "rank": 28, "score": 394353.6745412806 }, { "content": "/// Construct `SharedObject`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.net\"), \"SharedObject\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"EventDispatcher\").into()),\n\n Method::from_builtin(instance_init, \"<SharedObject instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<SharedObject class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n write.define_instance_trait(Trait::from_slot(\n\n QName::new(Namespace::public(), \"data\"),\n\n QName::new(Namespace::public(), \"Object\").into(),\n\n None,\n\n ));\n\n\n\n const PUBLIC_CLASS_METHODS: &[(&str, NativeMethodImpl)] = &[(\"getLocal\", get_local)];\n\n write.define_public_builtin_class_methods(mc, PUBLIC_CLASS_METHODS);\n\n\n\n const PUBLIC_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] = &[(\"flush\", flush)];\n\n write.define_public_builtin_instance_methods(mc, PUBLIC_INSTANCE_METHODS);\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/net/sharedobject.rs", "rank": 29, "score": 394353.6004776913 }, { "content": "/// Construct `DisplayObject`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"DisplayObject\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"EventDispatcher\").into()),\n\n Method::from_builtin(instance_init, \"<DisplayObject instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<DisplayObject class initializer>\", 
mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_instance_allocator(stage_allocator);\n\n write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n\n \"<DisplayObject native instance initializer>\",\n\n mc,\n\n ));\n\n\n\n write.implements(QName::new(Namespace::package(\"flash.display\"), \"IBitmapDrawable\").into());\n\n\n", "file_path": "core/src/avm2/globals/flash/display/displayobject.rs", "rank": 30, "score": 394353.6004776913 }, { "content": "/// Construct `InteractiveObject`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"InteractiveObject\"),\n\n Some(QName::new(Namespace::package(\"flash.display\"), \"DisplayObject\").into()),\n\n Method::from_builtin(\n\n instance_init,\n\n \"<InteractiveObject instance initializer>\",\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<InteractiveObject class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n\n \"<InteractiveObject native instance initializer>\",\n\n mc,\n\n ));\n", "file_path": "core/src/avm2/globals/flash/display/interactiveobject.rs", "rank": 31, "score": 394353.6004776913 }, { "content": "/// Construct `DisplayObjectContainer`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(\n\n Namespace::package(\"flash.display\"),\n\n \"DisplayObjectContainer\",\n\n ),\n\n Some(QName::new(Namespace::package(\"flash.display\"), \"InteractiveObject\").into()),\n\n Method::from_builtin(\n\n instance_init,\n\n \"<DisplayObjectContainer instance initializer>\",\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<DisplayObjectContainer class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n 
write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n", "file_path": "core/src/avm2/globals/flash/display/displayobjectcontainer.rs", "rank": 32, "score": 394353.4453226526 }, { "content": "/// Construct `Shape`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"Shape\"),\n\n Some(QName::new(Namespace::package(\"flash.display\"), \"DisplayObject\").into()),\n\n Method::from_builtin(instance_init, \"<Shape instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Shape class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[(\"graphics\", Some(graphics), None)];\n\n write.define_public_builtin_instance_properties(mc, PUBLIC_INSTANCE_PROPERTIES);\n\n\n\n // Slot for lazy-initialized Graphics object.\n\n write.define_instance_trait(Trait::from_slot(\n\n QName::new(Namespace::private(NS_RUFFLE_INTERNAL), \"graphics\"),\n\n QName::new(Namespace::package(\"flash.display\"), \"Graphics\").into(),\n\n None,\n\n ));\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/shape.rs", "rank": 33, "score": 394347.49977531563 }, { "content": "/// Construct `Capabilities`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.system\"), \"Capabilities\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Capabilities instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Capabilities class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_CLASS_TRAITS: &[(&str, 
Option<NativeMethodImpl>, Option<NativeMethodImpl>)] = &[\n\n (\"os\", Some(os), None),\n\n (\"playerType\", Some(player_type), None),\n\n (\"version\", Some(version), None),\n\n ];\n\n\n\n write.define_public_builtin_class_properties(mc, PUBLIC_CLASS_TRAITS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/system/capabilities.rs", "rank": 34, "score": 394347.4997753157 }, { "content": "/// Construct `Scene`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"Scene\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Scene instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Scene class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"labels\", Some(labels), None),\n\n (\"name\", Some(name), None),\n\n (\"numFrames\", Some(num_frames), None),\n\n ];\n", "file_path": "core/src/avm2/globals/flash/display/scene.rs", "rank": 35, "score": 394347.49977531563 }, { "content": "/// Construct `Event`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.events\"), \"Event\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Event instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Event class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n write.set_instance_allocator(event_allocator);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"bubbles\", 
Some(bubbles), None),\n", "file_path": "core/src/avm2/globals/flash/events/event.rs", "rank": 36, "score": 394347.49977531563 }, { "content": "/// Construct `System`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.system\"), \"System\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<System instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<System class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n const PUBLIC_CLASS_METHODS: &[(&str, NativeMethodImpl)] = &[(\"gc\", gc)];\n\n write.define_public_builtin_class_methods(mc, PUBLIC_CLASS_METHODS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/system/system.rs", "rank": 37, "score": 394347.4997753157 }, { "content": "/// Construct `Sound`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.media\"), \"Sound\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"EventDispatcher\").into()),\n\n Method::from_builtin(instance_init, \"<Sound instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Sound class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n write.set_instance_allocator(sound_allocator);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"bytesLoaded\", Some(bytes_total), None),\n", "file_path": "core/src/avm2/globals/flash/media/sound.rs", "rank": 38, "score": 394347.49977531563 }, { "content": "/// Construct `Font`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n 
QName::new(Namespace::package(\"flash.text\"), \"Font\"),\n\n Some(QName::new(Namespace::package(\"\"), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Font instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Font class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"fontName\", Some(font_name), None),\n\n (\"fontStyle\", Some(font_style), None),\n", "file_path": "core/src/avm2/globals/flash/text/font.rs", "rank": 39, "score": 394347.4997753157 }, { "content": "/// Construct `Stage`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"Stage\"),\n\n Some(\n\n QName::new(\n\n Namespace::package(\"flash.display\"),\n\n \"DisplayObjectContainer\",\n\n )\n\n .into(),\n\n ),\n\n Method::from_builtin(instance_init, \"<Stage instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Stage class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n", "file_path": "core/src/avm2/globals/flash/display/stage.rs", "rank": 40, "score": 394347.49977531563 }, { "content": "/// Construct `Sprite`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"Sprite\"),\n\n Some(\n\n QName::new(\n\n Namespace::package(\"flash.display\"),\n\n \"DisplayObjectContainer\",\n\n )\n\n .into(),\n\n ),\n\n Method::from_builtin(instance_init, \"<Sprite instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Sprite class 
initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n", "file_path": "core/src/avm2/globals/flash/display/sprite.rs", "rank": 41, "score": 394347.49977531563 }, { "content": "/// Construct `Video`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.media\"), \"Video\"),\n\n Some(QName::new(Namespace::package(\"flash.display\"), \"DisplayObject\").into()),\n\n Method::from_builtin(instance_init, \"<Video instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Video class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/media/video.rs", "rank": 42, "score": 394347.49977531563 }, { "content": "/// Construct `Dictionary`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.utils\"), \"Dictionary\"),\n\n Some(QName::new(Namespace::package(\"\"), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Dictionary instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Dictionary class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_instance_allocator(dictionary_allocator);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/utils/dictionary.rs", "rank": 43, "score": 394347.4997753157 }, { "content": "/// Construct `Point`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.geom\"), \"Point\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n 
Method::from_builtin(instance_init, \"<Point instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Point class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[(\"length\", Some(length), None)];\n\n write.define_public_builtin_instance_properties(mc, PUBLIC_INSTANCE_PROPERTIES);\n\n\n\n const PUBLIC_INSTANCE_NUMBER_SLOTS: &[(&str, Option<f64>)] = &[(\"x\", None), (\"y\", None)];\n", "file_path": "core/src/avm2/globals/flash/geom/point.rs", "rank": 44, "score": 394347.49977531563 }, { "content": "/// Construct `Graphics`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"Graphics\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Graphics instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Graphics class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n write.set_instance_allocator(stage_allocator);\n\n write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n\n \"<Graphics native instance initializer>\",\n\n mc,\n\n ));\n\n\n\n const PUBLIC_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] = &[\n", "file_path": "core/src/avm2/globals/flash/display/graphics.rs", "rank": 45, "score": 394347.49977531563 }, { "content": "/// Construct `Rectangle`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.geom\"), \"Rectangle\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Rectangle 
instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Rectangle class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"top\", Some(top), Some(set_top)),\n\n (\"bottom\", Some(bottom), Some(set_bottom)),\n\n (\"left\", Some(left), Some(set_left)),\n", "file_path": "core/src/avm2/globals/flash/geom/rectangle.rs", "rank": 46, "score": 394347.49977531563 }, { "content": "/// Construct `Bitmap`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"Bitmap\"),\n\n Some(QName::new(Namespace::package(\"flash.display\"), \"DisplayObject\").into()),\n\n Method::from_builtin(instance_init, \"<Bitmap instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Bitmap class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"bitmapData\", Some(bitmap_data), Some(set_bitmap_data)),\n\n (\n", "file_path": "core/src/avm2/globals/flash/display/bitmap.rs", "rank": 47, "score": 394347.49977531563 }, { "content": "/// Construct `PixelSnapping`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"PixelSnapping\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<PixelSnapping instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<PixelSnapping class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = 
class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n\n\n const CONSTANTS: &[(&str, &str)] =\n\n &[(\"ALWAYS\", \"always\"), (\"AUTO\", \"auto\"), (\"NEVER\", \"never\")];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/pixelsnapping.rs", "rank": 48, "score": 394347.41925637744 }, { "content": "/// Construct `EventPhase`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.events\"), \"EventPhase\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<EventPhase instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<EventPhase class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n write.set_attributes(ClassAttributes::FINAL);\n\n\n\n const CONSTANTS: &[(&str, u32)] = &[\n\n (\"CAPTURING_PHASE\", 1),\n\n (\"AT_TARGET\", 2),\n\n (\"BUBBLING_PHASE\", 3),\n\n ];\n\n write.define_public_constant_uint_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/events/eventphase.rs", "rank": 49, "score": 394347.4192563775 }, { "content": "/// Construct `StageQuality`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"StageQuality\"),\n\n Some(QName::new(Namespace::package(\"\"), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<StageQuality instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<StageQuality class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n\n\n const 
CONSTANTS: &[(&str, &str)] = &[\n\n (\"BEST\", \"best\"),\n\n (\"HIGH\", \"high\"),\n\n (\"HIGH_16X16\", \"16x16\"),\n\n (\"HIGH_16x16_LINEAR\", \"16x16linear\"),\n\n (\"HIGH_8X8\", \"8x8\"),\n\n (\"HIGH_8x8_LINEAR\", \"8x8linear\"),\n\n (\"LOW\", \"low\"),\n\n (\"MEDIUM\", \"medium\"),\n\n ];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/stagequality.rs", "rank": 50, "score": 394347.41925637744 }, { "content": "/// Construct `EventDispatcher`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.events\"), \"EventDispatcher\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<EventDispatcher instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<EventDispatcher class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n write.implements(QName::new(Namespace::package(\"flash.events\"), \"IEventDispatcher\").into());\n\n\n\n const PUBLIC_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] = &[\n\n (\"addEventListener\", add_event_listener),\n\n (\"removeEventListener\", remove_event_listener),\n\n (\"hasEventListener\", has_event_listener),\n\n (\"willTrigger\", will_trigger),\n", "file_path": "core/src/avm2/globals/flash/events/eventdispatcher.rs", "rank": 51, "score": 394347.41925637744 }, { "content": "/// Construct `SoundTransform`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.media\"), \"SoundTransform\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<SoundTransform instance initializer>\", mc),\n\n Method::from_builtin(class_init, 
\"<SoundTransform class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[(\"pan\", Some(pan), Some(set_pan))];\n\n write.define_public_builtin_instance_properties(mc, PUBLIC_INSTANCE_PROPERTIES);\n\n\n", "file_path": "core/src/avm2/globals/flash/media/soundtransform.rs", "rank": 52, "score": 394347.4192563775 }, { "content": "/// Construct `TextField`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.text\"), \"TextField\"),\n\n Some(QName::new(Namespace::package(\"flash.display\"), \"InteractiveObject\").into()),\n\n Method::from_builtin(instance_init, \"<TextField instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<TextField class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"autoSize\", Some(autosize), Some(set_autosize)),\n\n (\n", "file_path": "core/src/avm2/globals/flash/text/textfield.rs", "rank": 53, "score": 394347.4192563775 }, { "content": "/// Construct `BitmapData`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"BitmapData\"),\n\n Some(QName::new(Namespace::package(\"\"), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<BitmapData instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<BitmapData class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n 
write.set_attributes(ClassAttributes::SEALED);\n\n write.set_instance_allocator(bitmapdata_allocator);\n\n\n\n write.implements(QName::new(Namespace::package(\"flash.display\"), \"IBitmapDrawable\").into());\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n", "file_path": "core/src/avm2/globals/flash/display/bitmapdata.rs", "rank": 54, "score": 394347.41925637744 }, { "content": "/// Construct `LoaderInfo`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"LoaderInfo\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"EventDispatcher\").into()),\n\n Method::from_builtin(instance_init, \"<LoaderInfo instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<LoaderInfo class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n write.set_instance_allocator(loaderinfo_allocator);\n\n write.set_native_instance_init(Method::from_builtin(\n\n native_instance_init,\n\n \"<LoaderInfo native instance initializer>\",\n\n mc,\n\n ));\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n", "file_path": "core/src/avm2/globals/flash/display/loaderinfo.rs", "rank": 55, "score": 394347.4192563774 }, { "content": "/// Construct `JointStyle`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"JointStyle\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<JointStyle instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<JointStyle class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: 
&[(&str, &str)] =\n\n &[(\"BEVEL\", \"bevel\"), (\"MITER\", \"miter\"), (\"ROUND\", \"round\")];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/jointstyle.rs", "rank": 56, "score": 394347.41925637744 }, { "content": "/// Construct `SoundMixer`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.media\"), \"SoundMixer\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<SoundMixer instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<SoundMixer class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n\n\n const PUBLIC_CLASS_PROPERTIES: &[(&str, Option<NativeMethodImpl>, Option<NativeMethodImpl>)] =\n\n &[\n\n (\n\n \"soundTransform\",\n\n Some(sound_transform),\n\n Some(set_sound_transform),\n\n ),\n", "file_path": "core/src/avm2/globals/flash/media/soundmixer.rs", "rank": 57, "score": 394347.4192563775 }, { "content": "/// Construct `ContextMenu`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.ui\"), \"ContextMenu\"),\n\n Some(QName::new(Namespace::package(\"flash.display\"), \"NativeMenu\").into()),\n\n Method::from_builtin(instance_init, \"<ContextMenu instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<ContextMenu class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n\n\n const PUBLIC_CLASS_PROPERTIES: &[(&str, Option<NativeMethodImpl>, Option<NativeMethodImpl>)] =\n\n &[(\"isSupported\", Some(is_supported), None)];\n\n 
write.define_public_builtin_class_properties(mc, PUBLIC_CLASS_PROPERTIES);\n\n\n\n const PUBLIC_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] =\n\n &[(\"hideBuiltInItems\", hide_built_in_items)];\n\n write.define_public_builtin_instance_methods(mc, PUBLIC_INSTANCE_METHODS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/ui/contextmenu.rs", "rank": 58, "score": 394347.41925637744 }, { "content": "/// Construct `SimpleButton`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"SimpleButton\"),\n\n Some(QName::new(Namespace::package(\"flash.display\"), \"InteractiveObject\").into()),\n\n Method::from_builtin(instance_init, \"<SimpleButton instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<SimpleButton class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"downState\", Some(down_state), Some(set_down_state)),\n\n (\"enabled\", Some(enabled), Some(set_enabled)),\n", "file_path": "core/src/avm2/globals/flash/display/simplebutton.rs", "rank": 59, "score": 394347.41925637744 }, { "content": "/// Construct `FrameLabel`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"FrameLabel\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"EventDispatcher\").into()),\n\n Method::from_builtin(instance_init, \"<FrameLabel instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<FrameLabel class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n 
Option<NativeMethodImpl>,\n\n )] = &[(\"name\", Some(name), None), (\"frame\", Some(frame), None)];\n\n write.define_public_builtin_instance_properties(mc, PUBLIC_INSTANCE_PROPERTIES);\n\n\n\n const PRIVATE_INSTANCE_SLOTS: &[(&str, &str, &str, &str)] = &[\n\n (NS_RUFFLE_INTERNAL, \"name\", \"\", \"String\"),\n\n (NS_RUFFLE_INTERNAL, \"frame\", \"\", \"int\"),\n\n ];\n\n write.define_private_slot_instance_traits(PRIVATE_INSTANCE_SLOTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/framelabel.rs", "rank": 60, "score": 394347.4192563774 }, { "content": "/// Construct `SWFVersion`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"SWFVersion\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<SWFVersion instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<SWFVersion class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, u32)] = &[\n\n (\"FLASH1\", 1),\n\n (\"FLASH2\", 2),\n\n (\"FLASH3\", 3),\n\n (\"FLASH4\", 4),\n\n (\"FLASH5\", 5),\n\n (\"FLASH6\", 6),\n", "file_path": "core/src/avm2/globals/flash/display/swfversion.rs", "rank": 61, "score": 394347.41925637744 }, { "content": "/// Construct `KeyboardEvent`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.events\"), \"KeyboardEvent\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"Event\").into()),\n\n Method::from_builtin(instance_init, \"<KeyboardEvent instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<KeyboardEvent class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = 
class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[(\"KEY_DOWN\", \"keyDown\"), (\"KEY_UP\", \"keyUp\")];\n\n\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/events/keyboardevent.rs", "rank": 62, "score": 394347.4192563775 }, { "content": "/// Construct `StageAlign`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"StageAlign\"),\n\n Some(QName::new(Namespace::package(\"\"), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<StageAlign instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<StageAlign class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[\n\n (\"BOTTOM\", \"B\"),\n\n (\"BOTTOM_LEFT\", \"BL\"),\n\n (\"BOTTOM_RIGHT\", \"BR\"),\n\n (\"LEFT\", \"L\"),\n\n (\"RIGHT\", \"R\"),\n\n (\"TOP\", \"T\"),\n\n (\"TOP_LEFT\", \"TL\"),\n\n (\"TOP_RIGHT\", \"TR\"),\n\n ];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/stagealign.rs", "rank": 63, "score": 394347.41925637744 }, { "content": "/// Construct `MouseEvent`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.events\"), \"MouseEvent\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"Event\").into()),\n\n Method::from_builtin(instance_init, \"<MouseEvent instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<MouseEvent class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n 
write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[\n\n (\"CLICK\", \"click\"),\n\n (\"CONTEXT_MENU\", \"contextMenu\"),\n\n (\"DOUBLE_CLICK\", \"doubleClick\"),\n\n (\"MIDDLE_CLICK\", \"middleClick\"),\n\n (\"MIDDLE_MOUSE_DOWN\", \"middleMouseDown\"),\n\n (\"MIDDLE_MOUSE_UP\", \"middleMouseUp\"),\n", "file_path": "core/src/avm2/globals/flash/events/mouseevent.rs", "rank": 64, "score": 394347.41925637744 }, { "content": "/// Construct `CapsStyle`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"CapsStyle\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<CapsStyle instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<CapsStyle class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n const CONSTANTS: &[(&str, &str)] =\n\n &[(\"NONE\", \"none\"), (\"ROUND\", \"round\"), (\"SQUARE\", \"square\")];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/capsstyle.rs", "rank": 65, "score": 394347.41925637744 }, { "content": "/// Construct `ProgressEvent`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.events\"), \"ProgressEvent\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"Event\").into()),\n\n Method::from_builtin(instance_init, \"<ProgressEvent instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<ProgressEvent class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[\n\n (\"PROGRESS\", 
\"progress\"),\n\n (\"SOCKET_DATA\", \"socketData\"),\n\n (\"STANDARD_ERROR_DATA\", \"standardErrorData\"),\n\n (\"STANDARD_INPUT_PROGRESS\", \"standardInputProgress\"),\n\n (\"STANDARD_OUTPUT_DATA\", \"standardOutputData\"),\n\n ];\n\n\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/events/progressevent.rs", "rank": 66, "score": 394347.41925637744 }, { "content": "/// Construct `TextFormat`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.text\"), \"TextFormat\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<TextFormat instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<TextFormat class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(textformat_allocator);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"align\", Some(getter!(align)), Some(setter!(set_align))),\n", "file_path": "core/src/avm2/globals/flash/text/textformat.rs", "rank": 67, "score": 394347.41925637744 }, { "content": "/// Construct `NativeMenu`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"NativeMenu\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"EventDispatcher\").into()),\n\n Method::from_builtin(instance_init, \"<NativeMenu instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<NativeMenu class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n class\n\n}\n", "file_path": 
"core/src/avm2/globals/flash/display/nativemenu.rs", "rank": 68, "score": 394347.41925637744 }, { "content": "/// Construct `MovieClip`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"MovieClip\"),\n\n Some(QName::new(Namespace::package(\"flash.display\"), \"Sprite\").into()),\n\n Method::from_builtin(instance_init, \"<MovieClip instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<MovieClip class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"currentFrame\", Some(current_frame), None),\n\n (\"currentFrameLabel\", Some(current_frame_label), None),\n\n (\"currentLabel\", Some(current_label), None),\n\n (\"currentLabels\", Some(current_labels), None),\n", "file_path": "core/src/avm2/globals/flash/display/movieclip.rs", "rank": 69, "score": 394347.41925637744 }, { "content": "/// Construct `SoundChannel`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.media\"), \"SoundChannel\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"EventDispatcher\").into()),\n\n Method::from_builtin(instance_init, \"<SoundChannel instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<SoundChannel class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n write.set_instance_allocator(soundchannel_allocator);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"leftPeak\", Some(left_peak), None),\n", "file_path": 
"core/src/avm2/globals/flash/media/soundchannel.rs", "rank": 70, "score": 394347.41925637744 }, { "content": "/// Construct `StageDisplayState`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"StageDisplayState\"),\n\n Some(QName::new(Namespace::package(\"\"), \"Object\").into()),\n\n Method::from_builtin(\n\n instance_init,\n\n \"<StageDisplayState instance initializer>\",\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<StageDisplayState class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[\n\n (\"FULL_SCREEN\", \"fullScreen\"),\n\n (\"FULL_SCREEN_INTERACTIVE\", \"fullScreenInteractive\"),\n\n (\"NORMAL\", \"normal\"),\n\n ];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/stagedisplaystate.rs", "rank": 71, "score": 394347.34073392977 }, { "content": "/// Construct `FullScreenEvent`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.events\"), \"FullScreenEvent\"),\n\n Some(QName::new(Namespace::package(\"flash.events\"), \"ActivityEvent\").into()),\n\n Method::from_builtin(instance_init, \"<FullScreenEvent instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<FullScreenEvent class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n const PUBLIC_INSTANCE_PROPERTIES: &[(\n\n &str,\n\n Option<NativeMethodImpl>,\n\n Option<NativeMethodImpl>,\n\n )] = &[\n\n (\"fullScreen\", Some(fullscreen), None),\n\n (\"interactive\", Some(interactive), None),\n\n ];\n\n write.define_public_builtin_instance_properties(mc, 
PUBLIC_INSTANCE_PROPERTIES);\n", "file_path": "core/src/avm2/globals/flash/events/fullscreenevent.rs", "rank": 72, "score": 394347.34073392977 }, { "content": "/// Construct `TextFormatAlign`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.text\"), \"TextFormatAlign\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<TextFormatAlign instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<TextFormatAlign class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[\n\n (\"CENTER\", \"center\"),\n\n (\"END\", \"end\"),\n\n (\"JUSTIFY\", \"justify\"),\n\n (\"LEFT\", \"left\"),\n\n (\"RIGHT\", \"right\"),\n\n (\"START\", \"start\"),\n\n ];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/text/textformatalign.rs", "rank": 73, "score": 394347.34073392977 }, { "content": "/// Construct `ActionScriptVersion`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"ActionScriptVersion\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(\n\n instance_init,\n\n \"<ActionScriptVersion instance initializer>\",\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<ActionScriptVersion class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, u32)] = &[(\"ACTIONSCRIPT2\", 2), (\"ACTIONSCRIPT3\", 3)];\n\n write.define_public_constant_uint_class_traits(CONSTANTS);\n\n\n\n 
class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/actionscriptversion.rs", "rank": 74, "score": 394347.34073392977 }, { "content": "/// Construct `LineScaleMode`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"LineScaleMode\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<LineScaleMode instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<LineScaleMode class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[\n\n (\"HORIZONTAL\", \"horizontal\"),\n\n (\"NONE\", \"none\"),\n\n (\"NORMAL\", \"normal\"),\n\n (\"VERTICAL\", \"vertical\"),\n\n ];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/display/linescalemode.rs", "rank": 75, "score": 394347.34073392977 }, { "content": "/// Construct `LineScaleMode`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.display\"), \"StageScaleMode\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<StageScaleMode instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<StageScaleMode class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[\n\n (\"EXACT_FIT\", \"exactFit\"),\n\n (\"NO_BORDER\", \"noBorder\"),\n\n (\"NO_SCALE\", \"noScale\"),\n\n (\"SHOW_ALL\", \"showAll\"),\n\n ];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": 
"core/src/avm2/globals/flash/display/stagescalemode.rs", "rank": 76, "score": 394347.34073392977 }, { "content": "/// Construct `TextFieldType`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.text\"), \"TextFieldType\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<TextFieldType instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<TextFieldType class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[(\"DYNAMIC\", \"dynamic\"), (\"INPUT\", \"input\")];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/text/textfieldtype.rs", "rank": 77, "score": 394347.34073392977 }, { "content": "/// Construct `TextFieldAutoSize`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.text\"), \"TextFieldAutoSize\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(\n\n instance_init,\n\n \"<TextFieldAutoSize instance initializer>\",\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<TextFieldAutoSize class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[\n\n (\"CENTER\", \"center\"),\n\n (\"LEFT\", \"left\"),\n\n (\"NONE\", \"none\"),\n\n (\"RIGHT\", \"right\"),\n\n ];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/text/textfieldautosize.rs", "rank": 78, "score": 394347.2641346266 
}, { "content": "pub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.ui\"), \"Mouse\"),\n\n Some(QName::new(Namespace::package(\"\"), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Mouse instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Mouse class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED | ClassAttributes::FINAL);\n\n\n\n const PUBLIC_CLASS_METHODS: &[(&str, NativeMethodImpl)] = &[(\"show\", show), (\"hide\", hide)];\n\n write.define_public_builtin_class_methods(mc, PUBLIC_CLASS_METHODS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/ui/mouse.rs", "rank": 79, "score": 394341.0855983274 }, { "content": "pub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.utils\"), \"Endian\"),\n\n None,\n\n Method::from_builtin(instance_init, \"<Endian instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Endian class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, &str)] = &[\n\n (\"LITTLE_ENDIAN\", \"littleEndian\"),\n\n (\"BIG_ENDIAN\", \"bigEndian\"),\n\n ];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/utils/endian.rs", "rank": 80, "score": 394341.08559832734 }, { "content": "pub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.system\"), \"Security\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Security instance initializer>\", 
mc),\n\n Method::from_builtin(class_init, \"<Security class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n\n\n const PUBLIC_CLASS_TRAITS: &[(&str, Option<NativeMethodImpl>, Option<NativeMethodImpl>)] =\n\n &[(\"sandboxType\", Some(sandbox_type), None)];\n\n write.define_public_builtin_class_properties(mc, PUBLIC_CLASS_TRAITS);\n\n\n\n const PUBLIC_CLASS_METHODS: &[(&str, NativeMethodImpl)] = &[\n\n (\"allowDomain\", allow_domain),\n\n (\"allowInsecureDomain\", allow_insecure_domain),\n", "file_path": "core/src/avm2/globals/flash/system/security.rs", "rank": 81, "score": 394341.0855983274 }, { "content": "pub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.utils\"), \"ByteArray\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<ByteArray instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<ByteArray class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n write.set_instance_allocator(bytearray_allocator);\n\n\n\n const PUBLIC_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] = &[\n\n (\"writeByte\", write_byte),\n\n (\"writeBytes\", write_bytes),\n\n (\"readBytes\", read_bytes),\n\n (\"toString\", to_string),\n\n (\"readShort\", read_short),\n", "file_path": "core/src/avm2/globals/flash/utils/bytearray.rs", "rank": 82, "score": 394341.0855983274 }, { "content": "pub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.utils\"), \"Proxy\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(instance_init, \"<Proxy instance initializer>\", mc),\n\n Method::from_builtin(class_init, \"<Proxy class 
initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::SEALED);\n\n write.set_instance_allocator(proxy_allocator);\n\n\n\n const FLASH_PROXY_INSTANCE_METHODS: &[(&str, NativeMethodImpl)] = &[\n\n (\"getProperty\", get_property),\n\n (\"setProperty\", set_property),\n\n (\"deleteProperty\", delete_property),\n\n (\"callProperty\", call_property),\n\n (\"hasProperty\", has_property),\n", "file_path": "core/src/avm2/globals/flash/utils/proxy.rs", "rank": 83, "score": 394341.0855983274 }, { "content": "fn y<'gc>(_activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n this.y().into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 84, "score": 392185.20955637784 }, { "content": "fn x<'gc>(_activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n this.x().into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 85, "score": 392185.20955637784 }, { "content": "/// Construct `ApplicationDomain`'s class.\n\npub fn create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.system\"), \"ApplicationDomain\"),\n\n Some(QName::new(Namespace::public(), \"Object\").into()),\n\n Method::from_builtin(\n\n instance_init,\n\n \"<ApplicationDomain instance initializer>\",\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<ApplicationDomain class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n write.set_instance_allocator(appdomain_allocator);\n\n\n\n const PUBLIC_CLASS_METHODS: &[(&str, NativeMethodImpl)] = &[\n\n (\"currentDomain\", current_domain),\n\n (\"parentDomain\", parent_domain),\n\n (\"getDefinition\", get_definition),\n", "file_path": "core/src/avm2/globals/flash/system/application_domain.rs", "rank": 86, "score": 391575.3354236417 }, { "content": "pub fn 
create_class<'gc>(mc: MutationContext<'gc, '_>) -> GcCell<'gc, Class<'gc>> {\n\n let class = Class::new(\n\n QName::new(Namespace::package(\"flash.utils\"), \"CompressionAlgorithm\"),\n\n None,\n\n Method::from_builtin(\n\n instance_init,\n\n \"<CompressionAlgorithm instance initializer>\",\n\n mc,\n\n ),\n\n Method::from_builtin(class_init, \"<CompressionAlgorithm class initializer>\", mc),\n\n mc,\n\n );\n\n\n\n let mut write = class.write(mc);\n\n\n\n write.set_attributes(ClassAttributes::FINAL | ClassAttributes::SEALED);\n\n\n\n const CONSTANTS: &[(&str, &str)] =\n\n &[(\"DEFLATE\", \"deflate\"), (\"LZMA\", \"lzma\"), (\"ZLIB\", \"zlib\")];\n\n write.define_public_constant_string_class_traits(CONSTANTS);\n\n\n\n class\n\n}\n", "file_path": "core/src/avm2/globals/flash/utils/compression_algorithm.rs", "rank": 87, "score": 391569.00176559173 }, { "content": "fn x_scale<'gc>(activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n this.scale_x(activation.context.gc_context)\n\n .into_fraction()\n\n .into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 88, "score": 388173.7158788211 }, { "content": "fn quality<'gc>(activation: &mut Activation<'_, 'gc, '_>, _this: DisplayObject<'gc>) -> Value<'gc> {\n\n let quality = activation.context.stage.quality().into_avm_str();\n\n quality.into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 89, "score": 388173.7158788211 }, { "content": "fn y_scale<'gc>(activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n this.scale_y(activation.context.gc_context)\n\n .into_fraction()\n\n .into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 90, "score": 388173.7158788211 }, { "content": "fn x_mouse<'gc>(activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n let (local_x, _) = this.global_to_local(*activation.context.mouse_position);\n\n 
local_x.to_pixels().into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 91, "score": 388173.7158788211 }, { "content": "fn alpha<'gc>(_activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n (this.alpha() * 100.0).into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 92, "score": 388173.7158788211 }, { "content": "fn target<'gc>(activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n AvmString::new(activation.context.gc_context, this.slash_path()).into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 93, "score": 388173.7158788211 }, { "content": "fn rotation<'gc>(activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n let degrees: f64 = this.rotation(activation.context.gc_context).into();\n\n degrees.into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 94, "score": 388173.7158788211 }, { "content": "fn url<'gc>(activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n this.as_movie_clip()\n\n .and_then(|mc| mc.movie())\n\n .and_then(|mov| mov.url().map(|url| url.to_string()))\n\n .map_or_else(\n\n || \"\".into(),\n\n |s| AvmString::new_utf8(activation.context.gc_context, s).into(),\n\n )\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 95, "score": 388173.7158788211 }, { "content": "fn height<'gc>(_activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n this.height().into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 96, "score": 388173.7158788211 }, { "content": "fn visible<'gc>(_activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n this.visible().into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 97, "score": 388173.7158788211 }, { "content": "fn y_mouse<'gc>(activation: &mut 
Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n let (_, local_y) = this.global_to_local(*activation.context.mouse_position);\n\n local_y.to_pixels().into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 98, "score": 388173.7158788211 }, { "content": "fn width<'gc>(_activation: &mut Activation<'_, 'gc, '_>, this: DisplayObject<'gc>) -> Value<'gc> {\n\n this.width().into()\n\n}\n\n\n", "file_path": "core/src/avm1/object/stage_object.rs", "rank": 99, "score": 388173.71587882115 } ]
Rust
src/stage/ast/mod.rs
ncatelli/mossy
25211cc1e871dfe8d06d0a04d23271a321870fdc
macro_rules! generate_type_specifier { (integer, $sign:expr, $width:expr) => { $crate::stage::ast::Type::Integer($sign, $width) }; (char) => { generate_type_specifier!(i8) }; (ptr => $ty:expr) => { $crate::stage::ast::Type::Pointer(Box::new($ty)) }; (i8) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Signed, $crate::stage::ast::IntegerWidth::Eight ) }; (u8) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Unsigned, $crate::stage::ast::IntegerWidth::Eight ) }; (i16) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Signed, $crate::stage::ast::IntegerWidth::Sixteen ) }; (i32) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Signed, $crate::stage::ast::IntegerWidth::ThirtyTwo ) }; (u32) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Unsigned, $crate::stage::ast::IntegerWidth::ThirtyTwo ) }; (i64) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Signed, $crate::stage::ast::IntegerWidth::SixtyFour ) }; (u64) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Unsigned, $crate::stage::ast::IntegerWidth::SixtyFour ) }; } #[derive(Debug)] pub struct TypedProgram { pub defs: Vec<TypedGlobalDecls>, } impl TypedProgram { pub fn new(defs: Vec<TypedGlobalDecls>) -> Self { Self { defs } } } #[derive(PartialEq, Debug, Clone)] pub struct TypedFunctionDeclaration { pub id: String, pub block: TypedCompoundStmts, } impl TypedFunctionDeclaration { pub fn new(id: String, block: TypedCompoundStmts) -> Self { Self { id, block } } } #[derive(PartialEq, Debug, Clone)] pub enum TypedGlobalDecls { Func(TypedFunctionDeclaration), Var(Declaration), } #[derive(PartialEq, Debug, Clone)] pub struct TypedCompoundStmts { inner: Vec<TypedStmtNode>, } impl TypedCompoundStmts { pub fn new(inner: Vec<TypedStmtNode>) -> Self { Self { inner } } } impl From<TypedCompoundStmts> for Vec<TypedStmtNode> { fn from(src: TypedCompoundStmts) -> Self { src.inner } } 
#[derive(PartialEq, Debug, Clone)] pub enum Declaration { Scalar(Type, Vec<String>), Array { ty: Type, id: String, size: usize }, } #[derive(PartialEq, Debug, Clone)] pub enum TypedStmtNode { Declaration(Declaration), Return(Type, String, Option<TypedExprNode>), Expression(TypedExprNode), If( TypedExprNode, TypedCompoundStmts, Option<TypedCompoundStmts>, ), While(TypedExprNode, TypedCompoundStmts), For( Box<TypedStmtNode>, TypedExprNode, Box<TypedStmtNode>, TypedCompoundStmts, ), } #[derive(PartialEq, Debug, Clone)] pub enum TypedExprNode { Primary(Type, Primary), FunctionCall(Type, String, Option<Box<TypedExprNode>>), IdentifierAssignment(Type, String, Box<TypedExprNode>), DerefAssignment(Type, Box<TypedExprNode>, Box<TypedExprNode>), Equal(Type, Box<TypedExprNode>, Box<TypedExprNode>), NotEqual(Type, Box<TypedExprNode>, Box<TypedExprNode>), LessThan(Type, Box<TypedExprNode>, Box<TypedExprNode>), GreaterThan(Type, Box<TypedExprNode>, Box<TypedExprNode>), LessEqual(Type, Box<TypedExprNode>, Box<TypedExprNode>), GreaterEqual(Type, Box<TypedExprNode>, Box<TypedExprNode>), Addition(Type, Box<TypedExprNode>, Box<TypedExprNode>), Subtraction(Type, Box<TypedExprNode>, Box<TypedExprNode>), Division(Type, Box<TypedExprNode>, Box<TypedExprNode>), Modulo(Type, Box<TypedExprNode>, Box<TypedExprNode>), Multiplication(Type, Box<TypedExprNode>, Box<TypedExprNode>), LogicalNot(Type, Box<TypedExprNode>), Negate(Type, Box<TypedExprNode>), Invert(Type, Box<TypedExprNode>), Ref(Type, String), Deref(Type, Box<TypedExprNode>), ScaleBy(Type, Box<TypedExprNode>), Grouping(Type, Box<TypedExprNode>), } impl Typed for TypedExprNode { fn r#type(&self) -> Type { match self { TypedExprNode::Primary(ty, _) | TypedExprNode::FunctionCall(ty, _, _) | TypedExprNode::IdentifierAssignment(ty, _, _) | TypedExprNode::DerefAssignment(ty, _, _) | TypedExprNode::Equal(ty, _, _) | TypedExprNode::NotEqual(ty, _, _) | TypedExprNode::LessThan(ty, _, _) | TypedExprNode::GreaterThan(ty, _, _) | 
TypedExprNode::LessEqual(ty, _, _) | TypedExprNode::GreaterEqual(ty, _, _) | TypedExprNode::Addition(ty, _, _) | TypedExprNode::Subtraction(ty, _, _) | TypedExprNode::Division(ty, _, _) | TypedExprNode::Multiplication(ty, _, _) | TypedExprNode::Modulo(ty, _, _) | TypedExprNode::LogicalNot(ty, _) | TypedExprNode::Negate(ty, _) | TypedExprNode::Invert(ty, _) | TypedExprNode::Ref(ty, _) | TypedExprNode::Deref(ty, _) | TypedExprNode::ScaleBy(ty, _) | TypedExprNode::Grouping(ty, _) => ty.clone(), } } } #[derive(PartialEq, Debug, Clone)] pub enum Primary { Integer { sign: Signed, width: IntegerWidth, value: [u8; 8], }, Identifier(Type, String), Str(Vec<u8>), } impl Typed for Primary { fn r#type(&self) -> Type { match self { Primary::Integer { sign, width, value: _, } => Type::Integer(*sign, *width), Primary::Identifier(ty, _) => ty.clone(), Primary::Str(_) => generate_type_specifier!(ptr => generate_type_specifier!(char)), } } } pub trait ByteSized { fn size(&self) -> usize; } pub trait Typed { fn r#type(&self) -> Type; } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Signed { Signed, Unsigned, } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd)] pub enum IntegerWidth { Eight, Sixteen, ThirtyTwo, SixtyFour, } impl ByteSized for IntegerWidth { fn size(&self) -> usize { match self { Self::Eight => 1, Self::Sixteen => 2, Self::ThirtyTwo => 4, Self::SixtyFour => 8, } } } #[derive(Debug, Clone, PartialEq, Eq)] pub struct FuncProto { pub return_type: Box<Type>, pub args: Vec<Type>, } impl FuncProto { pub fn new(return_type: Box<Type>, args: Vec<Type>) -> Self { Self { return_type, args } } } const POINTER_BYTE_WIDTH: usize = (usize::BITS / 8) as usize; #[derive(Debug, Clone, PartialEq, Eq)] pub enum Type { Integer(Signed, IntegerWidth), Void, Func(FuncProto), Pointer(Box<Type>), } impl ByteSized for Type { fn size(&self) -> usize { match self { Self::Integer(_, iw) => iw.size(), Self::Void => 0, Self::Func { .. 
} => POINTER_BYTE_WIDTH, Self::Pointer(_) => POINTER_BYTE_WIDTH, } } } impl Type { pub fn pointer_to(&self) -> Self { Self::Pointer(Box::new(self.clone())) } pub fn value_at(&self) -> Option<Self> { match self { Type::Pointer(ty) => Some(*(ty.clone())), _ => None, } } }
macro_rules! generate_type_specifier { (integer, $sign:expr, $width:expr) => { $crate::stage::ast::Type::Integer($sign, $width) }; (char) => { generate_type_specifier!(i8) }; (ptr => $ty:expr) => { $crate::stage::ast::Type::Pointer(Box::new($ty)) }; (i8) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Signed, $crate::stage::ast::IntegerWidth::Eight ) }; (u8) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Unsigned, $crate::stage::ast::IntegerWidth::Eight ) }; (i16) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Signed, $crate::stage::ast::IntegerWidth::Sixteen ) }; (i32) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Signed, $crate::stage::ast::IntegerWidth::ThirtyTwo ) }; (u32) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Unsigned, $crate::stage::ast::IntegerWidth::ThirtyTwo ) }; (i64) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Signed, $crate::stage::ast::IntegerWidth::SixtyFour ) }; (u64) => { generate_type_specifier!( integer, $crate::stage::ast::Signed::Unsigned, $crate::stage::ast::IntegerWidth::SixtyFour ) }; } #[derive(Debug)] pub struct TypedProgram { pub defs: Vec<TypedGlobalDecls>, } impl TypedProgram { pub fn new(defs: Vec<TypedGlobalDecls>) -> Self { Self { defs } } } #[derive(PartialEq, Debug, Clone)] pub struct TypedFunctionDeclaration { pub id: String, pub block: TypedCompoundStmts, } impl TypedFunctionDeclaration { pub fn new(id: String, block: TypedCompoundStmts) -> Self { Self { id, block } } } #[derive(PartialEq, Debug, Clone)] pub enum TypedGlobalDecls { Func(TypedFunctionDeclaration), Var(Declaration), } #[derive(PartialEq, Debug, Clone)] pub struct TypedCompoundStmts { inner: Vec<TypedStmtNode>, } impl TypedCompoundStmts { pub fn new(inner: Vec<TypedStmtNode>) -> Self { Self { inner } } } impl From<TypedCompoundStmts> for Vec<TypedStmtNode> { fn from(src: TypedCompoundStmts) -> Self { src.inner } } 
#[derive(PartialEq, Debug, Clone)] pub enum Declaration { Scalar(Type, Vec<String>), Array { ty: Type, id: String, size: usize }, } #[derive(PartialEq, Debug, Clone)] pub enum TypedStmtNode { Declaration(Declaration), Return(Type, String, Option<TypedExprNode>), Expression(TypedExprNode), If( TypedExprNode, TypedCompoundStmts, Option<TypedCompoundStmts>, ), While(TypedExprNode, TypedCompoundStmts), For( Box<TypedStmtNode>, TypedExprNode, Box<TypedStmtNode>, TypedCompoundStmts, ), } #[derive(PartialEq, Debug, Clone)] pub enum TypedExprNode { Primary(Type, Primary), FunctionCall(Type, String, Option<Box<TypedExprNode>>), IdentifierAssignment(Type, String, Box<TypedExprNode>), DerefAssignment(Type, Box<TypedExprNode>, Box<TypedExprNode>), Equal(Type, Box<TypedExprNode>, Box<TypedExprNode>), NotEqual(Type, Box<TypedExprNode>, Box<TypedExprNode>), LessThan(Type, Box<TypedExprNode>, Box<TypedExprNode>), GreaterThan(Type, Box<TypedExprNode>, Box<TypedExprNode>), LessEqual(Type, Box<TypedExprNode>, Box<TypedExprNode>), GreaterEqual(Type, Box<TypedExprNode>, Box<TypedExprNode>), Addition(Type, Box<TypedExprNode>, Box<TypedExprNode>), Subtraction(Type, Box<TypedExprNode>, Box<TypedExprNode>), Division(Type, Box<TypedExprNode>, Box<TypedExprNode>), Modulo(Type, Box<TypedExprNode>, Box<TypedExprNode>), Multiplication(Type, Box<TypedExprNode>, Box<TypedExprNode>), LogicalNot(Type, Box<TypedExprNode>), Negate(Type, Box<TypedExprNode>), Invert(Type, Box<TypedExprNode>), Ref(Type, String), Deref(Type, Box<TypedExprNode>), ScaleBy(Type, Box<TypedExprNode>), Grouping(Type, Box<TypedExprNode>), } impl Typed for TypedExprNode { fn r#type(&self) -> Type { match self { TypedExprNode::Primary(ty, _) | TypedExprNode::FunctionCall(ty, _, _) | TypedExprNode::IdentifierAssignment(ty, _, _) | TypedExprNode::DerefAssignment(ty, _, _) | TypedExprNode::Equal(ty, _, _) | TypedExprNode::NotEqual(ty, _, _) | TypedExprNode::LessThan(ty, _, _) | TypedExprNode::GreaterThan(ty, _, _) | 
TypedExprNode::LessEqual(ty, _, _) | TypedExprNode::GreaterEqual(ty, _, _) | TypedExprNode::Addition(ty, _, _) | TypedExprNode::Subtraction(ty, _, _) | TypedExprNode::Division(ty, _, _) | TypedExprNode::Multiplication(ty, _, _) | TypedExprNode::Modulo(ty, _, _) | TypedExprNode::LogicalNot(ty, _) | TypedExprNode::Negate(ty, _) | TypedExprNode::Invert(ty, _) | TypedExprNode::Ref(ty, _) | TypedExprNode::Deref(ty, _) | TypedExprNode::ScaleBy(ty, _) | TypedExprNode::Grouping(ty, _) => ty.clone(), } } } #[derive(PartialEq, Debug, Clone)] pub enum Primary { Integer { sign: Signed, width: IntegerWidth, value: [u8; 8], }, Identifier(Type, String), Str(Vec<u8>), } impl Typed for Primary { fn r#type(&self) -> Type { match self { Primary::Integer { sign, width, value: _, } => Type::Integer(*sign, *width), Primary::Identifier(ty, _) => ty.clone(), Primary::Str(_) => generate_type_specifier!(ptr => generate_type_specifier!(char)), } } } pub trait ByteSized { fn size(&self) -> usize; } pub trait Typed { fn r#type(&self) -> Type; } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Signed { Signed, Unsigned, } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd)] pub enum IntegerWidth { Eight, Sixteen, ThirtyTwo, SixtyFour, } impl ByteSized for IntegerWidth {
} #[derive(Debug, Clone, PartialEq, Eq)] pub struct FuncProto { pub return_type: Box<Type>, pub args: Vec<Type>, } impl FuncProto { pub fn new(return_type: Box<Type>, args: Vec<Type>) -> Self { Self { return_type, args } } } const POINTER_BYTE_WIDTH: usize = (usize::BITS / 8) as usize; #[derive(Debug, Clone, PartialEq, Eq)] pub enum Type { Integer(Signed, IntegerWidth), Void, Func(FuncProto), Pointer(Box<Type>), } impl ByteSized for Type { fn size(&self) -> usize { match self { Self::Integer(_, iw) => iw.size(), Self::Void => 0, Self::Func { .. } => POINTER_BYTE_WIDTH, Self::Pointer(_) => POINTER_BYTE_WIDTH, } } } impl Type { pub fn pointer_to(&self) -> Self { Self::Pointer(Box::new(self.clone())) } pub fn value_at(&self) -> Option<Self> { match self { Type::Pointer(ty) => Some(*(ty.clone())), _ => None, } } }
fn size(&self) -> usize { match self { Self::Eight => 1, Self::Sixteen => 2, Self::ThirtyTwo => 4, Self::SixtyFour => 8, } }
function_block-full_function
[ { "content": "fn type_declarator<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], Type> {\n\n whitespace_wrapped(\n\n parcel::join(\n\n type_specifier(),\n\n whitespace_wrapped(expect_character('*').one_or_more()),\n\n )\n\n .map(|(ty, pointer_depth)| {\n\n let nested_pointers = pointer_depth.len() - 1;\n\n (0..nested_pointers)\n\n .into_iter()\n\n .fold(Type::Pointer(Box::new(ty)), |acc, _| {\n\n Type::Pointer(Box::new(acc))\n\n })\n\n }),\n\n )\n\n .or(type_specifier)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 0, "score": 200252.34097541665 }, { "content": "fn unsigned_number<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], Primary> {\n\n parcel::one_of(vec![\n\n dec_u8().map(|num| Primary::Integer {\n\n sign: Signed::Unsigned,\n\n width: IntegerWidth::Eight,\n\n value: crate::util::pad_to_64bit_array(num.to_le_bytes()),\n\n }),\n\n dec_u16().map(|num| Primary::Integer {\n\n sign: Signed::Unsigned,\n\n width: IntegerWidth::Sixteen,\n\n value: crate::util::pad_to_64bit_array(num.to_le_bytes()),\n\n }),\n\n dec_u32().map(|num| Primary::Integer {\n\n sign: Signed::Unsigned,\n\n width: IntegerWidth::ThirtyTwo,\n\n value: crate::util::pad_to_64bit_array(num.to_le_bytes()),\n\n }),\n\n dec_u64().map(|num| Primary::Integer {\n\n sign: Signed::Unsigned,\n\n width: IntegerWidth::SixtyFour,\n\n value: crate::util::pad_to_64bit_array(num.to_le_bytes()),\n\n }),\n\n ])\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 1, "score": 177647.38039922246 }, { "content": "fn string_literal<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], Primary> {\n\n character_wrapped(\n\n '\"',\n\n '\"',\n\n parcel::zero_or_more(\n\n ascii_alphanumeric()\n\n .or(ascii_whitespace)\n\n .or(ascii_control)\n\n // escaped quote\n\n .or(|| expect_character('\\\\').and_then(|_| expect_character('\\\"')))\n\n .map(|c| c as u8),\n\n ),\n\n )\n\n .map(ast::Primary::Str)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 2, "score": 177284.91173700444 }, { "content": 
"fn type_specifier<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], Type> {\n\n parcel::join(\n\n whitespace_wrapped(parcel::one_of(vec![\n\n expect_str(\"signed\").map(|_| Signed::Signed),\n\n expect_str(\"unsigned\").map(|_| Signed::Unsigned),\n\n ]))\n\n .optional(),\n\n whitespace_wrapped(parcel::one_of(vec![\n\n //\n\n // long parser\n\n //\n\n parcel::one_of(vec![\n\n // long long int\n\n whitespace_wrapped(expect_str(\"long\"))\n\n .and_then(|_| whitespace_wrapped(expect_str(\"long\")))\n\n .and_then(|_| whitespace_wrapped(expect_str(\"int\"))),\n\n // long long\n\n whitespace_wrapped(expect_str(\"long\"))\n\n .and_then(|_| whitespace_wrapped(expect_str(\"long\"))),\n\n // long int\n", "file_path": "src/parser/mod.rs", "rank": 3, "score": 176862.51615200468 }, { "content": "fn identifier<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], String> {\n\n parcel::one_or_more(ascii_alphanumeric().or(|| expect_character('_')))\n\n .map(|chars| chars.into_iter().collect())\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 4, "score": 172326.8090837539 }, { "content": "fn operand_width_of_type(ty: Type) -> OperandWidth {\n\n match ty {\n\n Type::Integer(_, iw) => match iw {\n\n ast::IntegerWidth::Eight => OperandWidth::Byte,\n\n ast::IntegerWidth::Sixteen => OperandWidth::Word,\n\n ast::IntegerWidth::ThirtyTwo => OperandWidth::DoubleWord,\n\n ast::IntegerWidth::SixtyFour => OperandWidth::QuadWord,\n\n },\n\n Type::Void | Type::Func(_) | Type::Pointer(_) => OperandWidth::QuadWord,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::stage::ast;\n\n\n\n macro_rules! 
compound_statements {\n\n ($($stmt:expr,)*) => {\n\n $crate::stage::ast::TypedCompoundStmts::new(\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 5, "score": 163673.75813404942 }, { "content": "fn function_declaration<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], FunctionDeclaration> {\n\n parcel::join(\n\n parcel::join(type_declarator(), whitespace_wrapped(identifier())),\n\n parcel::right(parcel::join(\n\n expect_character('(').and_then(|_| whitespace_wrapped(expect_character(')'))),\n\n compound_statements(),\n\n )),\n\n )\n\n .map(|((ty, id), block)| FunctionDeclaration::new(id, ty, block))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 7, "score": 159899.33990753148 }, { "content": "fn number<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], Primary> {\n\n parcel::one_of(vec![\n\n dec_i8().map(|num| Primary::Integer {\n\n sign: Signed::Signed,\n\n width: IntegerWidth::Eight,\n\n value: crate::util::pad_to_64bit_array(num.to_le_bytes()),\n\n }),\n\n dec_u8().map(|num| Primary::Integer {\n\n sign: Signed::Unsigned,\n\n width: IntegerWidth::Eight,\n\n value: crate::util::pad_to_64bit_array(num.to_le_bytes()),\n\n }),\n\n dec_i16().map(|num| Primary::Integer {\n\n sign: Signed::Signed,\n\n width: IntegerWidth::Sixteen,\n\n value: crate::util::pad_to_64bit_array(num.to_le_bytes()),\n\n }),\n\n dec_u16().map(|num| Primary::Integer {\n\n sign: Signed::Unsigned,\n\n width: IntegerWidth::Sixteen,\n", "file_path": "src/parser/mod.rs", "rank": 8, "score": 158273.8592988492 }, { "content": "fn declaration<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], StmtNode> {\n\n parcel::join(\n\n type_declarator(),\n\n whitespace_wrapped(parcel::join(\n\n identifier(),\n\n character_wrapped('[', ']', unsigned_number()),\n\n )),\n\n )\n\n .map(|(ty, (id, size))| {\n\n let size = match size {\n\n Primary::Integer {\n\n value,\n\n sign: Signed::Unsigned,\n\n ..\n\n } => usize::from_le_bytes(value),\n\n // The remaining three variants 
are guaranteed to be unreachable by\n\n // the parser.\n\n _ => unreachable!(),\n\n };\n\n (ty, id, size)\n", "file_path": "src/parser/mod.rs", "rank": 9, "score": 154237.76194020954 }, { "content": "fn primary<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n number()\n\n .map(ExprNode::Primary)\n\n .or(|| string_literal().map(ExprNode::Primary))\n\n .or(|| identifier().map(|id| ExprNode::Primary(Primary::Identifier(id))))\n\n .or(grouping)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 10, "score": 154219.91782303908 }, { "content": "fn ascii_control<'a>() -> impl Parser<'a, &'a [(usize, char)], char> {\n\n any_character().predicate(|c| c.is_ascii_control())\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 11, "score": 153418.32459417556 }, { "content": "fn ascii_alphanumeric<'a>() -> impl Parser<'a, &'a [(usize, char)], char> {\n\n any_character().predicate(|c| c.is_ascii_alphanumeric())\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 12, "score": 153418.32459417556 }, { "content": "fn ascii_whitespace<'a>() -> impl Parser<'a, &'a [(usize, char)], char> {\n\n any_character().predicate(|c| c.is_ascii_whitespace())\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 13, "score": 153418.32459417556 }, { "content": "fn codegen_global_symbol(ty: &Type, identifier: &str, count: usize) -> Vec<String> {\n\n let reserve_bytes = ty.size() * count;\n\n\n\n vec![format!(\n\n \"\\t.data\\n\\t.globl\\t{}\\n{}:\\n\\t.zero\\t{}\\n\\t.text\\n\",\n\n identifier, identifier, reserve_bytes\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 14, "score": 150871.83586726495 }, { "content": "/// parse expects a character slice as input and attempts to parse a valid\n\n/// expression, returning a parse error if it is invalid.\n\npub fn parse(input: &[(usize, char)]) -> Result<Program, ParseErr> {\n\n parcel::one_or_more(function_declaration().map(ast::GlobalDecls::Func).or(|| {\n\n 
semicolon_terminated_statement(declaration()).map(|stmt| {\n\n // safe to unpack due to declaration guarantee.\n\n if let ast::StmtNode::Declaration(decl) = stmt {\n\n ast::GlobalDecls::Var(decl)\n\n } else {\n\n unreachable!()\n\n }\n\n })\n\n }))\n\n .parse(input)\n\n .map_err(ParseErr::UnexpectedToken)\n\n .and_then(|ms| match ms {\n\n MatchStatus::Match {\n\n span: _,\n\n remainder: _,\n\n inner,\n\n } => Ok(inner),\n\n MatchStatus::NoMatch(_) => Err(ParseErr::Unspecified(\"not a valid expression\".to_string())),\n\n })\n\n .map(Program::new)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 16, "score": 133925.71952793023 }, { "content": "/// Represents a bit sized type\n\npub trait AddressWidth {\n\n fn bits(&self) -> usize;\n\n}\n\n\n\nmacro_rules! impl_address_width_with_bits {\n\n ($($t:ty => $width:literal,)*) => {\n\n $(\n\n impl AddressWidth for $t {\n\n fn bits(&self) -> usize {\n\n $width\n\n }\n\n }\n\n )*\n\n };\n\n}\n\n\n\nimpl_address_width_with_bits!(\n\n u16 => 16,\n\n u32 => 32,\n\n u64 => 64,\n\n);\n\n\n", "file_path": "src/stage/codegen/register.rs", "rank": 17, "score": 133773.28777164343 }, { "content": "fn while_statement<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], StmtNode> {\n\n whitespace_wrapped(expect_str(\"while\"))\n\n .and_then(|_| {\n\n parcel::join(\n\n character_wrapped('(', ')', expression()),\n\n compound_statements(),\n\n )\n\n })\n\n .map(|(cond, block)| StmtNode::While(cond, block))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 18, "score": 130847.93711679758 }, { "content": "fn expression<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n assignment()\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 19, "score": 130847.93711679758 }, { "content": "fn grouping<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n whitespace_wrapped(expect_character('('))\n\n .and_then(|_| {\n\n parcel::left(parcel::join(\n\n expression(),\n\n 
whitespace_wrapped(expect_character(')')),\n\n ))\n\n })\n\n .map(|expr| grouping_expr!(expr))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 20, "score": 130847.93711679758 }, { "content": "fn for_statement<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], StmtNode> {\n\n whitespace_wrapped(expect_str(\"for\"))\n\n .and_then(|_| {\n\n parcel::join(\n\n character_wrapped(\n\n '(',\n\n ')',\n\n parcel::join(\n\n preop_statement(),\n\n parcel::join(\n\n parcel::left(parcel::join(\n\n expression(),\n\n whitespace_wrapped(expect_str(\";\")),\n\n )),\n\n postop_statement(),\n\n ),\n\n ),\n\n ),\n\n compound_statements(),\n\n )\n\n })\n\n .map(|((preop, (cond, postop)), block)| {\n\n StmtNode::For(Box::new(preop), cond, Box::new(postop), block)\n\n })\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 21, "score": 130847.93711679758 }, { "content": "fn statement<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], StmtNode> {\n\n semicolon_terminated_statement(declaration())\n\n .or(if_statement)\n\n .or(while_statement)\n\n .or(for_statement)\n\n .or(|| semicolon_terminated_statement(return_statement()))\n\n .or(|| semicolon_terminated_statement(expression().map(StmtNode::Expression)))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 22, "score": 130847.93711679758 }, { "content": "fn call<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n parcel::join(\n\n identifier(),\n\n parcel::left(parcel::join(\n\n whitespace_wrapped(expect_character('(')).and_then(|_| expression().optional()),\n\n whitespace_wrapped(expect_character(')')),\n\n )),\n\n )\n\n .map(|(id, expr)| ExprNode::FunctionCall(id, expr.map(Box::new)))\n\n .or(prefix_expression)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 23, "score": 130847.93711679758 }, { "content": "fn multiplication<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n parcel::join(\n\n call(),\n\n parcel::zero_or_more(parcel::join(\n\n whitespace_wrapped(\n\n 
expect_character('*')\n\n .map(|_| MultiplicationExprOp::Star)\n\n .or(|| expect_character('/').map(|_| MultiplicationExprOp::Slash))\n\n .or(|| expect_character('%').map(|_| MultiplicationExprOp::Mod)),\n\n ),\n\n whitespace_wrapped(call()),\n\n ))\n\n .map(unzip),\n\n )\n\n .map(|(first_expr, (operators, operands))| {\n\n operators\n\n .into_iter()\n\n .zip(operands.into_iter())\n\n .fold(first_expr, |lhs, (operator, rhs)| match operator {\n\n MultiplicationExprOp::Star => {\n\n factor_expr!(lhs, '*', rhs)\n\n }\n\n MultiplicationExprOp::Slash => factor_expr!(lhs, '/', rhs),\n\n MultiplicationExprOp::Mod => factor_expr!(lhs, '%', rhs),\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 24, "score": 130847.93711679758 }, { "content": "fn assignment<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n parcel::join(\n\n whitespace_wrapped(equality()),\n\n whitespace_wrapped(expect_character('=')).and_then(|_| whitespace_wrapped(assignment())),\n\n )\n\n .map(|(lhs, rhs)| assignment_expr!(lhs, '=', rhs))\n\n .or(equality)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 25, "score": 130847.93711679758 }, { "content": "fn equality<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n parcel::join(\n\n relational(),\n\n parcel::zero_or_more(parcel::join(\n\n whitespace_wrapped(\n\n expect_str(\"==\")\n\n .map(|_| EqualityExprOp::Equal)\n\n .or(|| expect_str(\"!=\").map(|_| EqualityExprOp::NotEqual)),\n\n ),\n\n whitespace_wrapped(relational()),\n\n ))\n\n .map(unzip),\n\n )\n\n .map(|(first_expr, (operators, operands))| {\n\n operators\n\n .into_iter()\n\n .zip(operands.into_iter())\n\n .fold(first_expr, |lhs, (operator, rhs)| match operator {\n\n EqualityExprOp::Equal => equality_expr!(lhs, \"==\", rhs),\n\n EqualityExprOp::NotEqual => equality_expr!(lhs, \"!=\", rhs),\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 26, "score": 130847.93711679758 }, { "content": "fn if_statement<'a>() -> 
impl parcel::Parser<'a, &'a [(usize, char)], StmtNode> {\n\n parcel::join(\n\n if_head(),\n\n parcel::optional(\n\n whitespace_wrapped(expect_str(\"else\")).and_then(|_| compound_statements()),\n\n ),\n\n )\n\n .map(|((cond, cond_true), cond_false)| StmtNode::If(cond, cond_true, cond_false))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 27, "score": 130847.93711679758 }, { "content": "fn addition<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n parcel::join(\n\n multiplication(),\n\n parcel::zero_or_more(parcel::join(\n\n whitespace_wrapped(\n\n expect_character('+')\n\n .map(|_| AdditionExprOp::Plus)\n\n .or(|| expect_character('-').map(|_| AdditionExprOp::Minus)),\n\n ),\n\n whitespace_wrapped(multiplication()),\n\n ))\n\n .map(unzip),\n\n )\n\n .map(|(first_expr, (operators, operands))| {\n\n operators\n\n .into_iter()\n\n .zip(operands.into_iter())\n\n .fold(first_expr, |lhs, (operator, rhs)| match operator {\n\n AdditionExprOp::Plus => term_expr!(lhs, '+', rhs),\n\n AdditionExprOp::Minus => term_expr!(lhs, '-', rhs),\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 28, "score": 130847.93711679758 }, { "content": "fn relational<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n parcel::join(\n\n addition(),\n\n parcel::zero_or_more(parcel::join(\n\n whitespace_wrapped(\n\n expect_str(\"<=\")\n\n .map(|_| RelationalExprOp::LessEqual)\n\n .or(|| expect_str(\">=\").map(|_| RelationalExprOp::GreaterEqual))\n\n .or(|| expect_str(\"<\").map(|_| RelationalExprOp::LessThan))\n\n .or(|| expect_str(\">\").map(|_| RelationalExprOp::GreaterThan)),\n\n ),\n\n whitespace_wrapped(addition()),\n\n ))\n\n .map(unzip),\n\n )\n\n .map(|(first_expr, (operators, operands))| {\n\n operators\n\n .into_iter()\n\n .zip(operands.into_iter())\n\n .fold(first_expr, |lhs, (operator, rhs)| match operator {\n", "file_path": "src/parser/mod.rs", "rank": 29, "score": 130847.93711679758 }, { "content": "fn codegen_deref(ret: 
&mut GeneralPurposeRegister, ty: ast::Type) -> Vec<String> {\n\n let width = operand_width_of_type(ty);\n\n vec![format!(\n\n \"\\tmov{}\\t(%{}), %{}\\n\",\n\n operator_suffix(width),\n\n ret.fmt_with_operand_width(OperandWidth::QuadWord),\n\n ret.fmt_with_operand_width(width)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 30, "score": 130534.07551850428 }, { "content": "fn calculate_satisfying_integer_size_from_rank(lhs: usize, rhs: usize) -> usize {\n\n let max = core::cmp::max(lhs, rhs);\n\n let min = core::cmp::min(lhs, rhs);\n\n\n\n // promote to next largest signed integer\n\n if !is_even(max) && is_even(min) {\n\n max + 1\n\n } else {\n\n max\n\n }\n\n}\n\n\n", "file_path": "src/stage/type_check/mod.rs", "rank": 31, "score": 130367.5647568195 }, { "content": "fn preop_statement<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], StmtNode> {\n\n statement()\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 32, "score": 127842.11915015924 }, { "content": "fn compound_statements<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], CompoundStmts> {\n\n character_wrapped('{', '}', parcel::zero_or_more(statement())).map(CompoundStmts::new)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 33, "score": 127842.11915015924 }, { "content": "fn postfix_expression<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n parcel::join(\n\n identifier(),\n\n parcel::left(parcel::join(\n\n whitespace_wrapped(expect_character('[')).and_then(|_| expression()),\n\n whitespace_wrapped(expect_character(']')),\n\n )),\n\n )\n\n .map(|(id, expr)| ExprNode::Index(id, Box::new(expr)))\n\n .or(primary)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 34, "score": 127842.11915015924 }, { "content": "fn postop_statement<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], StmtNode> {\n\n statement()\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 35, "score": 127842.11915015924 }, { "content": "fn 
prefix_expression<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], ExprNode> {\n\n whitespace_wrapped(expect_character('*'))\n\n .and_then(|_| prefix_expression())\n\n .map(Box::new)\n\n .map(ExprNode::Deref)\n\n .or(|| {\n\n whitespace_wrapped(expect_character('&'))\n\n .and_then(|_| identifier())\n\n .map(ExprNode::Ref)\n\n })\n\n // unary logical not\n\n .or(|| {\n\n whitespace_wrapped(expect_character('!'))\n\n .and_then(|_| prefix_expression())\n\n .map(Box::new)\n\n .map(ExprNode::LogicalNot)\n\n })\n\n // unary negate\n\n .or(|| {\n\n whitespace_wrapped(expect_character('-'))\n", "file_path": "src/parser/mod.rs", "rank": 36, "score": 127842.11915015924 }, { "content": "fn return_statement<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], StmtNode> {\n\n parcel::right(parcel::join(\n\n whitespace_wrapped(expect_str(\"return\")),\n\n expression().optional(),\n\n ))\n\n .map(StmtNode::Return)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 37, "score": 127842.11915015924 }, { "content": "pub trait WidthFormatted {\n\n fn fmt_with_operand_width(&self, width: OperandWidth) -> &'static str;\n\n}\n\n\n\n/// Represents the width of an operand\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum OperandWidth {\n\n QuadWord,\n\n DoubleWord,\n\n Word,\n\n Byte,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub struct PointerRegister;\n\n\n\nimpl WidthFormatted for PointerRegister {\n\n fn fmt_with_operand_width(&self, width: OperandWidth) -> &'static str {\n\n match width {\n\n OperandWidth::QuadWord => \"rip\",\n", "file_path": "src/stage/codegen/machine/arch/x86_64/register.rs", "rank": 38, "score": 125090.97544389391 }, { "content": "fn if_head<'a>() -> impl parcel::Parser<'a, &'a [(usize, char)], (ExprNode, CompoundStmts)> {\n\n whitespace_wrapped(expect_str(\"if\")).and_then(|_| {\n\n parcel::join(\n\n character_wrapped('(', ')', expression()),\n\n compound_statements(),\n\n )\n\n })\n\n}\n\n\n", "file_path": 
"src/parser/mod.rs", "rank": 39, "score": 122886.11536399461 }, { "content": "fn codegen_return(ty: Type, ret_val: &mut GeneralPurposeRegister, func_name: &str) -> Vec<String> {\n\n let width = operand_width_of_type(ty);\n\n vec![format!(\n\n \"\\tmov{}\\t%{}, %{}\\n\",\n\n operator_suffix(width),\n\n ret_val.fmt_with_operand_width(width),\n\n ScalarRegister::A.fmt_with_operand_width(width)\n\n )]\n\n .into_iter()\n\n .chain(codegen_jump(format!(\"func_{}_ret\", func_name)).into_iter())\n\n .collect()\n\n}\n\n\n\nconst fn operator_suffix(width: OperandWidth) -> &'static str {\n\n match width {\n\n OperandWidth::QuadWord => \"q\",\n\n OperandWidth::DoubleWord => \"l\",\n\n OperandWidth::Word => \"w\",\n\n OperandWidth::Byte => \"b\",\n\n }\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 40, "score": 121892.0543722704 }, { "content": "fn whitespace_wrapped<'a, P, B>(parser: P) -> impl Parser<'a, &'a [(usize, char)], B>\n\nwhere\n\n B: 'a,\n\n P: Parser<'a, &'a [(usize, char)], B> + 'a,\n\n{\n\n parcel::right(parcel::join(\n\n parcel::zero_or_more(whitespace()),\n\n parcel::left(parcel::join(parser, parcel::zero_or_more(whitespace()))),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 41, "score": 116576.56964409394 }, { "content": "fn codegen_jump<L>(block_id: L) -> Vec<String>\n\nwhere\n\n L: LabelFormattable,\n\n{\n\n vec![format!(\"\\tjmp\\t{}\\n\", block_id)]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 42, "score": 115098.50960156547 }, { "content": "fn codegen_label<L>(block_id: L) -> Vec<String>\n\nwhere\n\n L: LabelFormattable,\n\n{\n\n vec![format!(\"{}:\\n\", block_id)]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 43, "score": 115098.50960156549 }, { "content": "pub fn codegen_function_postamble(identifier: &str) -> Vec<String> {\n\n codegen_label(format!(\"func_{}_ret\", identifier))\n\n .into_iter()\n\n .chain(\n\n vec![\"\\tpopq 
%rbp\n\n ret\\n\\n\"\n\n .to_string()]\n\n .into_iter(),\n\n )\n\n .collect()\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 44, "score": 113191.81837937346 }, { "content": "pub fn codegen_function_preamble(identifier: &str) -> Vec<String> {\n\n vec![format!(\n\n \"\\t.text\n\n .globl {name}\n\n .type {name}, @function\n\n{name}:\n\n pushq %rbp\n\n movq\t%rsp, %rbp\\n\",\n\n name = identifier\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 45, "score": 113191.81837937346 }, { "content": "#[derive(Clone, Copy, PartialEq, Eq)]\n\nstruct Integer {\n\n signed: ast::Signed,\n\n width: ast::IntegerWidth,\n\n}\n\n\n\nimpl Integer {\n\n fn new(signed: ast::Signed, width: ast::IntegerWidth) -> Self {\n\n Self { signed, width }\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<usize> for Integer {\n\n type Error = String;\n\n\n\n fn try_from(value: usize) -> Result<Self, Self::Error> {\n\n use ast::{IntegerWidth, Signed};\n\n match value {\n\n 0 => Ok(Integer::new(Signed::Signed, IntegerWidth::Eight)),\n\n 1 => Ok(Integer::new(Signed::Unsigned, IntegerWidth::Eight)),\n\n 2 => Ok(Integer::new(Signed::Signed, IntegerWidth::Sixteen)),\n\n 3 => Ok(Integer::new(Signed::Unsigned, IntegerWidth::Sixteen)),\n\n 4 => Ok(Integer::new(Signed::Signed, IntegerWidth::ThirtyTwo)),\n\n 5 => Ok(Integer::new(Signed::Unsigned, IntegerWidth::ThirtyTwo)),\n\n 6 => Ok(Integer::new(Signed::Signed, IntegerWidth::SixtyFour)),\n\n 7 => Ok(Integer::new(Signed::Unsigned, IntegerWidth::SixtyFour)),\n\n _ => Err(format!(\"rank {} outside of accepted range\", value)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/stage/type_check/mod.rs", "rank": 46, "score": 113150.69542504789 }, { "content": "fn codegen_constant_i32(ret_val: &mut GeneralPurposeRegister, constant: i32) -> Vec<String> {\n\n const WIDTH: OperandWidth = OperandWidth::QuadWord;\n\n\n\n vec![format!(\n\n \"\\tmov{}\\t${}, %{}\\n\",\n\n operator_suffix(WIDTH),\n\n constant,\n\n 
ret_val.fmt_with_operand_width(WIDTH)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 47, "score": 108093.53221262881 }, { "content": "fn codegen_constant_u64(ret_val: &mut GeneralPurposeRegister, constant: u64) -> Vec<String> {\n\n const WIDTH: OperandWidth = OperandWidth::QuadWord;\n\n\n\n vec![format!(\n\n \"\\tmov{}\\t${}, %{}\\n\",\n\n operator_suffix(WIDTH),\n\n constant,\n\n ret_val.fmt_with_operand_width(WIDTH)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 48, "score": 108093.53221262881 }, { "content": "fn codegen_constant_i8(ret_val: &mut GeneralPurposeRegister, constant: i8) -> Vec<String> {\n\n const WIDTH: OperandWidth = OperandWidth::QuadWord;\n\n vec![format!(\n\n \"\\tmov{}\\t${}, %{}\\n\",\n\n operator_suffix(WIDTH),\n\n constant,\n\n ret_val.fmt_with_operand_width(WIDTH)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 49, "score": 108093.53221262881 }, { "content": "fn codegen_constant_i16(ret_val: &mut GeneralPurposeRegister, constant: i16) -> Vec<String> {\n\n const WIDTH: OperandWidth = OperandWidth::QuadWord;\n\n\n\n vec![format!(\n\n \"\\tmov{}\\t${}, %{}\\n\",\n\n operator_suffix(WIDTH),\n\n constant,\n\n ret_val.fmt_with_operand_width(WIDTH)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 50, "score": 108093.53221262881 }, { "content": "fn codegen_constant_i64(ret_val: &mut GeneralPurposeRegister, constant: i64) -> Vec<String> {\n\n const WIDTH: OperandWidth = OperandWidth::QuadWord;\n\n\n\n vec![format!(\n\n \"\\tmov{}\\t${}, %{}\\n\",\n\n operator_suffix(WIDTH),\n\n constant,\n\n ret_val.fmt_with_operand_width(WIDTH)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 51, "score": 108093.53221262881 }, { "content": "fn codegen_constant_u32(ret_val: &mut GeneralPurposeRegister, constant: u32) -> Vec<String> {\n\n const WIDTH: OperandWidth = 
OperandWidth::QuadWord;\n\n\n\n vec![format!(\n\n \"\\tmov{}\\t${}, %{}\\n\",\n\n operator_suffix(WIDTH),\n\n constant,\n\n ret_val.fmt_with_operand_width(WIDTH)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 52, "score": 108093.53221262881 }, { "content": "fn codegen_constant_u8(ret_val: &mut GeneralPurposeRegister, constant: u8) -> Vec<String> {\n\n const WIDTH: OperandWidth = OperandWidth::QuadWord;\n\n vec![format!(\n\n \"\\tmov{}\\t${}, %{}\\n\",\n\n operator_suffix(WIDTH),\n\n constant,\n\n ret_val.fmt_with_operand_width(WIDTH)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 53, "score": 107999.34720735915 }, { "content": "fn codegen_global_str(identifier: &str, str_literal: &[u8]) -> Vec<String> {\n\n flattenable_instructions!(\n\n vec![\"\\t.section .rodata\\n\".to_string()],\n\n codegen_label(identifier),\n\n str_literal\n\n .iter()\n\n .map(|c| format!(\"\\t.byte\\t{}\\n\", c))\n\n .collect::<Vec<String>>(),\n\n vec![\"\\t.byte\\t0\\n\".to_string(), \"\\t.text\\n\".to_string()],\n\n )\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 54, "score": 106485.44759422973 }, { "content": "/// Register implements the methods for register to store a value of a given size.\n\npub trait Register<A>\n\nwhere\n\n Self: Copy,\n\n A: AddressWidth,\n\n{\n\n fn id(&self) -> &'static str;\n\n}\n", "file_path": "src/stage/codegen/register.rs", "rank": 55, "score": 104066.63910694745 }, { "content": "/// TargetArchitecture is a bare trait used for signalling that a type\n\n/// represents an architecture that cant be supported as a compiler target.\n\npub trait TargetArchitecture {}\n\n\n\n/// AvailableArchitectures represents each of the supported architectures in\n\n/// an enumerable format.\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum AvailableArchitectures {\n\n X86_64,\n\n}\n\n\n\nimpl From<AvailableArchitectures> for Box<dyn TargetArchitecture> 
{\n\n fn from(src: AvailableArchitectures) -> Self {\n\n match src {\n\n AvailableArchitectures::X86_64 => Box::new(x86_64::X86_64),\n\n }\n\n }\n\n}\n", "file_path": "src/stage/codegen/machine/arch/mod.rs", "rank": 56, "score": 102654.54698986489 }, { "content": "/// CompilationStage represents a transformation pass on the ADT. Taking an\n\n/// `Input` and `Output` type for the stage.\n\npub trait CompilationStage<I, O, E> {\n\n fn apply(&mut self, input: I) -> Result<O, E>;\n\n\n\n fn and_then<'a, F, Next, O2>(self, next: F) -> BoxedCompilationStage<'a, I, O2, E>\n\n where\n\n Self: Sized + 'a,\n\n I: 'a,\n\n O: 'a,\n\n O2: 'a,\n\n E: 'a,\n\n Next: CompilationStage<O, O2, E> + 'a,\n\n F: Fn() -> Next,\n\n {\n\n BoxedCompilationStage::new(AndThen::new(self, next()))\n\n }\n\n}\n\n\n\nimpl<F, I, O, E> CompilationStage<I, O, E> for F\n\nwhere\n\n F: Fn(I) -> Result<O, E>,\n", "file_path": "src/stage/mod.rs", "rank": 57, "score": 92337.89957006062 }, { "content": "trait TypeCompatibility {\n\n type Output;\n\n type Rhs;\n\n\n\n fn type_compatible(&self, right: &Self::Rhs, flow_left: bool) -> Self::Output;\n\n}\n\n\n\nimpl TypeCompatibility for ast::Type {\n\n type Output = CompatibilityResult;\n\n type Rhs = ast::Type;\n\n\n\n fn type_compatible(&self, right: &Self::Rhs, flow_left: bool) -> Self::Output {\n\n use ast::Type;\n\n match (self, right) {\n\n (lhs, rhs) if lhs == rhs => CompatibilityResult::Equivalent,\n\n (Type::Integer(l_sign, l_width), Type::Integer(r_sign, r_width)) => {\n\n let (lhs_rank, rhs_rank) = (\n\n Integer::new(*l_sign, *l_width).rank(),\n\n Integer::new(*r_sign, *r_width).rank(),\n\n );\n", "file_path": "src/stage/type_check/mod.rs", "rank": 58, "score": 90946.84751103993 }, { "content": "fn compile(source: &str) -> RuntimeResult<String> {\n\n use mossy::parser;\n\n use mossy::stage::codegen::machine::arch::x86_64;\n\n use mossy::stage::{type_check, CompilationStage};\n\n\n\n let input: Vec<(usize, char)> = 
source.chars().enumerate().collect();\n\n\n\n parser::parse(&input)\n\n .map(|program| {\n\n type_check::TypeAnalysis::new()\n\n .and_then(|| x86_64::X86_64)\n\n .apply(program)\n\n })\n\n .map_err(|e| RuntimeError::Undefined(format!(\"{:?}\", e)))?\n\n .map(|insts| insts.into_iter().collect::<String>())\n\n .map_err(RuntimeError::Undefined)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 59, "score": 87223.35688590558 }, { "content": "trait Ranking {\n\n type Output;\n\n\n\n fn rank(&self) -> Self::Output;\n\n}\n\n\n\nimpl Ranking for ast::Signed {\n\n type Output = usize;\n\n\n\n fn rank(&self) -> Self::Output {\n\n match self {\n\n ast::Signed::Signed => 0,\n\n ast::Signed::Unsigned => 1,\n\n }\n\n }\n\n}\n\n\n\nimpl Ranking for ast::IntegerWidth {\n\n type Output = usize;\n\n\n", "file_path": "src/stage/type_check/mod.rs", "rank": 60, "score": 84342.8148535726 }, { "content": "fn read_src_file(filename: &str) -> RuntimeResult<String> {\n\n let mut f = File::open(filename).map_err(|_| RuntimeError::FileUnreadable)?;\n\n\n\n let mut contents = String::new();\n\n match f.read_to_string(&mut contents) {\n\n Ok(_) => Ok(contents),\n\n Err(e) => Err(RuntimeError::Undefined(e.to_string())),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 61, "score": 83654.42249280169 }, { "content": "enum CompatibilityResult {\n\n Equivalent,\n\n WidenTo(ast::Type),\n\n Scale(ast::Type),\n\n Incompatible,\n\n}\n\n\n", "file_path": "src/stage/type_check/mod.rs", "rank": 62, "score": 82337.0607744336 }, { "content": "fn write_dest_file(filename: &str, data: &[u8]) -> RuntimeResult<()> {\n\n let mut f = OpenOptions::new()\n\n .truncate(true)\n\n .create(true)\n\n .write(true)\n\n .open(filename)\n\n .map_err(|_| RuntimeError::FileUnreadable)?;\n\n\n\n match f.write_all(data) {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(RuntimeError::Undefined(e.to_string())),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 63, "score": 80194.8158361295 }, { "content": "fn 
codegen_reference(ret: &mut GeneralPurposeRegister, identifier: &str) -> Vec<String> {\n\n vec![format!(\n\n \"\\tleaq\\t{}(%{}), %{}\\n\",\n\n identifier,\n\n PointerRegister.fmt_with_operand_width(OperandWidth::QuadWord),\n\n ret.fmt_with_operand_width(OperandWidth::QuadWord)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 64, "score": 66332.42423477124 }, { "content": "fn codegen_constant_u16(ret_val: &mut GeneralPurposeRegister, constant: u16) -> Vec<String> {\n\n const WIDTH: OperandWidth = OperandWidth::QuadWord;\n\n\n\n vec![format!(\n\n \"\\tmov{}\\t${}, %{}\\n\",\n\n operator_suffix(WIDTH),\n\n constant,\n\n ret_val.fmt_with_operand_width(WIDTH)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 65, "score": 64431.851943745554 }, { "content": "enum RuntimeError {\n\n FileUnreadable,\n\n Undefined(String),\n\n}\n\n\n\nimpl fmt::Debug for RuntimeError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::FileUnreadable => write!(f, \"source file unreadable\"),\n\n Self::Undefined(s) => write!(f, \"{}\", s),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for RuntimeError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:?}\", &self)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 66, "score": 60113.0704681249 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nenum RelationalExprOp {\n\n LessThan,\n\n LessEqual,\n\n GreaterThan,\n\n GreaterEqual,\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 67, "score": 57571.76787240719 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nenum AdditionExprOp {\n\n Plus,\n\n Minus,\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 68, "score": 57571.76787240719 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nenum MultiplicationExprOp {\n\n Star,\n\n Slash,\n\n Mod,\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 69, "score": 
57571.76787240719 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\nenum EqualityExprOp {\n\n Equal,\n\n NotEqual,\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 70, "score": 57571.76787240719 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nenum ComparisonOperation {\n\n LessThan,\n\n LessEqual,\n\n GreaterThan,\n\n GreaterEqual,\n\n Equal,\n\n NotEqual,\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 71, "score": 54445.13843692023 }, { "content": "#[derive(Clone, Copy, PartialEq)]\n\nenum DivisionVariant {\n\n Division,\n\n Modulo,\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 72, "score": 54444.88583862873 }, { "content": "trait PrefixedLabel {\n\n fn fmt_with_prefix(&self) -> String;\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 73, "score": 54245.254971441405 }, { "content": "#[derive(Debug)]\n\nstruct RegisterAllocationGuard {\n\n free_channel: std::sync::mpsc::Sender<GeneralPurposeRegister>,\n\n reg: GeneralPurposeRegister,\n\n}\n\n\n\nimpl RegisterAllocationGuard {\n\n fn new(\n\n free_channel: std::sync::mpsc::Sender<GeneralPurposeRegister>,\n\n reg: GeneralPurposeRegister,\n\n ) -> Self {\n\n Self { free_channel, reg }\n\n }\n\n\n\n #[allow(dead_code)]\n\n fn borrow_inner(&self) -> &GeneralPurposeRegister {\n\n &self.reg\n\n }\n\n\n\n fn borrow_inner_mut(&mut self) -> &mut GeneralPurposeRegister {\n\n &mut self.reg\n", "file_path": "src/stage/codegen/machine/arch/x86_64/register.rs", "rank": 74, "score": 53853.17100514169 }, { "content": "fn main() -> RuntimeResult<()> {\n\n let raw_args: Vec<String> = env::args().into_iter().collect::<Vec<String>>();\n\n let args = raw_args.iter().map(|a| a.as_str()).collect::<Vec<&str>>();\n\n\n\n // Flag Definitions\n\n let help = scrap::Flag::store_true(\"help\", \"h\", \"display usage information.\").optional();\n\n let in_file = scrap::Flag::expect_string(\"in-file\", \"i\", \"an input path 
for a source file.\");\n\n let out_file = scrap::Flag::expect_string(\"out-file\", \"o\", \"an assembly output path.\")\n\n .optional()\n\n .with_default(\"a.s\".to_string());\n\n let backend = scrap::Flag::with_choices(\n\n \"backend\",\n\n \"b\",\n\n \"a target architecture backend.\",\n\n [\"x86_64\".to_string()],\n\n scrap::StringValue,\n\n )\n\n .optional()\n\n .with_default(\"x86_64\".to_string());\n\n\n", "file_path": "src/main.rs", "rank": 75, "score": 49954.671294153624 }, { "content": "/// Logically negate a register's value.\n\nfn codegen_not(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n expr: ast::TypedExprNode,\n\n) -> Vec<String> {\n\n let expr_ctx = codegen_expr(allocator, ret_val, expr);\n\n let byte_ret_val_reg = ret_val.fmt_with_operand_width(OperandWidth::Byte);\n\n let quadword_ret_val_reg = ret_val.fmt_with_operand_width(OperandWidth::QuadWord);\n\n\n\n flattenable_instructions!(\n\n expr_ctx,\n\n vec![\n\n format!(\n\n \"\\ttestq\\t%{width_adj_reg}, %{width_adj_reg}\\n\",\n\n width_adj_reg = quadword_ret_val_reg\n\n ),\n\n format!(\"\\tsete\\t%{}\\n\", byte_ret_val_reg),\n\n format!(\n\n \"\\tmovzbq\\t%{}, %{}\\n\",\n\n byte_ret_val_reg, quadword_ret_val_reg\n\n )\n\n ],\n\n )\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 76, "score": 49312.69492698462 }, { "content": "fn codegen_call(\n\n allocator: &mut GPRegisterAllocator,\n\n ty: Type,\n\n ret_val: &mut GeneralPurposeRegister,\n\n func_name: &str,\n\n arg: Option<Box<ast::TypedExprNode>>,\n\n) -> Vec<String> {\n\n let width = operand_width_of_type(ty);\n\n if let Some(arg_expr) = arg {\n\n let width = operand_width_of_type(arg_expr.r#type());\n\n\n\n allocator.allocate_then(|allocator, arg_retval| {\n\n let arg_ctx = codegen_expr(allocator, arg_retval, *arg_expr);\n\n\n\n flattenable_instructions!(\n\n arg_ctx,\n\n vec![\n\n format!(\n\n \"\\tmov{}\\t%{}, %{}\\n\",\n\n operator_suffix(width),\n", "file_path": 
"src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 77, "score": 48410.20737273808 }, { "content": "fn codegen_scaleby(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n size_of: usize,\n\n expr: Box<ast::TypedExprNode>,\n\n) -> Vec<String> {\n\n if let ast::Type::Integer(sign, _) = expr.r#type() {\n\n let scale_by_expr = ast::TypedExprNode::Primary(\n\n ast::Type::Integer(sign, ast::IntegerWidth::SixtyFour),\n\n ast::Primary::Integer {\n\n sign,\n\n width: ast::IntegerWidth::SixtyFour,\n\n value: crate::util::pad_to_64bit_array((size_of as u64).to_le_bytes()),\n\n },\n\n );\n\n\n\n codegen_multiplication(\n\n allocator,\n\n ret_val,\n\n ast::Type::Integer(sign, ast::IntegerWidth::SixtyFour),\n\n Box::new(scale_by_expr),\n\n expr,\n\n )\n\n } else {\n\n panic!(\"invalid scale_by types\")\n\n }\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 78, "score": 48410.20737273808 }, { "content": "fn codegen_subtraction(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n ty: ast::Type,\n\n lhs: Box<ast::TypedExprNode>,\n\n rhs: Box<ast::TypedExprNode>,\n\n) -> Vec<String> {\n\n let width = operand_width_of_type(ty);\n\n\n\n allocator.allocate_then(|allocator, rhs_ret_val| {\n\n let lhs_ctx = codegen_expr(allocator, ret_val, *lhs);\n\n let rhs_ctx = codegen_expr(allocator, rhs_ret_val, *rhs);\n\n\n\n flattenable_instructions!(\n\n lhs_ctx,\n\n rhs_ctx,\n\n vec![format!(\n\n \"\\tsub{}\\t%{}, %{}\\n\",\n\n operator_suffix(width),\n\n rhs_ret_val.fmt_with_operand_width(width),\n\n ret_val.fmt_with_operand_width(width)\n\n )],\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 79, "score": 48410.20737273808 }, { "content": "fn codegen_while_statement(\n\n allocator: &mut GPRegisterAllocator,\n\n cond: ast::TypedExprNode,\n\n block: ast::TypedCompoundStmts,\n\n) -> Result<Vec<String>, codegen::CodeGenerationErr> {\n\n 
allocator.allocate_then(|allocator, ret_val| {\n\n let cond_ctx = codegen_expr(allocator, ret_val, cond);\n\n let loop_cond_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let loop_start_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let loop_end_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let block_insts = codegen_statements(allocator, block)?;\n\n\n\n Ok(flattenable_instructions!(\n\n codegen_label(LLabelPrefix(loop_cond_block_id)),\n\n cond_ctx,\n\n codegen_compare_and_jmp(\n\n allocator,\n\n ret_val,\n\n LLabelPrefix(loop_start_block_id),\n\n LLabelPrefix(loop_end_block_id)\n\n ),\n\n codegen_label(LLabelPrefix(loop_start_block_id)),\n\n block_insts,\n\n codegen_jump(LLabelPrefix(loop_cond_block_id)),\n\n codegen_label(LLabelPrefix(loop_end_block_id)),\n\n ))\n\n })\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 80, "score": 48410.20737273808 }, { "content": "#[allow(unused)]\n\nfn codegen_invert(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n expr: ast::TypedExprNode,\n\n) -> Vec<String> {\n\n let width = operand_width_of_type(expr.r#type());\n\n let expr_ctx = codegen_expr(allocator, ret_val, expr);\n\n\n\n flattenable_instructions!(\n\n expr_ctx,\n\n vec![format!(\n\n \"\\tnot{}\\t%{}\\n\",\n\n operator_suffix(width),\n\n ret_val.fmt_with_operand_width(width)\n\n )],\n\n )\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 81, "score": 48410.20737273808 }, { "content": "#[allow(unused)]\n\nfn codegen_negate(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n expr: ast::TypedExprNode,\n\n) -> Vec<String> {\n\n let width = operand_width_of_type(expr.r#type());\n\n let expr_ctx = codegen_expr(allocator, ret_val, expr);\n\n\n\n flattenable_instructions!(\n\n expr_ctx,\n\n vec![format!(\n\n \"\\tneg{}\\t%{}\\n\",\n\n operator_suffix(width),\n\n ret_val.fmt_with_operand_width(width)\n\n )],\n\n 
)\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 82, "score": 48410.20737273808 }, { "content": "fn codegen_statement(\n\n allocator: &mut GPRegisterAllocator,\n\n input: ast::TypedStmtNode,\n\n) -> Result<Vec<String>, codegen::CodeGenerationErr> {\n\n match input {\n\n ast::TypedStmtNode::Expression(expr) => allocator\n\n .allocate_then(|allocator, ret_val| Ok(vec![codegen_expr(allocator, ret_val, expr)])),\n\n ast::TypedStmtNode::Declaration(ast::Declaration::Scalar(ty, identifiers)) => {\n\n let var_decls = identifiers\n\n .iter()\n\n .map(|id| codegen_global_symbol(&ty, id, 1))\n\n .collect();\n\n Ok(var_decls)\n\n }\n\n ast::TypedStmtNode::Declaration(ast::Declaration::Array { ty, id, size }) => {\n\n Ok(vec![codegen_global_symbol(&ty, &id, size)])\n\n }\n\n ast::TypedStmtNode::Return(ty, id, arg) => allocator.allocate_then(|allocator, ret_val| {\n\n let res: Vec<String> = if let Some(expr) = arg {\n\n codegen_expr(allocator, ret_val, expr)\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 83, "score": 48410.20737273808 }, { "content": "fn codegen_for_statement(\n\n allocator: &mut GPRegisterAllocator,\n\n preop: ast::TypedStmtNode,\n\n cond: ast::TypedExprNode,\n\n postop: ast::TypedStmtNode,\n\n block: ast::TypedCompoundStmts,\n\n) -> Result<Vec<String>, codegen::CodeGenerationErr> {\n\n allocator.allocate_then(|allocator, ret_val| {\n\n let preop_ctx = codegen_statement(allocator, preop)?;\n\n let cond_ctx = codegen_expr(allocator, ret_val, cond);\n\n let postop_ctx = codegen_statement(allocator, postop)?;\n\n let loop_cond_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let loop_start_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let loop_end_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let block_insts = codegen_statements(allocator, block)?;\n\n\n\n Ok(flattenable_instructions!(\n\n preop_ctx,\n\n codegen_label(LLabelPrefix(loop_cond_block_id)),\n\n cond_ctx,\n", 
"file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 84, "score": 48410.20737273808 }, { "content": "fn codegen_statements(\n\n allocator: &mut GPRegisterAllocator,\n\n input: ast::TypedCompoundStmts,\n\n) -> Result<Vec<String>, CodeGenerationErr> {\n\n let stmts = Vec::<ast::TypedStmtNode>::from(input);\n\n\n\n stmts\n\n .into_iter()\n\n .map(|stmt| codegen_statement(allocator, stmt).map(|output| output.join(\"\")))\n\n .collect::<Result<Vec<String>, _>>()\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 85, "score": 48410.20737273808 }, { "content": "fn codegen_multiplication(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n ty: ast::Type,\n\n lhs: Box<ast::TypedExprNode>,\n\n rhs: Box<ast::TypedExprNode>,\n\n) -> Vec<String> {\n\n let width = operand_width_of_type(ty);\n\n\n\n allocator.allocate_then(|allocator, lhs_retval| {\n\n let lhs_ctx = codegen_expr(allocator, lhs_retval, *lhs);\n\n let rhs_ctx = codegen_expr(allocator, ret_val, *rhs);\n\n\n\n flattenable_instructions!(\n\n lhs_ctx,\n\n rhs_ctx,\n\n vec![format!(\n\n \"\\timul{}\\t%{}, %{}\\n\",\n\n operator_suffix(width),\n\n lhs_retval.fmt_with_operand_width(width),\n\n ret_val.fmt_with_operand_width(width)\n\n )],\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 86, "score": 48410.20737273808 }, { "content": "fn codegen_addition(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n ty: ast::Type,\n\n lhs: Box<ast::TypedExprNode>,\n\n rhs: Box<ast::TypedExprNode>,\n\n) -> Vec<String> {\n\n let width = operand_width_of_type(ty);\n\n\n\n allocator.allocate_then(|allocator, lhs_retval| {\n\n let lhs_ctx = codegen_expr(allocator, lhs_retval, *lhs);\n\n let rhs_ctx = codegen_expr(allocator, ret_val, *rhs);\n\n\n\n vec![\n\n lhs_ctx,\n\n rhs_ctx,\n\n vec![format!(\n\n \"\\tadd{}\\t%{}, %{}\\n\",\n\n operator_suffix(width),\n\n 
lhs_retval.fmt_with_operand_width(width),\n\n ret_val.fmt_with_operand_width(width)\n\n )],\n\n ]\n\n .into_iter()\n\n .flatten()\n\n .collect()\n\n })\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 87, "score": 48410.20737273808 }, { "content": "fn codegen_expr(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n expr: ast::TypedExprNode,\n\n) -> Vec<String> {\n\n use crate::stage::ast::{IntegerWidth, Primary, Signed, TypedExprNode};\n\n\n\n match expr {\n\n TypedExprNode::Primary(_, Primary::Integer { sign, width, value }) => match (sign, width) {\n\n (Signed::Signed, IntegerWidth::Eight) => {\n\n let signed_val = u64::from_le_bytes(value);\n\n if signed_val.leading_zeros() >= 56 {\n\n codegen_constant_i8(ret_val, signed_val as i8)\n\n } else {\n\n panic!(\"value exceeds signed 8-bit integer\")\n\n }\n\n }\n\n (Signed::Signed, IntegerWidth::Sixteen) => {\n\n let signed_val = u64::from_le_bytes(value);\n\n if signed_val.leading_zeros() >= 48 {\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 88, "score": 48410.20737273808 }, { "content": "fn codegen_division(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n ty: ast::Type,\n\n lhs: Box<ast::TypedExprNode>,\n\n rhs: Box<ast::TypedExprNode>,\n\n sign: crate::stage::ast::Signed,\n\n division_variant: DivisionVariant,\n\n) -> Vec<String> {\n\n use crate::stage::ast::Signed;\n\n\n\n let width = operand_width_of_type(ty);\n\n\n\n allocator.allocate_then(|allocator, rhs_retval| {\n\n let lhs_ctx = codegen_expr(allocator, ret_val, *lhs);\n\n let rhs_ctx = codegen_expr(allocator, rhs_retval, *rhs);\n\n let operand_suffix = operator_suffix(width);\n\n\n\n flattenable_instructions!(\n\n lhs_ctx,\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 89, "score": 48410.20737273808 }, { "content": "fn codegen_load_global(\n\n ty: Type,\n\n ret: &mut GeneralPurposeRegister,\n\n 
identifier: &str,\n\n) -> Vec<String> {\n\n let width = operand_width_of_type(ty);\n\n\n\n vec![format!(\n\n \"\\tmov{}\\t{}(%{}), %{}\\n\",\n\n operator_suffix(width),\n\n identifier,\n\n PointerRegister.fmt_with_operand_width(OperandWidth::QuadWord),\n\n ret.fmt_with_operand_width(width)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 90, "score": 47575.348969365514 }, { "content": "fn codegen_store_global(\n\n ty: Type,\n\n ret: &mut GeneralPurposeRegister,\n\n identifier: &str,\n\n) -> Vec<String> {\n\n let width = operand_width_of_type(ty);\n\n vec![format!(\n\n \"\\tmov{}\\t%{}, {}(%{})\\n\",\n\n operator_suffix(width),\n\n ret.fmt_with_operand_width(width),\n\n identifier,\n\n PointerRegister.fmt_with_operand_width(OperandWidth::QuadWord)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 91, "score": 47575.348969365514 }, { "content": "fn codegen_if_statement_with_else(\n\n allocator: &mut GPRegisterAllocator,\n\n cond: ast::TypedExprNode,\n\n true_case: ast::TypedCompoundStmts,\n\n false_case: ast::TypedCompoundStmts,\n\n) -> Result<Vec<String>, codegen::CodeGenerationErr> {\n\n allocator.allocate_then(|allocator, ret_val| {\n\n let cond_ctx = codegen_expr(allocator, ret_val, cond);\n\n let exit_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let true_case_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let tctx = codegen_statements(allocator, true_case)?;\n\n let else_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let block_ctx = codegen_statements(allocator, false_case)?;\n\n\n\n Ok(flattenable_instructions!(\n\n cond_ctx,\n\n codegen_compare_and_jmp(\n\n allocator,\n\n ret_val,\n\n LLabelPrefix(true_case_block_id),\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 92, "score": 47575.348969365514 }, { "content": "fn codegen_compare_and_set(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n 
comparison_op: ComparisonOperation,\n\n ty: ast::Type,\n\n lhs: Box<ast::TypedExprNode>,\n\n rhs: Box<ast::TypedExprNode>,\n\n) -> Vec<String> {\n\n let width = operand_width_of_type(ty);\n\n\n\n allocator.allocate_then(|allocator, lhs_retval| {\n\n let lhs_ctx = codegen_expr(allocator, lhs_retval, *lhs);\n\n let rhs_ctx = codegen_expr(allocator, ret_val, *rhs);\n\n\n\n let set_operator = match comparison_op {\n\n ComparisonOperation::LessThan => \"setl\",\n\n ComparisonOperation::LessEqual => \"setle\",\n\n ComparisonOperation::GreaterThan => \"setg\",\n\n ComparisonOperation::GreaterEqual => \"setge\",\n\n ComparisonOperation::Equal => \"sete\",\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 93, "score": 47575.348969365514 }, { "content": "fn codegen_store_deref(\n\n dest: &mut GeneralPurposeRegister,\n\n src: &mut GeneralPurposeRegister,\n\n) -> Vec<String> {\n\n const WIDTH: OperandWidth = OperandWidth::QuadWord;\n\n vec![format!(\n\n \"\\tmov{}\\t%{}, (%{})\\n\",\n\n operator_suffix(WIDTH),\n\n src.fmt_with_operand_width(WIDTH),\n\n dest.fmt_with_operand_width(OperandWidth::QuadWord)\n\n )]\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 94, "score": 47575.348969365514 }, { "content": "fn codegen_if_statement_without_else(\n\n allocator: &mut GPRegisterAllocator,\n\n cond: ast::TypedExprNode,\n\n true_case: ast::TypedCompoundStmts,\n\n) -> Result<Vec<String>, codegen::CodeGenerationErr> {\n\n allocator.allocate_then(|allocator, ret_val| {\n\n let cond_ctx = codegen_expr(allocator, ret_val, cond);\n\n let exit_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let true_case_block_id = BLOCK_ID.fetch_add(1, Ordering::SeqCst);\n\n let tctx = codegen_statements(allocator, true_case)?;\n\n\n\n Ok(flattenable_instructions!(\n\n cond_ctx,\n\n codegen_compare_and_jmp(\n\n allocator,\n\n ret_val,\n\n LLabelPrefix(true_case_block_id),\n\n LLabelPrefix(exit_block_id)\n\n ),\n\n 
codegen_label(LLabelPrefix(true_case_block_id)),\n\n tctx,\n\n codegen_label(LLabelPrefix(exit_block_id)),\n\n ))\n\n })\n\n}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 95, "score": 46797.99859643281 }, { "content": "fn semicolon_terminated_statement<'a, P>(\n\n term: P,\n\n) -> impl parcel::Parser<'a, &'a [(usize, char)], StmtNode>\n\nwhere\n\n P: parcel::Parser<'a, &'a [(usize, char)], StmtNode> + 'a,\n\n{\n\n parcel::left(parcel::join(\n\n term,\n\n whitespace_wrapped(expect_character(';')),\n\n ))\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 96, "score": 45701.689414208515 }, { "content": "/// Defines marker traits for objects that can be used to generate labels.\n\ntrait LabelFormattable: core::fmt::Display {}\n\n\n\nimpl LabelFormattable for &str {}\n\nimpl LabelFormattable for String {}\n\n\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 97, "score": 44388.31093734385 }, { "content": "type RuntimeResult<T> = Result<T, RuntimeError>;\n\n\n", "file_path": "src/main.rs", "rank": 98, "score": 44323.77133358203 }, { "content": "fn codegen_compare_and_jmp<L>(\n\n allocator: &mut GPRegisterAllocator,\n\n ret_val: &mut GeneralPurposeRegister,\n\n cond_true_id: L,\n\n cond_false_id: L,\n\n) -> Vec<String>\n\nwhere\n\n L: LabelFormattable,\n\n{\n\n const WIDTH: OperandWidth = OperandWidth::QuadWord;\n\n let operand_suffix = operator_suffix(WIDTH);\n\n allocator.allocate_then(|_, zero_val| {\n\n vec![\n\n format!(\"\\tandq\\t$0, %{}\\n\", zero_val.fmt_with_operand_width(WIDTH)),\n\n format!(\n\n \"\\tcmp{}\\t%{}, %{}\\n\",\n\n operand_suffix,\n\n ret_val.fmt_with_operand_width(WIDTH),\n\n zero_val.fmt_with_operand_width(WIDTH)\n\n ),\n", "file_path": "src/stage/codegen/machine/arch/x86_64/mod.rs", "rank": 99, "score": 44278.152333713486 } ]
Rust
src/fileoperation.rs
DrStiev/rs-handlegfa
89c59bb1e6b0b2aad23061ab894b958467a14ae3
use gfa2::gfa1::GFA; use gfa2::gfa2::GFA2; use handlegraph2::hashgraph::HashGraph; use bstr::BString; use std::fs::File; use std::io::prelude::*; use std::path::Path; pub fn save_as_gfa2_file(graph: &HashGraph, path: Option<String>) -> Result<(), std::io::Error> { use handlegraph2::conversion; let path = path.unwrap_or_else(|| String::from("./tests/output_files/default_path/file_gfa2.gfa2")); let path = Path::new(&path); let mut file = File::create(path)?; let gfa_file: GFA2<BString, ()> = conversion::to_gfa2(&graph); file.write_all(format!("{}", gfa_file).as_bytes())?; file.sync_all()?; Ok(()) } pub fn save_as_gfa1_file(graph: &HashGraph, path: Option<String>) -> Result<(), std::io::Error> { use handlegraph2::conversion; let path = path.unwrap_or_else(|| String::from("./tests/output_files/default_path/file_gfa1.gfa")); let path = Path::new(&path); let mut file = File::create(path)?; let gfa_file: GFA<BString, ()> = conversion::to_gfa(&graph); file.write_all(format!("{}", gfa_file).as_bytes())?; file.sync_all()?; Ok(()) } #[cfg(test)] mod tests { use super::*; use handlegraph2::mutablehandlegraph::*; #[test] fn can_save_handlegraph_as_gfa2_file() { use handlegraph2::{ handle::Edge, hashgraph::HashGraph, mutablehandlegraph::MutableHandleGraph, pathgraph::PathHandleGraph, }; let mut graph = HashGraph::new(); let h1 = graph.create_handle(b"ACCTT", 11); let h2 = graph.create_handle(b"TCAAGG", 12); let h3 = graph.create_handle(b"CTTGATT", 13); graph.apply_orientation(h2.flip()); graph.create_edge(Edge(h1, h2)); graph.create_edge(Edge(h2, h3)); graph.create_edge(Edge(h1, h3)); let path = graph.create_path_handle(b"1", false); graph.append_step(&path, h1); graph.append_step(&path, h2); graph.append_step(&path, h3); match save_as_gfa2_file( &graph, Some(String::from("./tests/output_files/file_gfa2.gfa2")), ) { Ok(_) => println!("Handlegraph saved correctly!"), Err(why) => println!("Error: {}", why), }; } #[test] fn can_save_handlegraph_as_gfa2_file_default_path() { use 
handlegraph2::{ handle::Edge, hashgraph::HashGraph, mutablehandlegraph::MutableHandleGraph, pathgraph::PathHandleGraph, }; let mut graph = HashGraph::new(); let h1 = graph.create_handle(b"ACCTT", 11); let h2 = graph.create_handle(b"TCAAGG", 12); let h3 = graph.create_handle(b"CTTGATT", 13); graph.apply_orientation(h2.flip()); graph.create_edge(Edge(h1, h2)); graph.create_edge(Edge(h2, h3)); graph.create_edge(Edge(h1, h3)); let path = graph.create_path_handle(b"1", false); graph.append_step(&path, h1); graph.append_step(&path, h2); graph.append_step(&path, h3); match save_as_gfa2_file(&graph, None) { Ok(_) => println!("Handlegraph saved correctly!"), Err(why) => println!("Error: {}", why), }; } #[test] fn can_save_handlegraph_as_gfa1_file() { use handlegraph2::{ handle::Edge, hashgraph::HashGraph, mutablehandlegraph::MutableHandleGraph, pathgraph::PathHandleGraph, }; let mut graph = HashGraph::new(); let h1 = graph.create_handle(b"ACCTT", 11); let h2 = graph.create_handle(b"TCAAGG", 12); let h3 = graph.create_handle(b"CTTGATT", 13); graph.apply_orientation(h2.flip()); graph.create_edge(Edge(h1, h2)); graph.create_edge(Edge(h2, h3)); graph.create_edge(Edge(h1, h3)); let path = graph.create_path_handle(b"1", false); graph.append_step(&path, h1); graph.append_step(&path, h2); graph.append_step(&path, h3); match save_as_gfa1_file( &graph, Some(String::from("./tests/output_files/file_gfa1.gfa")), ) { Ok(_) => println!("Handlegraph saved correctly!"), Err(why) => println!("Error: {}", why), }; } #[test] fn can_save_handlegraph_as_gfa1_file_default_path() { use handlegraph2::{ handle::Edge, hashgraph::HashGraph, mutablehandlegraph::MutableHandleGraph, pathgraph::PathHandleGraph, }; let mut graph = HashGraph::new(); let h1 = graph.create_handle(b"ACCTT", 11); let h2 = graph.create_handle(b"TCAAGG", 12); let h3 = graph.create_handle(b"CTTGATT", 13); graph.apply_orientation(h2.flip()); graph.create_edge(Edge(h1, h2)); graph.create_edge(Edge(h2, h3)); 
graph.create_edge(Edge(h1, h3)); let path = graph.create_path_handle(b"1", false); graph.append_step(&path, h1); graph.append_step(&path, h2); graph.append_step(&path, h3); match save_as_gfa1_file(&graph, None) { Ok(_) => println!("Handlegraph saved correctly!"), Err(why) => println!("Error: {}", why), }; } #[test] fn can_use_file_gfa2_saved() { use gfa2::{parser_gfa2::GFA2Parser, tag::OptionalFields}; let parser: GFA2Parser<bstr::BString, OptionalFields> = GFA2Parser::new(); let gfa2: GFA2<BString, OptionalFields> = parser .parse_file("./tests/output_files/file_gfa2.gfa2") .unwrap(); println!("{}", gfa2); } #[test] fn can_use_file_gfa1_saved() { use gfa2::{parser_gfa1::GFAParser, tag::OptionalFields}; let parser: GFAParser<bstr::BString, OptionalFields> = GFAParser::new(); let gfa: GFA<BString, OptionalFields> = parser .parse_file("./tests/output_files/file_gfa1.gfa") .unwrap(); println!("{}", gfa); } }
use gfa2::gfa1::GFA; use gfa2::gfa2::GFA2; use handlegraph2::hashgraph::HashGraph; use bstr::BString; use std::fs::File; use std::io::prelude::*; use std::path::Path; pub fn save_as_gfa2_file(graph: &HashGraph, path: Option<String>) -> Result<(), std::io::Error> { use handlegraph2::conversion; let path = path.unwrap_or_else(|| String::from("./tests/output_files/default_path/file_gfa2.gfa2")); let path = Path::new(&path); let mut file = File::create(path)?; let gfa_file: GFA2<BString, ()> = conversion::to_gfa2(&graph); file.write_all(format!("{}", gfa_file).as_bytes())?; file.sync_all()?; Ok(()) } pub fn save_as_gfa1_file(graph: &HashGraph, path: Option<String>) -> Result<(), std::io::Error> { use handlegraph2::conversion; let path = path.unwrap_or_else(|| String::from("./tests/output_files/default_path/file_gfa1.gfa")); let path = Path::new(&path); let mut file = File::create(path)?; let gfa_file: GFA<BString, ()> = conversion::to_gfa(&graph); file.write_all(format!("{}", gfa_file).as_bytes())?; file.sync_all()?; Ok(()) } #[cfg(test)] mod tests { use super::*; use handlegraph2::mutablehandlegraph::*; #[test] fn can_save_handlegraph_as_gfa2_file() { use handlegraph2::{ handle::Edge, hashgraph::HashGraph, mutablehandlegraph::MutableHandleGraph, pathgraph::PathHandleGraph, }; let mut graph = HashGraph::new(); let h1 = graph.create_handle(b"ACCTT", 11); let h2 = graph.create_handle(b"TCAAGG", 12); let h3 = graph.create_handle(b"CTTGATT", 13); graph.apply_orientation(h2.flip()); graph.create_edge(Edge(h1, h2)); graph.create_edge(Edge(h2, h3)); graph.create_edge(Edge(h1, h3)); let path = graph.create_path_handle(b"1", false); graph.append_step(&path, h1); graph.append_step(&path, h2); graph.append_step(&path, h3); match save_as_gfa2_file( &graph, Some(String::from("./tests/output_files/file_gfa2.gfa2")), ) { Ok(_) => println!("Handlegraph saved correctly!"), Err(why) => println!("Error: {}", why), }; } #[test] fn can_save_handlegraph_as_gfa2_file_default_path() { use 
handlegraph2::{ handle::Edge, hashgraph::HashGraph, mutablehandlegraph::MutableHandleGraph, pathgraph::PathHandleGraph, }; let mut graph = HashGraph::new(); let h1 = graph.create_handle(b"ACCTT", 11); let h2 = graph.create_handle(b"TCAAGG", 12); let h3 = graph.create_handle(b"CTTGATT", 13); graph.apply_orientation(h2.flip()); graph.create_edge(Edge(h1, h2)); graph.create_edge(Edge(h2, h3)); graph.create_edge(Edge(h1, h3)); let path = graph.create_path_handle(b"1", false); graph.append_step(&path, h1); graph.append_step(&path, h2); graph.append_step(&path, h3); match save_as_gfa2_file(&graph, None) { Ok(_) => println!("Handlegraph saved correctly!"), Err(why) => println!("Error: {}", why), }; } #[test] fn can_save_handlegraph_as_gfa1_file() { use handlegraph2::{ handle::Edge, hashgraph::HashGraph, mutablehandlegraph::MutableHandleGraph, pathgraph::PathHandleGraph, }; let mut graph = HashGraph::new(); let h1 = graph.create_handle(b"ACCTT", 11); let h2 = graph.create_handle(b"TCAAGG", 12); let h3 = graph.create_handle(b"CTTGATT", 13); graph.apply_orientation(h2.flip()); graph.create_edge(Edge(h1, h2)); graph.create_edge(Edge(h2, h3)); graph.create_edge(Edge(h1, h3)); let path = graph.create_path_handle(b"1", false); graph.append_step(&path, h1); graph.append_step(&path, h2); graph.append_step(&path, h3); match save_as_gfa1_file( &graph, Some(String::from("./tests/output_files/file_gfa1.gfa")), ) { Ok(_) => println!("Handlegraph saved correctly!"), Err(why) => println!("Error: {}", why), }; } #[test] fn can_save_handlegraph_as_gfa1_file_default_path() { use handlegraph2::{ handle::Edge, hashgraph::HashGraph, mutablehandlegraph::MutableHandleGraph, pathgraph::PathHandleGraph, }; let mut graph = HashGraph::new(); let h1 = graph.create_handle(b"ACCTT", 11); let h2 = graph.create_handle(b"TCAAGG", 12); let h3 =
#[test] fn can_use_file_gfa2_saved() { use gfa2::{parser_gfa2::GFA2Parser, tag::OptionalFields}; let parser: GFA2Parser<bstr::BString, OptionalFields> = GFA2Parser::new(); let gfa2: GFA2<BString, OptionalFields> = parser .parse_file("./tests/output_files/file_gfa2.gfa2") .unwrap(); println!("{}", gfa2); } #[test] fn can_use_file_gfa1_saved() { use gfa2::{parser_gfa1::GFAParser, tag::OptionalFields}; let parser: GFAParser<bstr::BString, OptionalFields> = GFAParser::new(); let gfa: GFA<BString, OptionalFields> = parser .parse_file("./tests/output_files/file_gfa1.gfa") .unwrap(); println!("{}", gfa); } }
graph.create_handle(b"CTTGATT", 13); graph.apply_orientation(h2.flip()); graph.create_edge(Edge(h1, h2)); graph.create_edge(Edge(h2, h3)); graph.create_edge(Edge(h1, h3)); let path = graph.create_path_handle(b"1", false); graph.append_step(&path, h1); graph.append_step(&path, h2); graph.append_step(&path, h3); match save_as_gfa1_file(&graph, None) { Ok(_) => println!("Handlegraph saved correctly!"), Err(why) => println!("Error: {}", why), }; }
function_block-function_prefixed
[ { "content": "/// Function that reads a ```GFA1``` files passed as input and return its\n\n/// corresponding ```HandleGraph```\n\npub fn gfa1_to_handlegraph(path: String) -> Result<HashGraph, GraphOperationError> {\n\n use gfa2::{gfa1::GFA, parser_gfa1::GFAParser};\n\n\n\n let parser: GFAParser<usize, ()> = GFAParser::new();\n\n let gfa: GFA<usize, ()> = match parser.parse_file(&path) {\n\n Ok(g) => g,\n\n Err(why) => return Err(GraphOperationError::FileError(why.to_string())),\n\n };\n\n let graph: HashGraph = HashGraph::from_gfa(&gfa);\n\n\n\n Ok(graph)\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 2, "score": 105003.40764938114 }, { "content": "/// Function that reads a ```GFA2``` files passed as input and return its\n\n/// corresponding ```HandleGraph```\n\npub fn gfa2_to_handlegraph(path: String) -> Result<HashGraph, GraphOperationError> {\n\n use gfa2::{gfa2::GFA2, parser_gfa2::GFA2Parser};\n\n\n\n let parser: GFA2Parser<usize, ()> = GFA2Parser::new();\n\n let gfa2: GFA2<usize, ()> = match parser.parse_file(&path) {\n\n Ok(g) => g,\n\n Err(why) => return Err(GraphOperationError::FileError(why.to_string())),\n\n };\n\n let graph: HashGraph = HashGraph::from_gfa2(&gfa2);\n\n\n\n Ok(graph)\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 3, "score": 105003.40764938114 }, { "content": "/// Print an HashGraph object in a simplified way\n\n/// # Example\n\n/// ```ignore\n\n/// print_simple_graph(&hashgraph);\n\n/// /*\n\n/// Graph: {\n\n/// Nodes: {\n\n/// 13: CTTGATT\n\n/// 12: TCAAGG\n\n/// 11: ACCTT\n\n/// }\n\n/// Edges: {\n\n/// 12- --> 13+\n\n/// 11+ --> 12-\n\n/// 11+ --> 13+\n\n/// }\n\n/// Paths: {\n\n/// 14: ACCTT -> CTTGATT\n\n/// 15: ACCTT -> CCTTGA -(TCAAGG) -> CTTGATT\n\n/// }\n\n/// }\n\n/// */\n\n/// ```\n\npub fn print_simple_graph(graph: &HashGraph) {\n\n graph.print_graph();\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 4, "score": 94575.02822176043 }, { "content": "// TODO: print the graph as a DeBrujin one 
in a graphical way\n\n/// Print an HashGraph object as a DeBrujin graph (more or less)\n\n/// () -> (1) -> AATTCG -> (2) -> CTTGGA -> (3) -> GAACTG -> ()\n\n/// \\ ^ \n\n/// -------------> AGGTCAG -------/\n\npub fn print_debrujin_graph(_graph: &HashGraph) {}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use gfa2::{gfa2::GFA2, parser_gfa2::GFA2Parser};\n\n\n\n #[test]\n\n fn can_print_graph() {\n\n use gfa2::{gfa1::GFA, parser_gfa1::GFAParser};\n\n let parser: GFA2Parser<usize, ()> = GFA2Parser::new();\n\n let gfa2: GFA2<usize, ()> = parser\n\n .parse_file(\"./tests/gfa2_files/spec_q7.gfa2\")\n\n .unwrap();\n\n let graph = HashGraph::from_gfa2(&gfa2);\n\n print_simple_graph(&graph);\n\n\n\n let parser: GFAParser<usize, ()> = GFAParser::new();\n\n let gfa: GFA<usize, ()> = parser.parse_file(\"./tests/gfa1_files/lil.gfa\").unwrap();\n\n let graph = HashGraph::from_gfa(&gfa);\n", "file_path": "src/graphoperation.rs", "rank": 5, "score": 82720.816571257 }, { "content": "/// Function that adds a path to read the node of a graph\n\n/// # Example\n\n/// ```ignore\n\n/// use handle_gfa::graphoperation::*;\n\n///\n\n/// let mut graph = HashGraph::from_gfa(&gfa2);\n\n/// let ids: Vec<&[u8]> = vec![b\"11+\", b\"13+\"];\n\n///\n\n/// match add_path(graph, Some(b\"TEST_PATH_1\"), ids) {\n\n/// Ok(g) => {\n\n/// let mut x = 0;\n\n/// while !g.get_path(&x).is_none() {\n\n/// g.print_path(&x);\n\n/// x += 1;\n\n/// }\n\n/// },\n\n/// Err(why) => println!(\"Error: {}\", why),\n\n/// };\n\n/// ```\n\npub fn add_path(\n\n mut graph: HashGraph,\n\n path_id: Option<&[u8]>,\n\n sequence_of_id: Vec<&[u8]>,\n\n) -> Result<HashGraph, GraphOperationError> {\n\n use bstr::ByteSlice;\n\n use gfa2::gfa2::orientation::Orientation;\n\n\n\n let path_id = path_id.unwrap_or(b\"default_path_id\");\n\n // check if the path it's circular\n\n let last = sequence_of_id.len() - 1;\n\n let is_circular: bool = sequence_of_id[0] == sequence_of_id[last];\n\n\n\n // create the path\n\n let path = 
graph.create_path_handle(path_id, is_circular);\n\n for seq in sequence_of_id.iter() {\n\n let last = seq.len() - 1;\n\n let seq_id = seq[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &seq[last..].to_str().unwrap();\n", "file_path": "src/graphoperation.rs", "rank": 6, "score": 76609.55962597695 }, { "content": "/// Function that modifies a path in a graph checking if the provided ```PathName``` exists\n\n/// # Example\n\n/// ```ignore\n\n/// use handle_gfa::graphoperation::*;\n\n///\n\n/// let mut graph = HashGraph::from_gfa(&gfa2);\n\n/// graph = graph.modify_path(graph, b\"14\", vec![b\"11+\", b\"12-\"]).unwrap();\n\n/// ```\n\npub fn modify_path(\n\n mut graph: HashGraph,\n\n path_name: &[u8],\n\n sequence_of_id: Vec<&[u8]>,\n\n) -> Result<HashGraph, GraphOperationError> {\n\n use bstr::ByteSlice;\n\n use gfa2::gfa2::orientation::Orientation;\n\n\n\n let path_name = path_name;\n\n let mut handles: Vec<Handle> = vec![];\n\n\n\n for seq in sequence_of_id.iter() {\n\n let last = seq.len() - 1;\n\n let seq_id = seq[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &seq[last..].to_str().unwrap();\n\n let orient: Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n", "file_path": "src/graphoperation.rs", "rank": 7, "score": 76602.7688146222 }, { "content": "/// Function that removes a path in a graph checking if the provided ```PathName``` exists\n\n/// # Example\n\n/// ```ignore\n\n/// use handle_gfa::graphoperation::*;\n\n///\n\n/// let mut graph = HashGraph::from_gfa(&gfa2);\n\n/// graph = graph.remove_path(graph, Some(&BString::from(\"14\")).unwrap();\n\n/// ```\n\npub fn remove_path(\n\n mut graph: HashGraph,\n\n path_name: Option<&[u8]>,\n\n) -> Result<HashGraph, GraphOperationError> {\n\n let path_name = path_name.unwrap_or(b\"default_path_id\");\n\n if graph.remove_path(path_name) {\n\n Ok(graph)\n\n } else {\n\n Err(GraphOperationError::PathNotExist(\n\n 
String::from_utf8(path_name.to_vec()).expect(\"Invalid UTF-8 character\"),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 8, "score": 76602.67801955379 }, { "content": "#[test]\n\nfn readme_file_test() {\n\n let parser: GFA2Parser<usize, ()> = GFA2Parser::new();\n\n let gfa2: GFA2<usize, ()> = parser.parse_file(\"./tests/gfa2_files/irl.gfa2\").unwrap();\n\n println!(\"{:#?}\", gfa2);\n\n println!(\"{}\", gfa2);\n\n let graph = HashGraph::from_gfa2(&gfa2);\n\n println!(\"{:#?}\", graph);\n\n print_simple_graph(&graph);\n\n}\n\n\n\n// about 20 seconds\n", "file_path": "tests/test.rs", "rank": 9, "score": 76131.12931352702 }, { "content": "#[test]\n\nfn moddable_medium_graph() {\n\n println!(\"Parse and create graph\");\n\n let parser: GFA2Parser<usize, ()> = GFA2Parser::new();\n\n let gfa2: GFA2<usize, ()> = parser.parse_file(\"./tests/big_files/test.gfa2\").unwrap();\n\n let mut graph = HashGraph::from_gfa2(&gfa2);\n\n\n\n // remove nodes, edges and paths\n\n println!(\"Remove 1000 nodes\");\n\n for i in 1..1001 {\n\n match remove_node(graph.clone(), i as u64) {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n const PATHS: [&[u8]; 3] = [\n\n b\"gi|568815592:32578768-32589835\",\n\n b\"gi|568815529:3998044-4011446\",\n\n b\"gi|568815551:3814534-3830133\",\n\n ];\n\n println!(\"Remove 3 paths\");\n", "file_path": "tests/test.rs", "rank": 10, "score": 72207.00681768371 }, { "content": "#[test]\n\n#[ignore]\n\nfn big_graph_with_big_operation() {\n\n // about 8 minutes\n\n println!(\"Parse and create graph\");\n\n let parser: GFAParser<usize, ()> = GFAParser::new();\n\n let gfa: GFA<usize, ()> = parser\n\n .parse_file(\"./tests/big_files/ape-4-0.10b.gfa\")\n\n .unwrap();\n\n let mut graph = HashGraph::from_gfa(&gfa);\n\n\n\n // about x minutes\n\n // remove nodes\n\n println!(\"Remove 1_000 nodes\");\n\n for i in 1..1_001 {\n\n match remove_node(\n\n graph.clone(),\n\n format!(\"{}{}\", 115, 
i).parse::<u64>().unwrap(),\n\n ) {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n", "file_path": "tests/test.rs", "rank": 11, "score": 69352.26583365219 }, { "content": "/// Function that modifies a link in a graph checking if the provided ```NodeId``` exists\n\n/// # Example\n\n/// ```ignore\n\n/// use handle_gfa::graphoperation::*;\n\n///\n\n/// let mut graph = HashGraph::from_gfa(&gfa2);\n\n/// graph = graph.modify_link(graph, b\"14+\", b\"15+\", b\"14+\", b\"17-\").unwrap();\n\n/// ```\n\npub fn modify_link(\n\n mut graph: HashGraph,\n\n from_node: &[u8],\n\n to_node: &[u8],\n\n new_from_node: Option<&[u8]>,\n\n new_to_node: Option<&[u8]>,\n\n) -> Result<HashGraph, GraphOperationError> {\n\n use bstr::ByteSlice;\n\n use gfa2::gfa2::orientation::Orientation;\n\n\n\n let last = from_node.len() - 1;\n\n let old_left_id = from_node[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &from_node[last..].to_str().unwrap();\n\n let old_left_orient: Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n\n return Err(GraphOperationError::OrientationNotExists(\n\n from_node.to_str().unwrap().to_string(),\n", "file_path": "src/graphoperation.rs", "rank": 12, "score": 59214.90611352324 }, { "content": "/// Function that removes a link in a graph checking if the provided ```NodeId``` exists\n\n/// # Example\n\n/// ```ignore\n\n/// use handle_gfa::graphoperation::*;\n\n///\n\n/// let mut graph = HashGraph::from_gfa(&gfa2);\n\n/// graph = graph.remove_link(graph, b\"14+\", b\"15+\").unwrap();\n\n/// ```\n\npub fn remove_link(\n\n mut graph: HashGraph,\n\n from_node: &[u8],\n\n to_node: &[u8],\n\n) -> Result<HashGraph, GraphOperationError> {\n\n use bstr::ByteSlice;\n\n use gfa2::gfa2::orientation::Orientation;\n\n\n\n let last = from_node.len() - 1;\n\n let left_id = from_node[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &from_node[last..].to_str().unwrap();\n\n let left_orient: 
Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n\n return Err(GraphOperationError::OrientationNotExists(\n\n from_node.to_str().unwrap().to_string(),\n\n ))\n\n }\n", "file_path": "src/graphoperation.rs", "rank": 13, "score": 59214.90611352324 }, { "content": "/// Function that adds a link between 2 existing ```Nodes``` in a graph.\n\n/// # Example\n\n/// ```ignore\n\n/// use handle_gfa::graphoperation::*;\n\n///\n\n/// let mut graph = HashGraph::from_gfa(&gfa2);\n\n/// graph = graph.add_link_between_nodes(graph, b\"14+\", b\"15+\").unwrap();\n\n/// ```\n\npub fn add_link_between_nodes(\n\n mut graph: HashGraph,\n\n from_node: &[u8],\n\n to_node: &[u8],\n\n) -> Result<HashGraph, GraphOperationError> {\n\n use bstr::ByteSlice;\n\n use gfa2::gfa2::orientation::Orientation;\n\n\n\n let last = from_node.len() - 1;\n\n let left_id = from_node[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &from_node[last..].to_str().unwrap();\n\n let left_orient: Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n\n return Err(GraphOperationError::OrientationNotExists(\n\n from_node.to_str().unwrap().to_string(),\n\n ))\n\n }\n", "file_path": "src/graphoperation.rs", "rank": 14, "score": 57349.804269436274 }, { "content": "#[test]\n\nfn extension_error() {\n\n match gfa2_to_handlegraph(\"./tests/gfa2_files/error_extension.txt\".to_string()) {\n\n Ok(g) => {\n\n let graph: HashGraph = g;\n\n print_simple_graph(&graph)\n\n }\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n}\n", "file_path": "tests/test.rs", "rank": 15, "score": 56983.31893754237 }, { "content": "/// Function that adds a node in a graph checking if the provided ```NodeId``` already exists\n\n/// # Example\n\n/// ```ignore\n\n/// use handle_gfa::graphoperation::*;\n\n///\n\n/// let mut graph = HashGraph::from_gfa(&gfa2);\n\n/// graph = graph.add_node(graph, 14 as u64, 
Some(b\"TEST_NODE_1\")).unwrap();\n\n/// ```\n\npub fn add_node<T: Into<NodeId>>(\n\n mut graph: HashGraph,\n\n nodeid: T,\n\n sequence: Option<&[u8]>,\n\n) -> Result<HashGraph, GraphOperationError> {\n\n let sequence = sequence.unwrap_or(b\"DEFAULT_SEQUENCE\");\n\n let nodeid_temp = nodeid.into();\n\n\n\n if graph.get_node(&nodeid_temp).is_some() {\n\n return Err(GraphOperationError::IdAlreadyExist(nodeid_temp.to_string()));\n\n } \n\n graph.create_handle(sequence, nodeid_temp); \n\n Ok(graph) \n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 16, "score": 46069.15017399652 }, { "content": "/// Function that removes a node in a graph checking if the provided ```NodeId``` exists\n\n/// # Example\n\n/// ```ignore\n\n/// use handle_gfa::graphoperation::*;\n\n///\n\n/// let mut graph = HashGraph::from_gfa(&gfa2);\n\n/// graph = graph.remove_node(graph, 14 as u64).unwrap();\n\n/// ```\n\npub fn remove_node<T: Into<NodeId>>(\n\n mut graph: HashGraph,\n\n nodeid: T,\n\n) -> Result<HashGraph, GraphOperationError> {\n\n let node = nodeid.into();\n\n if graph.remove_handle(node) {\n\n Ok(graph)\n\n } else {\n\n Err(GraphOperationError::NodesNotExist(\n\n node.to_string(),\n\n \"\".to_string(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 17, "score": 46067.748135610054 }, { "content": "/// Function that modifiws a node in a graph checking if the provided ```NodeId``` exists\n\n/// # Example\n\n/// ```ignore\n\n/// use handle_gfa::graphoperation::*;\n\n///\n\n/// let mut graph = HashGraph::from_gfa(&gfa2);\n\n/// graph = graph.modify_node(graph, 14 as u64, b\"NEW_SEQUENCE\").unwrap();\n\n/// ```\n\npub fn modify_node<T: Into<NodeId>>(\n\n mut graph: HashGraph,\n\n nodeid: T,\n\n sequence: &[u8],\n\n) -> Result<HashGraph, GraphOperationError> {\n\n let node = nodeid.into();\n\n\n\n if graph.modify_handle(node, sequence) {\n\n Ok(graph)\n\n } else {\n\n Err(GraphOperationError::NodesNotExist(\n\n node.to_string(),\n\n \"\".to_string(),\n\n 
))\n\n }\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 18, "score": 46067.60083990536 }, { "content": " x += 10_000;\n\n }\n\n */\n\n\n\n // about x minutes\n\n println!(\"Save modified file\");\n\n match save_as_gfa1_file(\n\n &graph,\n\n Some(String::from(\"./tests/output_files/new_ape-4-0.10b.gfa\")),\n\n ) {\n\n Ok(_) => println!(\"File saved!\"),\n\n Err(why) => println!(\"Error: {}\", why),\n\n }\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 19, "score": 27100.590880941232 }, { "content": " match remove_link(graph.clone(), b\"2139-\", b\"2138-\") {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n match remove_link(graph.clone(), b\"2140+\", b\"2141+\") {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n\n\n println!(\"Add 10 paths and edges\");\n\n // add nodes, edges and paths\n\n let paths: Vec<&[u8]> = vec![\n\n b\"5001+\", b\"5002+\", b\"5003-\", b\"5004+\", b\"5005-\", b\"5006-\", b\"5007+\", b\"5008+\", b\"5009+\",\n\n b\"5010-\",\n\n ];\n\n for i in 1..11 {\n\n match add_node(graph.clone(), 5000 + i as u64, None) {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n", "file_path": "tests/test.rs", "rank": 20, "score": 27098.84153901737 }, { "content": " if i > 1 {\n\n match add_link_between_nodes(\n\n graph.clone(),\n\n format!(\"{}{}\", 4000 + i - 1 as u64, \"+\".to_string()).as_bytes(),\n\n format!(\"{}{}\", 4000 + i as u64, \"+\".to_string()).as_bytes(),\n\n ) {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n }\n\n match add_path(graph.clone(), None, paths.clone()) {\n\n Ok(_) => (),\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 21, "score": 27098.608505602035 }, { "content": " for i in 1..PATHS.len() {\n\n let path_name: &[u8] = PATHS.get(i as usize).unwrap();\n\n match remove_path(graph.clone(), Some(path_name)) {\n\n Ok(g) => graph = g,\n\n 
Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n println!(\"Remove 5 edges\");\n\n match remove_link(graph.clone(), b\"2138-\", b\"2137-\") {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n match remove_link(graph.clone(), b\"2139+\", b\"2140+\") {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n match remove_link(graph.clone(), b\"2139+\", b\"3090+\") {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n", "file_path": "tests/test.rs", "rank": 22, "score": 27097.71401426246 }, { "content": " }\n\n /*\n\n const PATHS: [&[u8]; 10] = [\n\n b\"path-1\", b\"path-2\", b\"path-3\", b\"path-4\", b\"path-5\", b\"path-6\", b\"path-7\", b\"path-8\",\n\n b\"path-9\", b\"path-10\",\n\n ];\n\n // about x minutes\n\n println!(\"Add 10 paths containing 100 segment ids each\");\n\n // add paths\n\n let mut x = 10_000;\n\n for i in 1..PATHS.len() {\n\n let mut ids: Vec<&[u8]> = vec![];\n\n let path_name: &[u8] = PATHS.get(i as usize).unwrap();\n\n for n in 1..101 {\n\n ids.push(format!(\"{}{}{}\", 115, x + n, \"+\").as_bytes()); \n\n }\n\n match add_path(graph.clone(), Some(path_name), ids) {\n\n Ok(g) => graph = g,\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n", "file_path": "tests/test.rs", "rank": 23, "score": 27097.39771134803 }, { "content": "// manipulate hashgraph\n\nuse handlegraph2::hashgraph::*;\n\n\n\n// manipulate files\n\nuse gfa2::{gfa1::GFA, gfa2::GFA2, parser_gfa1::GFAParser, parser_gfa2::GFA2Parser};\n\n\n\nuse handlegfa::{fileoperation::*, graphoperation::*};\n\n\n\n#[test]\n", "file_path": "tests/test.rs", "rank": 24, "score": 27097.398689201767 }, { "content": "// manipulate hashgraph\n\nuse handlegraph2::{\n\n handle::{Edge, Handle, NodeId},\n\n hashgraph::*,\n\n mutablehandlegraph::*,\n\n pathgraph::PathHandleGraph,\n\n};\n\n\n\npub mod error;\n\npub use self::error::*;\n\n\n\n/// Function that reads a ```GFA2``` files passed as input and return 
its\n\n/// corresponding ```HandleGraph```\n", "file_path": "src/graphoperation.rs", "rank": 33, "score": 14.219548124007746 }, { "content": "# RS-HANDLEGFA\n\nrs-handlegfa it's a tool to manipulate GFA files as graphs.\n\nThis tool use 2 libraries to make this possible:\n\n- rs-gfa2 that is used for parsing a file and check if it's correct for the chosen format. [link here](https://github.com/DrStiev/rs-gfa2)\n\n- rs-handlegraph2 that is used to create the hashgraph associated with a GFA object. [link here](https://github.com/DrStiev/rs-handlegraph2)\n\n\n\n## HOW IT WORKS\n\nHandleGFA performs three main tasks while running: \n\n1. Control wheter the file is comform to the format GFA1 or GFA2 and create the associated HashGraph\n\n2. Manipulate the graph through 3 main operation:\n\n - ADD Operation: such as add nodes, links between them and paths\n\n - REMOVE Operation: such as remove nodes, links between them and paths\n\n - MODIFY Operation: such as modify nodes, links between them and paths\n\n3. 
And at last, save the resulting graph back as a GFA file\n\n\n", "file_path": "README.md", "rank": 34, "score": 12.630703582607083 }, { "content": " print_simple_graph(&graph);\n\n match remove_link(graph, b\"12-\", b\"13+\") {\n\n Ok(g) => print_simple_graph(&g),\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n\n\n #[test]\n\n fn can_remove_path() {\n\n use bstr::BString;\n\n\n\n match gfa2_to_handlegraph(\"./tests/gfa2_files/spec_q7.gfa2\".to_string()) {\n\n Ok(g) => {\n\n let graph: HashGraph = g;\n\n print_simple_graph(&graph);\n\n match remove_path(graph, Some(&BString::from(\"14\"))) {\n\n Ok(g) => print_simple_graph(&g),\n", "file_path": "src/graphoperation.rs", "rank": 35, "score": 12.614373970352297 }, { "content": "/// define a custom error type for the program\n\nuse std::{error, fmt};\n\n\n\npub type GraphOperationResult<T> = Result<T, GraphOperationError>;\n\n\n\n#[derive(Debug)]\n\npub enum GraphOperationError {\n\n FileError(String),\n\n IdAlreadyExist(String),\n\n NodesNotExist(String, String),\n\n EdgeNotExist(String, String),\n\n PathNotExist(String),\n\n OrientationNotExists(String),\n\n Unknown,\n\n}\n\n\n\nimpl fmt::Display for GraphOperationError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use GraphOperationError as GE;\n\n match self {\n", "file_path": "src/graphoperation/error.rs", "rank": 36, "score": 12.495572512728184 }, { "content": " Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n\n\n #[test]\n\n fn can_modify_path() {\n\n match gfa2_to_handlegraph(\"./tests/gfa2_files/spec_q7.gfa2\".to_string()) {\n\n Ok(g) => {\n\n let graph: HashGraph = g;\n\n print_simple_graph(&graph);\n\n //let smaller path = \"11+ 12-\";\n\n match modify_path(graph, b\"14\", vec![b\"11+\", b\"12-\"]) {\n\n Ok(g) => print_simple_graph(&g),\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n 
}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 37, "score": 11.729808517361686 }, { "content": "\n\n #[test]\n\n fn can_add_link() {\n\n let parser: GFA2Parser<usize, ()> = GFA2Parser::new();\n\n let gfa2: GFA2<usize, ()> = parser\n\n .parse_file(\"./tests/gfa2_files/spec_q7.gfa2\")\n\n .unwrap();\n\n let mut graph = HashGraph::from_gfa2(&gfa2);\n\n\n\n graph = add_node(graph, 14 as u64, Some(b\"TEST_NODE_1\")).unwrap();\n\n graph = add_node(graph, 15 as u64, Some(b\"TEST_NODE_2\")).unwrap();\n\n print_simple_graph(&graph);\n\n match add_link_between_nodes(graph, b\"14+\", b\"15+\") {\n\n Ok(g) => print_simple_graph(&g),\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n\n\n #[test]\n\n fn can_add_path() {\n", "file_path": "src/graphoperation.rs", "rank": 38, "score": 10.694031515547847 }, { "content": " #[test]\n\n fn can_remove_node() {\n\n match gfa2_to_handlegraph(\"./tests/gfa2_files/spec_q7.gfa2\".to_string()) {\n\n Ok(g) => {\n\n let graph: HashGraph = g;\n\n print_simple_graph(&graph);\n\n match remove_node(graph, 11 as u64) {\n\n Ok(g) => print_simple_graph(&g),\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n\n\n #[test]\n\n fn can_remove_edge() {\n\n match gfa2_to_handlegraph(\"./tests/gfa2_files/spec_q7.gfa2\".to_string()) {\n\n Ok(g) => {\n\n let graph: HashGraph = g;\n", "file_path": "src/graphoperation.rs", "rank": 39, "score": 10.512914753038316 }, { "content": " print_simple_graph(&graph);\n\n }\n\n\n\n #[test]\n\n fn can_convert_file_to_handlegraph() {\n\n match gfa2_to_handlegraph(\"./tests/gfa2_files/spec_q7.gfa2\".to_string()) {\n\n Ok(g) => {\n\n let graph: HashGraph = g;\n\n print_simple_graph(&graph)\n\n }\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n\n\n #[test]\n\n fn can_modify_node() {\n\n match gfa2_to_handlegraph(\"./tests/gfa2_files/spec_q7.gfa2\".to_string()) {\n\n Ok(g) => {\n\n let graph: HashGraph = g;\n\n 
print_simple_graph(&graph);\n", "file_path": "src/graphoperation.rs", "rank": 40, "score": 10.400384136289354 }, { "content": " let parser: GFA2Parser<usize, ()> = GFA2Parser::new();\n\n let gfa2: GFA2<usize, ()> = parser\n\n .parse_file(\"./tests/gfa2_files/spec_q7.gfa2\")\n\n .unwrap();\n\n let graph = HashGraph::from_gfa2(&gfa2);\n\n let ids: Vec<&[u8]> = vec![b\"11+\", b\"13+\"];\n\n print_simple_graph(&graph);\n\n match add_path(graph, Some(b\"TEST_PATH_1\"), ids) {\n\n Ok(g) => print_simple_graph(&g),\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n}\n", "file_path": "src/graphoperation.rs", "rank": 41, "score": 10.399732077201746 }, { "content": " match modify_node(graph, 11 as u64, b\"NEW_TEST_SEQUENCE\") {\n\n Ok(g) => print_simple_graph(&g),\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n\n\n #[test]\n\n fn can_modify_edge() {\n\n match gfa2_to_handlegraph(\"./tests/gfa2_files/spec_q7.gfa2\".to_string()) {\n\n Ok(g) => {\n\n let graph: HashGraph = g;\n\n print_simple_graph(&graph);\n\n match modify_link(graph, b\"11+\", b\"13+\", Some(b\"13+\"), Some(b\"11+\")) {\n\n Ok(g) => print_simple_graph(&g),\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n", "file_path": "src/graphoperation.rs", "rank": 42, "score": 9.917450232330175 }, { "content": " Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n\n\n\n #[test]\n\n fn can_add_node() {\n\n let parser: GFA2Parser<usize, ()> = GFA2Parser::new();\n\n let gfa2: GFA2<usize, ()> = parser\n\n .parse_file(\"./tests/gfa2_files/spec_q7.gfa2\")\n\n .unwrap();\n\n let graph2 = HashGraph::from_gfa2(&gfa2);\n\n print_simple_graph(&graph2);\n\n match add_node(graph2, 14 as u64, Some(b\"TEST_NODE_1\")) {\n\n Ok(g) => print_simple_graph(&g),\n\n Err(why) => println!(\"Error: {}\", why),\n\n };\n\n }\n", "file_path": "src/graphoperation.rs", "rank": 43, "score": 
8.119146082775318 }, { "content": "pub mod fileoperation;\n\npub mod graphoperation;\n", "file_path": "src/lib.rs", "rank": 45, "score": 6.513222983651973 }, { "content": " let orient: Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n\n return Err(GraphOperationError::OrientationNotExists(\n\n seq.to_str().unwrap().to_string(),\n\n ))\n\n }\n\n };\n\n\n\n let handle = Handle::new(seq_id.parse::<u64>().unwrap(), orient);\n\n graph.append_step(&path, handle);\n\n }\n\n\n\n Ok(graph)\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 46, "score": 6.331619526834308 }, { "content": " return Err(GraphOperationError::OrientationNotExists(\n\n seq.to_str().unwrap().to_string(),\n\n ))\n\n }\n\n };\n\n\n\n let handle = Handle::new(seq_id.parse::<u64>().unwrap(), orient);\n\n handles.push(handle);\n\n }\n\n if graph.modify_path(path_name, handles) {\n\n Ok(graph)\n\n } else {\n\n Err(GraphOperationError::PathNotExist(\n\n String::from_utf8(path_name.to_vec()).expect(\"Invalid UTF-8 character\"),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 47, "score": 5.5337975974601745 }, { "content": " GE::FileError(file) => write!(f, \"{}\", file),\n\n GE::IdAlreadyExist(id) => write!(f, \"The Id provided ({}) already exists\", id),\n\n GE::NodesNotExist(node_left, node_right) => {\n\n write!(f, \"Cannot find the node(s): {} {}\", node_left, node_right)\n\n }\n\n GE::EdgeNotExist(l, r) => write!(f, \"The Edge ({} -> {}) did not exist\", l, r),\n\n GE::PathNotExist(path) => write!(f, \"The Path ({}) did not exist\", path),\n\n GE::OrientationNotExists(orientation) => write!(\n\n f,\n\n \"Segment reference Id ({}) did not include orientation\",\n\n orientation\n\n ),\n\n GE::Unknown => write!(f, \"Unknown error while operating on the graph\"),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Error for GraphOperationError {}\n", "file_path": "src/graphoperation/error.rs", "rank": 48, "score": 4.443380091026944 
}, { "content": " }\n\n None => Some(old_right),\n\n };\n\n\n\n let new_left = match new_from_node {\n\n Some(id) => {\n\n let last = id.len() - 1;\n\n let new_left_id = id[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &id[last..].to_str().unwrap();\n\n let new_left_orient: Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n\n return Err(GraphOperationError::OrientationNotExists(\n\n id.to_str().unwrap().to_string(),\n\n ))\n\n }\n\n };\n\n Some(Handle::new(\n", "file_path": "src/graphoperation.rs", "rank": 49, "score": 4.375914736123916 }, { "content": " new_left_id.parse::<u64>().unwrap(),\n\n new_left_orient,\n\n ))\n\n }\n\n None => Some(old_left),\n\n };\n\n\n\n if graph.modify_edge(Edge(old_left, old_right), new_left, new_right) {\n\n Ok(graph)\n\n } else {\n\n Err(GraphOperationError::EdgeNotExist(\n\n format!(\"{}{}\", old_left.id().to_string(), old_left_orient),\n\n format!(\"{}{}\", old_right.id().to_string(), old_right_orient),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 50, "score": 4.354584352133799 }, { "content": " };\n\n\n\n let last = to_node.len() - 1;\n\n let right_id = to_node[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &to_node[last..].to_str().unwrap();\n\n let right_orient: Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n\n return Err(GraphOperationError::OrientationNotExists(\n\n to_node.to_str().unwrap().to_string(),\n\n ))\n\n }\n\n };\n\n\n\n let right = Handle::new(right_id.parse::<u64>().unwrap(), right_orient);\n\n let left = Handle::new(left_id.parse::<u64>().unwrap(), left_orient);\n\n\n\n if graph.remove_edge(Edge(left, right)) {\n\n Ok(graph)\n\n } else {\n\n Err(GraphOperationError::EdgeNotExist(\n\n from_node.to_str().unwrap().to_string(),\n\n to_node.to_str().unwrap().to_string(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 51, "score": 
3.521105660619754 }, { "content": " };\n\n\n\n let last = to_node.len() - 1;\n\n let right_id = to_node[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &to_node[last..].to_str().unwrap();\n\n let right_orient: Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n\n return Err(GraphOperationError::OrientationNotExists(\n\n to_node.to_str().unwrap().to_string(),\n\n ))\n\n }\n\n };\n\n\n\n let right = Handle::new(right_id.parse::<u64>().unwrap(), right_orient);\n\n let left = Handle::new(left_id.parse::<u64>().unwrap(), left_orient);\n\n\n\n if graph.create_edge(Edge(left, right)) {\n\n Ok(graph)\n\n } else {\n\n Err(GraphOperationError::EdgeNotExist(\n\n format!(\"{}{}\", from_node.to_str().unwrap(), left_orient),\n\n format!(\"{}{}\", to_node.to_str().unwrap(), right_orient),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/graphoperation.rs", "rank": 52, "score": 3.432636098938126 }, { "content": "\n\n let new_right = match new_to_node {\n\n Some(id) => {\n\n let last = id.len() - 1;\n\n let new_right_id = id[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &id[last..].to_str().unwrap();\n\n let new_right_orient: Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n\n return Err(GraphOperationError::OrientationNotExists(\n\n id.to_str().unwrap().to_string(),\n\n ))\n\n }\n\n };\n\n Some(Handle::new(\n\n new_right_id.parse::<u64>().unwrap(),\n\n new_right_orient,\n\n ))\n", "file_path": "src/graphoperation.rs", "rank": 53, "score": 2.6961780881147366 }, { "content": " ))\n\n }\n\n };\n\n\n\n let last = to_node.len() - 1;\n\n let old_right_id = to_node[..last].to_str().unwrap();\n\n\n\n let sgn: &str = &to_node[last..].to_str().unwrap();\n\n let old_right_orient: Orientation = match sgn {\n\n \"+\" => Orientation::Forward,\n\n \"-\" => Orientation::Backward,\n\n _ => {\n\n return Err(GraphOperationError::OrientationNotExists(\n\n 
to_node.to_str().unwrap().to_string(),\n\n ))\n\n }\n\n };\n\n\n\n let old_right = Handle::new(old_right_id.parse::<u64>().unwrap(), old_right_orient);\n\n let old_left = Handle::new(old_left_id.parse::<u64>().unwrap(), old_left_orient);\n", "file_path": "src/graphoperation.rs", "rank": 54, "score": 1.9361218696730655 } ]
Rust
rust/src/checkpoints.rs
rdettai/delta-rs
f1f9782196e3f3420399b44377acd153a3d0d7cf
use arrow::datatypes::Schema as ArrowSchema; use arrow::error::ArrowError; use arrow::json::reader::Decoder; use log::*; use parquet::arrow::ArrowWriter; use parquet::errors::ParquetError; use parquet::file::writer::InMemoryWriteableCursor; use std::convert::TryFrom; use super::action; use super::delta_arrow::delta_log_schema_for_table; use super::open_table_with_version; use super::schema::*; use super::storage; use super::storage::{StorageBackend, StorageError}; use super::{CheckPoint, DeltaTableError, DeltaTableState}; #[derive(thiserror::Error, Debug)] pub enum CheckPointWriterError { #[error("DeltaTableMetadata not present in DeltaTableState")] MissingMetaData, #[error("DeltaTableError: {source}")] DeltaTable { #[from] source: DeltaTableError, }, #[error("Failed to write parquet: {}", .source)] ParquetError { #[from] source: ParquetError, }, #[error("Failed to convert into Arrow schema: {}", .source)] ArrowError { #[from] source: ArrowError, }, #[error("StorageError: {source}")] Storage { #[from] source: StorageError, }, #[error("serde_json::Error: {source}")] JSONSerialization { #[from] source: serde_json::Error, }, } pub struct CheckPointWriter { table_uri: String, delta_log_uri: String, last_checkpoint_uri: String, storage: Box<dyn StorageBackend>, } impl CheckPointWriter { pub fn new(table_uri: &str, storage: Box<dyn StorageBackend>) -> Self { let delta_log_uri = storage.join_path(table_uri, "_delta_log"); let last_checkpoint_uri = storage.join_path(delta_log_uri.as_str(), "_last_checkpoint"); Self { table_uri: table_uri.to_string(), delta_log_uri, last_checkpoint_uri, storage, } } pub fn new_for_table_uri(table_uri: &str) -> Result<Self, CheckPointWriterError> { let storage_backend = storage::get_backend_for_uri(table_uri)?; Ok(Self::new(table_uri, storage_backend)) } pub async fn create_checkpoint_for_version( &self, version: DeltaDataTypeVersion, ) -> Result<(), CheckPointWriterError> { let table = open_table_with_version(self.table_uri.as_str(), 
version).await?; self.create_checkpoint_from_state(version, table.get_state()) .await } pub async fn create_checkpoint_from_state( &self, version: DeltaDataTypeVersion, state: &DeltaTableState, ) -> Result<(), CheckPointWriterError> { info!("Writing parquet bytes to checkpoint buffer."); let parquet_bytes = self.parquet_bytes_from_state(state)?; let size = parquet_bytes.len() as i64; let checkpoint = CheckPoint::new(version, size, None); let file_name = format!("{:020}.checkpoint.parquet", version); let checkpoint_uri = self.storage.join_path(&self.delta_log_uri, &file_name); info!("Writing checkpoint to {:?}.", checkpoint_uri); self.storage .put_obj(&checkpoint_uri, &parquet_bytes) .await?; let last_checkpoint_content: serde_json::Value = serde_json::to_value(&checkpoint)?; let last_checkpoint_content = serde_json::to_string(&last_checkpoint_content)?; info!( "Writing _last_checkpoint to {:?}.", self.last_checkpoint_uri ); self.storage .put_obj( self.last_checkpoint_uri.as_str(), last_checkpoint_content.as_bytes(), ) .await?; Ok(()) } fn parquet_bytes_from_state( &self, state: &DeltaTableState, ) -> Result<Vec<u8>, CheckPointWriterError> { let current_metadata = state .current_metadata() .ok_or(CheckPointWriterError::MissingMetaData)?; let mut jsons = std::iter::once(action::Action::protocol(action::Protocol { min_reader_version: state.min_reader_version(), min_writer_version: state.min_writer_version(), })) .chain(std::iter::once(action::Action::metaData( action::MetaData::try_from(current_metadata.clone())?, ))) .chain(state.files().iter().map(|f| action::Action::add(f.clone()))) .chain( state .tombstones() .iter() .map(|f| action::Action::remove(f.clone())), ) .chain( state .app_transaction_version() .iter() .map(|(app_id, version)| { action::Action::txn(action::Txn { app_id: app_id.clone(), version: *version, last_updated: None, }) }), ) .map(|a| serde_json::to_value(a).map_err(ArrowError::from)); debug!("Preparing checkpoint parquet buffer."); let 
arrow_schema = delta_log_schema_for_table( <ArrowSchema as TryFrom<&Schema>>::try_from(&current_metadata.schema)?, current_metadata.partition_columns.as_slice(), ); let writeable_cursor = InMemoryWriteableCursor::default(); let mut writer = ArrowWriter::try_new(writeable_cursor.clone(), arrow_schema.clone(), None)?; debug!("Writing to checkpoint parquet buffer..."); let batch_size = state.app_transaction_version().len() + state.tombstones().len() + state.files().len() + 2; let decoder = Decoder::new(arrow_schema, batch_size, None); while let Some(batch) = decoder.next_batch(&mut jsons)? { writer.write(&batch)?; } let _ = writer.close()?; debug!("Finished writing checkpoint parquet buffer."); Ok(writeable_cursor.data()) } }
use arrow::datatypes::Schema as ArrowSchema; use arrow::error::ArrowError; use arrow::json::reader::Decoder; use log::*; use parquet::arrow::ArrowWriter; use parquet::errors::ParquetError; use parquet::file::writer::InMemoryWriteableCursor; use std::convert::TryFrom; use super::action; use super::delta_arrow::delta_log_schema_for_table; use super::open_table_with_version; use super::schema::*; use super::storage; use super::storage::{StorageBackend, StorageError}; use super::{CheckPoint, DeltaTableError, DeltaTableState}; #[derive(thiserror::Error, Debug)] pub enum CheckPointWriterError { #[error("DeltaTableMetadata not present in DeltaTableState")] MissingMetaData, #[error("DeltaTableError: {source}")] DeltaTable { #[from] source: DeltaTableError, }, #[error("Failed to write parquet: {}", .source)] ParquetError { #[from] source: ParquetError, }, #[error("Failed to convert into Arrow schema: {}", .source)] ArrowError { #[from] source: ArrowError, }, #[error("StorageError: {source}")] Storage { #[from] source: StorageError, }, #[error("serde_json::Error: {source}")] JSONSerialization { #[from] source: serde_json::Error, }, } pub struct CheckPointWriter { table_uri: String, delta_log_uri: String, last_checkpoint_uri: String, storage: Box<dyn StorageBackend>, } impl CheckPointWriter { pub fn new(table_uri: &str, storage: Box<dyn StorageBackend>) -> Self { let delta_log_uri = storage.join_path(table_uri, "_delta_log"); let last_checkpoint_uri = storage.join_path(delta_log_uri.as_str(), "_last_checkpoint"); Self { table_uri: table_uri.to_string(), delta_log_uri, last_checkpoint_uri, storage, } } pub fn new_for_table_uri(table_uri: &str) -> Result<Self, CheckPointWriterError> { let storage_backend = storage::get_backend_for_uri(table_uri)?; Ok(Self::new(table_uri, storage_backend)) } pub async fn create_checkpoint_for_version( &self, version: DeltaDataTypeVersion, ) -> Result<(), CheckPointWriterError> { let table = open_table_with_version(self.table_uri.as_str(), 
version).await?; self.create_checkpoint_from_state(version, table.get_state()) .await }
fn parquet_bytes_from_state( &self, state: &DeltaTableState, ) -> Result<Vec<u8>, CheckPointWriterError> { let current_metadata = state .current_metadata() .ok_or(CheckPointWriterError::MissingMetaData)?; let mut jsons = std::iter::once(action::Action::protocol(action::Protocol { min_reader_version: state.min_reader_version(), min_writer_version: state.min_writer_version(), })) .chain(std::iter::once(action::Action::metaData( action::MetaData::try_from(current_metadata.clone())?, ))) .chain(state.files().iter().map(|f| action::Action::add(f.clone()))) .chain( state .tombstones() .iter() .map(|f| action::Action::remove(f.clone())), ) .chain( state .app_transaction_version() .iter() .map(|(app_id, version)| { action::Action::txn(action::Txn { app_id: app_id.clone(), version: *version, last_updated: None, }) }), ) .map(|a| serde_json::to_value(a).map_err(ArrowError::from)); debug!("Preparing checkpoint parquet buffer."); let arrow_schema = delta_log_schema_for_table( <ArrowSchema as TryFrom<&Schema>>::try_from(&current_metadata.schema)?, current_metadata.partition_columns.as_slice(), ); let writeable_cursor = InMemoryWriteableCursor::default(); let mut writer = ArrowWriter::try_new(writeable_cursor.clone(), arrow_schema.clone(), None)?; debug!("Writing to checkpoint parquet buffer..."); let batch_size = state.app_transaction_version().len() + state.tombstones().len() + state.files().len() + 2; let decoder = Decoder::new(arrow_schema, batch_size, None); while let Some(batch) = decoder.next_batch(&mut jsons)? { writer.write(&batch)?; } let _ = writer.close()?; debug!("Finished writing checkpoint parquet buffer."); Ok(writeable_cursor.data()) } }
pub async fn create_checkpoint_from_state( &self, version: DeltaDataTypeVersion, state: &DeltaTableState, ) -> Result<(), CheckPointWriterError> { info!("Writing parquet bytes to checkpoint buffer."); let parquet_bytes = self.parquet_bytes_from_state(state)?; let size = parquet_bytes.len() as i64; let checkpoint = CheckPoint::new(version, size, None); let file_name = format!("{:020}.checkpoint.parquet", version); let checkpoint_uri = self.storage.join_path(&self.delta_log_uri, &file_name); info!("Writing checkpoint to {:?}.", checkpoint_uri); self.storage .put_obj(&checkpoint_uri, &parquet_bytes) .await?; let last_checkpoint_content: serde_json::Value = serde_json::to_value(&checkpoint)?; let last_checkpoint_content = serde_json::to_string(&last_checkpoint_content)?; info!( "Writing _last_checkpoint to {:?}.", self.last_checkpoint_uri ); self.storage .put_obj( self.last_checkpoint_uri.as_str(), last_checkpoint_content.as_bytes(), ) .await?; Ok(()) }
function_block-full_function
[ { "content": "#[inline]\n\npub fn get_version() -> Result<Version, String> {\n\n imp::get_version()\n\n}\n", "file_path": "glibc_version/src/lib.rs", "rank": 0, "score": 266271.3643664542 }, { "content": "#[inline]\n\npub fn atomic_rename(from: &str, to: &str) -> Result<(), StorageError> {\n\n imp::atomic_rename(from, to)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs::File;\n\n use std::io::Write;\n\n use std::path::{Path, PathBuf};\n\n\n\n #[test]\n\n fn test_atomic_rename() {\n\n let tmp_dir = tempdir::TempDir::new_in(\".\", \"test_atomic_rename\").unwrap();\n\n let a = create_file(&tmp_dir.path(), \"a\");\n\n let b = create_file(&tmp_dir.path(), \"b\");\n\n let c = &tmp_dir.path().join(\"c\");\n\n\n\n // unsuccessful move not_exists to C, not_exists is missing\n\n match atomic_rename(\"not_exists\", c.to_str().unwrap()) {\n", "file_path": "rust/src/storage/file/rename.rs", "rank": 1, "score": 247664.6287053142 }, { "content": "/// Dynamically construct a Storage backend trait object based on scheme for provided URI\n\npub fn get_backend_for_uri(uri: &str) -> Result<Box<dyn StorageBackend>, StorageError> {\n\n match parse_uri(uri)? 
{\n\n Uri::LocalPath(root) => Ok(Box::new(file::FileStorageBackend::new(root))),\n\n #[cfg(feature = \"s3\")]\n\n Uri::S3Object(_) => Ok(Box::new(s3::S3StorageBackend::new()?)),\n\n #[cfg(feature = \"azure\")]\n\n Uri::AdlsGen2Object(obj) => Ok(Box::new(azure::AdlsGen2Backend::new(obj.file_system)?)),\n\n }\n\n}\n", "file_path": "rust/src/storage/mod.rs", "rank": 2, "score": 230104.90017461084 }, { "content": "fn table_uri_from_parts(bucket: &str, path: &str) -> Result<String, CheckPointLambdaError> {\n\n let mut table_uri = PathBuf::new();\n\n\n\n table_uri.push(format!(\"s3://{}\", bucket));\n\n table_uri.push(path);\n\n\n\n Ok(table_uri\n\n .to_str()\n\n .ok_or_else(|| CheckPointLambdaError::InvalidTableUri(table_uri.clone()))?\n\n .to_string())\n\n}\n\n\n", "file_path": "aws/delta-checkpoint/src/main.rs", "rank": 3, "score": 206806.37628327435 }, { "content": "/// Parses the URI and returns a variant of the Uri enum for the appropriate storage backend based\n\n/// on scheme.\n\npub fn parse_uri<'a>(path: &'a str) -> Result<Uri<'a>, UriError> {\n\n let parts: Vec<&'a str> = path.split(\"://\").collect();\n\n\n\n if parts.len() == 1 {\n\n return Ok(Uri::LocalPath(parts[0]));\n\n }\n\n\n\n match parts[0] {\n\n \"s3\" => {\n\n cfg_if::cfg_if! 
{\n\n if #[cfg(feature = \"s3\")] {\n\n let mut path_parts = parts[1].splitn(2, '/');\n\n let bucket = match path_parts.next() {\n\n Some(x) => x,\n\n None => {\n\n return Err(UriError::MissingObjectBucket);\n\n }\n\n };\n\n let key = match path_parts.next() {\n\n Some(x) => x,\n", "file_path": "rust/src/storage/mod.rs", "rank": 4, "score": 197500.64491329747 }, { "content": "/// Returns rust crate version, can be use used in language bindings to expose Rust core version\n\npub fn crate_version() -> &'static str {\n\n env!(\"CARGO_PKG_VERSION\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use pretty_assertions::assert_eq;\n\n use std::collections::HashMap;\n\n\n\n #[test]\n\n fn state_records_new_txn_version() {\n\n let mut app_transaction_version = HashMap::new();\n\n app_transaction_version.insert(\"abc\".to_string(), 1);\n\n app_transaction_version.insert(\"xyz\".to_string(), 1);\n\n\n\n let mut state = DeltaTableState {\n\n files: vec![],\n\n commit_infos: vec![],\n\n tombstones: vec![],\n", "file_path": "rust/src/delta.rs", "rank": 5, "score": 194574.52639025985 }, { "content": "pub fn create_remove(path: String) -> Remove {\n\n let deletion_timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();\n\n let deletion_timestamp = deletion_timestamp.as_millis() as i64;\n\n\n\n Remove {\n\n path,\n\n deletion_timestamp: deletion_timestamp,\n\n data_change: true,\n\n extended_file_metadata: Some(false),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "rust/tests/write_exploration.rs", "rank": 6, "score": 181867.91966365214 }, { "content": "fn get_string(attr: Option<&AttributeValue>) -> Result<String, DynamoError> {\n\n Ok(attr\n\n .and_then(|r| r.s.as_ref())\n\n .ok_or(DynamoError::InvalidItemSchema)?\n\n .clone())\n\n}\n\n\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 7, "score": 165833.690742273 }, { "content": "fn log_entry_from_actions(actions: &[Action]) -> Result<String, serde_json::Error> {\n\n let mut 
jsons = Vec::<String>::new();\n\n\n\n for action in actions {\n\n let json = serde_json::to_string(action)?;\n\n jsons.push(json);\n\n }\n\n\n\n Ok(jsons.join(\"\\n\"))\n\n}\n\n\n", "file_path": "rust/src/delta.rs", "rank": 8, "score": 164489.71872503802 }, { "content": "#[async_trait::async_trait]\n\npub trait StorageBackend: Send + Sync + Debug {\n\n /// Create a new path by appending `path_to_join` as a new component to `path`.\n\n #[inline]\n\n fn join_path(&self, path: &str, path_to_join: &str) -> String {\n\n let normalized_path = path.trim_end_matches('/');\n\n format!(\"{}/{}\", normalized_path, path_to_join)\n\n }\n\n\n\n /// More efficient path join for multiple path components. Use this method if you need to\n\n /// combine more than two path components.\n\n #[inline]\n\n fn join_paths(&self, paths: &[&str]) -> String {\n\n paths\n\n .iter()\n\n .map(|s| s.trim_end_matches('/'))\n\n .collect::<Vec<_>>()\n\n .join(\"/\")\n\n }\n\n\n\n /// Returns trimed path with trailing path separator removed.\n", "file_path": "rust/src/storage/mod.rs", "rank": 9, "score": 160738.866129603 }, { "content": "pub fn setup_dynamodb(key: &str) {\n\n std::env::set_var(\"AWS_S3_LOCKING_PROVIDER\", \"dynamodb\");\n\n std::env::set_var(\"DYNAMO_LOCK_TABLE_NAME\", \"test_table\");\n\n std::env::set_var(\"DYNAMO_LOCK_PARTITION_KEY_VALUE\", key);\n\n std::env::set_var(\"DYNAMO_LOCK_REFRESH_PERIOD_MILLIS\", \"100\");\n\n std::env::set_var(\"DYNAMO_LOCK_ADDITIONAL_TIME_TO_WAIT_MILLIS\", \"100\");\n\n}\n\n\n\npub async fn cleanup_dir_except(path: &str, ignore_files: Vec<String>) {\n\n setup();\n\n let client = S3Client::new(region());\n\n let dir = deltalake::parse_uri(path).unwrap().into_s3object().unwrap();\n\n\n\n for obj in list_objects(&client, dir.bucket, dir.key).await {\n\n let name = obj.split(\"/\").last().unwrap().to_string();\n\n if !ignore_files.contains(&name) && !name.starts_with(\".\") {\n\n let req = DeleteObjectRequest {\n\n bucket: dir.bucket.to_string(),\n\n key: 
obj,\n\n ..Default::default()\n", "file_path": "rust/tests/s3_common/mod.rs", "rank": 10, "score": 154645.37132940895 }, { "content": "// very naive implementation for plucking the partition value from the first element of a column array.\n\n// ideally, we would do some validation to ensure the record batch containing the passed partition column contains only distinct values.\n\n// if we calculate stats _first_, we can avoid the extra iteration by ensuring max and min match for the column.\n\n// however, stats are optional and can be added later with `dataChange` false log entries, and it may be more appropriate to add stats _later_ to speed up the initial write.\n\n// a happy middle-road might be to compute stats for partition columns only on the initial write since we should validate partition values anyway, and compute additional stats later (at checkpoint time perhaps?).\n\n// also this does not currently support nested partition columns and many other data types.\n\nfn stringified_partition_value(arr: &Arc<dyn Array>) -> Result<String, DeltaWriterError> {\n\n let data_type = arr.data_type();\n\n\n\n let s = match data_type {\n\n DataType::Int8 => as_primitive_array::<Int8Type>(arr).value(0).to_string(),\n\n DataType::Int16 => as_primitive_array::<Int16Type>(arr).value(0).to_string(),\n\n DataType::Int32 => as_primitive_array::<Int32Type>(arr).value(0).to_string(),\n\n DataType::Int64 => as_primitive_array::<Int64Type>(arr).value(0).to_string(),\n\n DataType::UInt8 => as_primitive_array::<UInt8Type>(arr).value(0).to_string(),\n\n DataType::UInt16 => as_primitive_array::<UInt16Type>(arr).value(0).to_string(),\n\n DataType::UInt32 => as_primitive_array::<UInt32Type>(arr).value(0).to_string(),\n\n DataType::UInt64 => as_primitive_array::<UInt64Type>(arr).value(0).to_string(),\n\n DataType::Utf8 => {\n\n let data = arrow::array::as_string_array(arr);\n\n\n\n data.value(0).to_string()\n\n }\n\n // TODO: handle more types\n\n _ => {\n\n unimplemented!(\"Unimplemented 
data type: {:?}\", data_type);\n", "file_path": "rust/tests/write_exploration.rs", "rank": 11, "score": 152600.16358646046 }, { "content": "pub fn create_add(\n\n partition_values: &HashMap<String, String>,\n\n path: String,\n\n size: i64,\n\n record_batch: &RecordBatch,\n\n) -> Result<Add, DeltaWriterError> {\n\n let stats = Stats {\n\n num_records: record_batch.num_rows() as i64,\n\n // TODO: calculate additional stats\n\n // look at https://github.com/apache/arrow/blob/master/rust/arrow/src/compute/kernels/aggregate.rs for pulling these stats\n\n min_values: HashMap::new(),\n\n max_values: HashMap::new(),\n\n null_count: HashMap::new(),\n\n };\n\n let stats_string = serde_json::to_string(&stats).unwrap();\n\n\n\n let modification_time = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();\n\n let modification_time = modification_time.as_millis() as i64;\n\n\n\n let add = Add {\n", "file_path": "rust/tests/write_exploration.rs", "rank": 12, "score": 152430.886834894 }, { "content": "pub fn extract_partition_values(\n\n metadata: &DeltaTableMetaData,\n\n record_batch: &RecordBatch,\n\n) -> Result<HashMap<String, String>, DeltaWriterError> {\n\n let partition_cols = metadata.partition_columns.as_slice();\n\n\n\n let mut partition_values = HashMap::new();\n\n\n\n for col_name in partition_cols.iter() {\n\n let arrow_schema = record_batch.schema();\n\n\n\n let i = arrow_schema.index_of(col_name)?;\n\n let col = record_batch.column(i);\n\n\n\n let partition_string = stringified_partition_value(col)?;\n\n\n\n partition_values.insert(col_name.clone(), partition_string);\n\n }\n\n\n\n Ok(partition_values)\n\n}\n\n\n", "file_path": "rust/tests/write_exploration.rs", "rank": 13, "score": 149325.3120605974 }, { "content": "pub fn record_batch_from_json_buffer(\n\n arrow_schema_ref: Arc<ArrowSchema>,\n\n json_buffer: &[Value],\n\n) -> Result<RecordBatch, DeltaWriterError> {\n\n let row_count = json_buffer.len();\n\n let mut value_ter = 
InMemValueIter::from_vec(json_buffer);\n\n let decoder = Decoder::new(arrow_schema_ref.clone(), row_count, None);\n\n let batch = decoder.next_batch(&mut value_ter)?;\n\n\n\n // handle none\n\n let batch = batch.unwrap();\n\n\n\n Ok(batch)\n\n}\n\n\n", "file_path": "rust/tests/write_exploration.rs", "rank": 14, "score": 146397.61773719487 }, { "content": "fn bucket_and_key_from_event(event: &Value) -> Result<(String, String), CheckPointLambdaError> {\n\n let s3_value = event\n\n .get(\"Records\")\n\n .and_then(|v| v.as_array())\n\n .and_then(|arr| arr.get(0))\n\n .and_then(|elem| elem.get(\"s3\"))\n\n .ok_or_else(|| CheckPointLambdaError::InvalidEventStructure(event.to_string()))?;\n\n\n\n let bucket = s3_value\n\n .get(\"bucket\")\n\n .and_then(|v| v.get(\"name\"))\n\n .and_then(|v| v.as_str())\n\n .ok_or_else(|| CheckPointLambdaError::InvalidEventStructure(event.to_string()))?\n\n .to_string();\n\n\n\n let key = s3_value\n\n .get(\"object\")\n\n .and_then(|v| v.get(\"key\"))\n\n .and_then(|v| v.as_str())\n\n .ok_or_else(|| CheckPointLambdaError::InvalidEventStructure(event.to_string()))?\n\n .to_string();\n\n\n\n Ok((bucket, key))\n\n}\n\n\n", "file_path": "aws/delta-checkpoint/src/main.rs", "rank": 15, "score": 145128.91519408434 }, { "content": "#[pyfunction]\n\nfn rust_core_version() -> &'static str {\n\n deltalake::crate_version()\n\n}\n\n\n\n#[pymodule]\n", "file_path": "python/src/lib.rs", "rank": 16, "score": 144751.69491475692 }, { "content": "fn create_s3_client(region: Region) -> Result<S3Client, StorageError> {\n\n let dispatcher = HttpClient::new()\n\n .map_err(|_| StorageError::S3Generic(\"Failed to create request dispatcher\".to_string()))?;\n\n\n\n let client = match std::env::var(\"AWS_WEB_IDENTITY_TOKEN_FILE\") {\n\n Ok(_) => {\n\n let provider = WebIdentityProvider::from_k8s_env();\n\n let provider = AutoRefreshingProvider::new(provider).map_err(|e| {\n\n StorageError::S3Generic(format!(\n\n \"Failed to retrieve S3 credentials with message: 
{}\",\n\n e.message\n\n ))\n\n })?;\n\n S3Client::new_with(dispatcher, provider, region)\n\n }\n\n Err(_) => S3Client::new_with(dispatcher, ChainProvider::new(), region),\n\n };\n\n\n\n Ok(client)\n\n}\n\n\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 17, "score": 143394.26092133473 }, { "content": "fn ds_to_ts(ds: &str) -> i64 {\n\n let fixed_dt = DateTime::<FixedOffset>::parse_from_rfc3339(ds).unwrap();\n\n DateTime::<Utc>::from(fixed_dt).timestamp()\n\n}\n\n\n\n#[tokio::test]\n\nasync fn time_travel_by_ds() {\n\n // git does not preserve mtime, so we need to manually set it in the test\n\n let log_dir = \"./tests/data/simple_table/_delta_log\";\n\n let log_mtime_pair = vec![\n\n (\"00000000000000000000.json\", \"2020-05-01T22:47:31-07:00\"),\n\n (\"00000000000000000001.json\", \"2020-05-02T22:47:31-07:00\"),\n\n (\"00000000000000000002.json\", \"2020-05-03T22:47:31-07:00\"),\n\n (\"00000000000000000003.json\", \"2020-05-04T22:47:31-07:00\"),\n\n (\"00000000000000000004.json\", \"2020-05-05T22:47:31-07:00\"),\n\n ];\n\n for (fname, ds) in log_mtime_pair {\n\n let ts = ds_to_ts(ds);\n\n utime::set_file_times(Path::new(log_dir).join(fname), ts, ts).unwrap();\n\n }\n", "file_path": "rust/tests/read_simple_table_test.rs", "rank": 18, "score": 131959.9374285798 }, { "content": "fn cleanup_log_dir() {\n\n let log_dir = PathBuf::from(\"./tests/data/write_exploration/_delta_log\");\n\n let paths = fs::read_dir(log_dir.as_path()).unwrap();\n\n\n\n for p in paths {\n\n match p {\n\n Ok(d) => {\n\n let path = d.path();\n\n\n\n if let Some(extension) = path.extension() {\n\n if extension == \"json\" && path.file_stem().unwrap() != \"00000000000000000000\" {\n\n fs::remove_file(path).unwrap();\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n let data_dir = PathBuf::from(\"./tests/data/write_exploration\");\n", "file_path": "rust/tests/write_exploration.rs", "rank": 19, "score": 131385.27570959376 }, { "content": "#[async_trait::async_trait]\n\npub trait LockClient: 
Send + Sync + Debug {\n\n /// Attempts to acquire lock. If successful, returns the lock.\n\n /// Otherwise returns [`Option::None`] which is retryable action.\n\n /// Visit implementation docs for more details.\n\n async fn try_acquire_lock(&self, data: &str) -> Result<Option<LockItem>, StorageError>;\n\n\n\n /// Returns current lock from DynamoDB (if any).\n\n async fn get_lock(&self) -> Result<Option<LockItem>, StorageError>;\n\n\n\n /// Update data in the upstream lock of the current user still has it.\n\n /// The returned lock will have a new `rvn` so it'll increase the lease duration\n\n /// as this method is usually called when the work with a lock is extended.\n\n async fn update_data(&self, lock: &LockItem) -> Result<LockItem, StorageError>;\n\n\n\n /// Releases the given lock if the current user still has it, returning true if the lock was\n\n /// successfully released, and false if someone else already stole the lock\n\n async fn release_lock(&self, lock: &LockItem) -> Result<bool, StorageError>;\n\n}\n\n\n\nconst DEFAULT_MAX_RETRY_ACQUIRE_LOCK_ATTEMPTS: u32 = 10_000;\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 20, "score": 129735.97036069182 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn try_create_lock_client(_region: Region) -> Result<Option<Box<dyn LockClient>>, StorageError> {\n\n match std::env::var(\"AWS_S3_LOCKING_PROVIDER\") {\n\n Ok(p) if p.to_lowercase() == \"dynamodb\" => {\n\n let client = dynamodb_lock::DynamoDbLockClient::new(\n\n rusoto_dynamodb::DynamoDbClient::new(_region),\n\n dynamodb_lock::Options::default(),\n\n );\n\n Ok(Some(Box::new(client)))\n\n }\n\n _ => Ok(None),\n\n }\n\n}\n\n\n\n/// Abstraction over a distributive lock provider\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 21, "score": 126496.30525602361 }, { "content": "fn table_path_and_version_from_key(\n\n key: &str,\n\n) -> Result<(String, DeltaDataTypeVersion), CheckPointLambdaError> {\n\n lazy_static! 
{\n\n static ref JSON_LOG_ENTRY_REGEX: Regex =\n\n Regex::new(r#\"(.*)/_delta_log/0*(\\d+)\\.json$\"#).unwrap();\n\n }\n\n\n\n match JSON_LOG_ENTRY_REGEX.captures(key.as_ref()) {\n\n Some(captures) => {\n\n let table_path = captures\n\n .get(1)\n\n .ok_or_else(|| CheckPointLambdaError::ObjectKeyParseFailed(key.to_string()))?\n\n .as_str()\n\n .to_string();\n\n let version_str = captures\n\n .get(2)\n\n .ok_or_else(|| CheckPointLambdaError::ObjectKeyParseFailed(key.to_string()))?\n\n .as_str();\n\n let version = version_str.parse::<DeltaDataTypeVersion>()?;\n\n\n\n Ok((table_path, version))\n\n }\n\n _ => Err(CheckPointLambdaError::ObjectKeyParseFailed(key.to_string())),\n\n }\n\n}\n\n\n", "file_path": "aws/delta-checkpoint/src/main.rs", "rank": 22, "score": 124886.18899315663 }, { "content": "pub fn setup() {\n\n std::env::set_var(\"AWS_REGION\", \"us-east-2\");\n\n std::env::set_var(\"AWS_ACCESS_KEY_ID\", \"test\");\n\n std::env::set_var(\"AWS_SECRET_ACCESS_KEY\", \"test\");\n\n std::env::set_var(\"AWS_ENDPOINT_URL\", ENDPOINT);\n\n}\n\n\n", "file_path": "rust/tests/s3_common/mod.rs", "rank": 23, "score": 117328.65060932546 }, { "content": "pub fn cleanup_dir_except<P: AsRef<Path>>(path: P, ignore_files: Vec<String>) {\n\n for p in fs::read_dir(path).unwrap() {\n\n if let Ok(d) = p {\n\n let path = d.path();\n\n let name = d.path().file_name().unwrap().to_str().unwrap().to_string();\n\n\n\n if !ignore_files.contains(&name) && !name.starts_with(\".\") {\n\n fs::remove_file(&path).unwrap();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rust/tests/fs_common/mod.rs", "rank": 24, "score": 116029.14988283865 }, { "content": "fn gen_action_type_error(action: &str, field: &str, expected_type: &str) -> ActionError {\n\n ActionError::InvalidField(format!(\n\n \"type for {} in {} action should be {}\",\n\n field, action, expected_type\n\n ))\n\n}\n\n\n\n/// Struct used to represent minValues and maxValues in add action statistics.\n\n#[derive(Serialize, Deserialize, Debug, 
PartialEq, Eq)]\n\n#[serde(untagged)]\n\npub enum ColumnValueStat {\n\n /// Composite HashMap representation of statistics.\n\n Column(HashMap<String, ColumnValueStat>),\n\n /// Json representation of statistics.\n\n Value(serde_json::Value),\n\n}\n\n\n\nimpl ColumnValueStat {\n\n /// Returns the HashMap representation of the ColumnValueStat.\n\n pub fn as_column(&self) -> Option<&HashMap<String, ColumnValueStat>> {\n", "file_path": "rust/src/action.rs", "rank": 25, "score": 112309.21036815664 }, { "content": "pub fn region() -> Region {\n\n Region::Custom {\n\n name: \"custom\".to_string(),\n\n endpoint: ENDPOINT.to_string(),\n\n }\n\n}\n\n\n", "file_path": "rust/tests/s3_common/mod.rs", "rank": 26, "score": 110299.78339762823 }, { "content": "/// Converts Rust String into DynamoDB string AttributeValue\n\nfn attr<T: ToString>(s: T) -> AttributeValue {\n\n AttributeValue {\n\n s: Some(s.to_string()),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 27, "score": 106288.67989578111 }, { "content": "struct ParquetBuffer {\n\n writer: ArrowWriter<InMemoryWriteableCursor>,\n\n cursor: InMemoryWriteableCursor,\n\n}\n\n\n\nimpl ParquetBuffer {\n\n fn try_new(schema: arrow::datatypes::SchemaRef) -> Result<Self, DeltaTableError> {\n\n // Initialize writer properties for the underlying arrow writer\n\n let writer_properties = WriterProperties::builder()\n\n // NOTE: Consider extracting config for writer properties and setting more than just compression\n\n .set_compression(Compression::SNAPPY)\n\n .build();\n\n\n\n let cursor = InMemoryWriteableCursor::default();\n\n let writer = ArrowWriter::try_new(cursor.clone(), schema, Some(writer_properties))?;\n\n\n\n Ok(Self { writer, cursor })\n\n }\n\n\n\n fn write_batch(&mut self, batch: &RecordBatch) -> Result<(), DeltaTableError> {\n", "file_path": "rust/src/writer.rs", "rank": 28, "score": 103347.32331412873 }, { "content": "#[test]\n\nfn 
test_arrow_from_delta_decimal_type() {\n\n let precision = 20;\n\n let scale = 2;\n\n let decimal_type = String::from(format![\"decimal({p},{s})\", p = precision, s = scale]);\n\n let decimal_field = deltalake::SchemaDataType::primitive(decimal_type);\n\n assert_eq!(\n\n <ArrowDataType as TryFrom<&deltalake::SchemaDataType>>::try_from(&decimal_field).unwrap(),\n\n ArrowDataType::Decimal(precision, scale)\n\n );\n\n}\n\n\n", "file_path": "rust/tests/delta_arrow_test.rs", "rank": 29, "score": 100676.83015029832 }, { "content": " def version(self) -> int:\n\n \"\"\"\n\n Get the version of the DeltaTable.\n\n\n\n :return: The current version of the DeltaTable\n\n \"\"\"\n", "file_path": "python/deltalake/table.py", "rank": 30, "score": 100325.9255970882 }, { "content": "#[pyclass]\n\nstruct RawDeltaTable {\n\n _table: deltalake::DeltaTable,\n\n}\n\n\n", "file_path": "python/src/lib.rs", "rank": 31, "score": 100313.3826224356 }, { "content": " def schema(self) -> Schema:\n\n \"\"\"\n\n Get the current schema of the DeltaTable.\n\n\n\n :return: the current Schema registered in the transaction log\n\n \"\"\"\n", "file_path": "python/deltalake/table.py", "rank": 32, "score": 100117.77615357585 }, { "content": "#[test]\n\nfn test_arrow_from_delta_wrong_decimal_type() {\n\n let precision = 20;\n\n let scale = \"wrong\";\n\n let decimal_type = String::from(format![\"decimal({p},{s})\", p = precision, s = scale]);\n\n let _error = format!(\n\n \"Invalid precision or scale decimal type for Arrow: {}\",\n\n scale\n\n );\n\n let decimal_field = deltalake::SchemaDataType::primitive(decimal_type);\n\n assert!(matches!(\n\n <ArrowDataType as TryFrom<&deltalake::SchemaDataType>>::try_from(&decimal_field)\n\n .unwrap_err(),\n\n arrow::error::ArrowError::SchemaError(_error),\n\n ));\n\n}\n", "file_path": "rust/tests/delta_arrow_test.rs", "rank": 33, "score": 99036.7711333512 }, { "content": "fn populate_hashmap_from_parquet_map(\n\n map: &mut HashMap<String, String>,\n\n pmap: 
&parquet::record::Map,\n\n) -> Result<(), &'static str> {\n\n let keys = pmap.get_keys();\n\n let values = pmap.get_values();\n\n for j in 0..pmap.len() {\n\n map.entry(\n\n keys.get_string(j)\n\n .map_err(|_| \"key for HashMap in parquet has to be a string\")?\n\n .clone(),\n\n )\n\n .or_insert(\n\n values\n\n .get_string(j)\n\n .map_err(|_| \"value for HashMap in parquet has to be a string\")?\n\n .clone(),\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rust/src/action.rs", "rank": 34, "score": 96293.50314903563 }, { "content": "#[pyclass]\n\nstruct RawDeltaTableMetaData {\n\n #[pyo3(get)]\n\n id: String,\n\n #[pyo3(get)]\n\n name: Option<String>,\n\n #[pyo3(get)]\n\n description: Option<String>,\n\n #[pyo3(get)]\n\n partition_columns: Vec<String>,\n\n #[pyo3(get)]\n\n created_time: deltalake::DeltaDataTypeTimestamp,\n\n #[pyo3(get)]\n\n configuration: HashMap<String, String>,\n\n}\n\n\n\n#[pymethods]\n\nimpl RawDeltaTable {\n\n #[new]\n\n fn new(table_uri: &str, version: Option<deltalake::DeltaDataTypeLong>) -> PyResult<Self> {\n\n let table = match version {\n", "file_path": "python/src/lib.rs", "rank": 35, "score": 96046.88452738365 }, { "content": "fn prepare_fs() {\n\n fs_common::cleanup_dir_except(\n\n \"./tests/data/concurrent_workers/_delta_log\",\n\n vec![\"00000000000000000000.json\".to_string()],\n\n );\n\n}\n", "file_path": "rust/tests/concurrent_writes_test.rs", "rank": 36, "score": 95976.28607168864 }, { "content": "#[test]\n\n#[cfg(feature = \"rust-dataframe-ext\")]\n\nfn dataframe_from_delta_table() {\n\n let df = DataFrame::from_delta_table(\"./tests/data/simple_table\").unwrap();\n\n assert_eq!(1, df.num_columns());\n\n assert_eq!(3, df.num_rows());\n\n\n\n assert_eq!(\n\n df.column_by_name(\"id\")\n\n .data()\n\n .chunks()\n\n .iter()\n\n .map(|chunk| UInt64Array::from(chunk.data()).value_slice(0, 1)[0])\n\n .collect::<Vec<u64>>(),\n\n vec![5u64, 7u64, 9u64],\n\n );\n\n}\n\n\n", "file_path": "rust/tests/dataframe_test.rs", "rank": 37, 
"score": 95641.9530848536 }, { "content": "fn to_storage_err(err: Box<dyn Error + Sync + std::marker::Send>) -> StorageError {\n\n match err.downcast_ref::<AzureError>() {\n\n Some(AzureError::UnexpectedHTTPResult(e)) if e.status_code().as_u16() == 404 => {\n\n StorageError::NotFound\n\n }\n\n _ => StorageError::AzureGeneric { source: err },\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl StorageBackend for AdlsGen2Backend {\n\n async fn head_obj(&self, path: &str) -> Result<ObjectMeta, StorageError> {\n\n debug!(\"Getting properties for {}\", path);\n\n let obj = parse_uri(path)?.into_adlsgen2_object()?;\n\n self.validate_container(&obj)?;\n\n\n\n let properties = self\n\n .container_client\n\n .as_blob_client(obj.path)\n\n .get_properties()\n", "file_path": "rust/src/storage/azure.rs", "rank": 38, "score": 94834.67834714272 }, { "content": "#[test]\n\n#[cfg(feature = \"rust-dataframe-ext\")]\n\nfn dataframe_from_delta_table_with_time_travel() {\n\n // start with 0..5\n\n let mut df = DataFrame::from_delta_table_with_version(\"./tests/data/simple_table\", 0).unwrap();\n\n assert_eq!(1, df.num_columns());\n\n assert_eq!(5, df.num_rows());\n\n assert_eq!(\n\n df.column_by_name(\"id\")\n\n .data()\n\n .chunks()\n\n .iter()\n\n .map(|chunk| UInt64Array::from(chunk.data()).value_slice(0, 1)[0])\n\n .collect::<Vec<u64>>(),\n\n (0u64..5u64).collect::<Vec<u64>>(),\n\n );\n\n\n\n // upsert with 0..20\n\n df = DataFrame::from_delta_table_with_version(\"./tests/data/simple_table\", 1).unwrap();\n\n assert_eq!(1, df.num_columns());\n\n assert_eq!(20, df.num_rows());\n\n let mut expected = df\n", "file_path": "rust/tests/dataframe_test.rs", "rank": 39, "score": 91648.45358983782 }, { "content": "struct AcquireLockState<'a> {\n\n client: &'a DynamoDbLockClient,\n\n cached_lock: Option<LockItem>,\n\n started: Instant,\n\n timeout_in: Duration,\n\n}\n\n\n\nimpl<'a> AcquireLockState<'a> {\n\n /// If lock is expirable (lease_duration is set) then this function returns 
`true`\n\n /// if the elapsed time sine `started` is reached `timeout_in`.\n\n fn has_timed_out(&self) -> bool {\n\n self.started.elapsed() > self.timeout_in && {\n\n let non_expirable = if let Some(ref cached_lock) = self.cached_lock {\n\n cached_lock.lease_duration.is_none()\n\n } else {\n\n false\n\n };\n\n !non_expirable\n\n }\n\n }\n", "file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 40, "score": 90278.7276481557 }, { "content": "fn parse_obj_last_modified_time(\n\n last_modified: &Option<String>,\n\n) -> Result<DateTime<Utc>, StorageError> {\n\n let dt_str = last_modified.as_ref().ok_or_else(|| {\n\n StorageError::S3Generic(\"S3 Object missing last modified attribute\".to_string())\n\n })?;\n\n // last modified time in object is returned in rfc3339 format\n\n // https://docs.aws.amazon.com/AmazonS3/latest/API/API_Object.html\n\n let dt = DateTime::<FixedOffset>::parse_from_rfc3339(dt_str).map_err(|e| {\n\n StorageError::S3Generic(format!(\n\n \"Failed to parse S3 modified time as rfc3339: {}, got: {:?}\",\n\n e, last_modified,\n\n ))\n\n })?;\n\n\n\n Ok(DateTime::<Utc>::from(dt))\n\n}\n\n\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 41, "score": 89304.72123558207 }, { "content": "#[test]\n\nfn test_create_delta_table_partition() {\n\n let year = \"2021\";\n\n let path = format!(\"year={}\", year);\n\n assert_eq!(\n\n deltalake::DeltaTablePartition::try_from(path.as_ref()).unwrap(),\n\n deltalake::DeltaTablePartition {\n\n key: \"year\",\n\n value: year\n\n }\n\n );\n\n\n\n let _wrong_path = \"year=2021/month=\";\n\n assert!(matches!(\n\n deltalake::DeltaTablePartition::try_from(_wrong_path).unwrap_err(),\n\n deltalake::DeltaTableError::PartitionError {\n\n partition: _wrong_path\n\n },\n\n ))\n\n}\n\n\n", "file_path": "rust/tests/read_delta_partitions_test.rs", "rank": 42, "score": 88087.64797495713 }, { "content": "fn now_millis() -> u128 {\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap()\n\n .as_millis()\n\n}\n\n\n", 
"file_path": "rust/src/storage/s3/dynamodb_lock.rs", "rank": 43, "score": 88070.11503781668 }, { "content": "fn parse_head_obj_last_modified_time(\n\n last_modified: &Option<String>,\n\n) -> Result<DateTime<Utc>, StorageError> {\n\n let dt_str = last_modified.as_ref().ok_or_else(|| {\n\n StorageError::S3Generic(\"S3 Object missing last modified attribute\".to_string())\n\n })?;\n\n // head object response sets last-modified time in rfc2822 format:\n\n // https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html#API_HeadObject_ResponseSyntax\n\n let dt = DateTime::<FixedOffset>::parse_from_rfc2822(dt_str).map_err(|e| {\n\n StorageError::S3Generic(format!(\n\n \"Failed to parse S3 modified time as rfc2822: {}, got: {:?}\",\n\n e, last_modified,\n\n ))\n\n })?;\n\n\n\n Ok(DateTime::<Utc>::from(dt))\n\n}\n\n\n\nimpl TryFrom<rusoto_s3::Object> for ObjectMeta {\n\n type Error = StorageError;\n", "file_path": "rust/src/storage/s3/mod.rs", "rank": 44, "score": 87587.80401563377 }, { "content": "fn cleanup_checkpoint_files(log_path: &Path) {\n\n let paths = fs::read_dir(log_path).unwrap();\n\n\n\n for p in paths {\n\n match p {\n\n Ok(d) => {\n\n let path = d.path();\n\n\n\n if path.file_name().unwrap() == \"_last_checkpoint\"\n\n || (path.extension().is_some() && path.extension().unwrap() == \"parquet\")\n\n {\n\n fs::remove_file(path).unwrap();\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n", "file_path": "rust/tests/checkpoint_writer_test.rs", "rank": 45, "score": 83201.69813785805 }, { "content": "#[inline]\n\nfn rt() -> PyResult<tokio::runtime::Runtime> {\n\n tokio::runtime::Runtime::new().map_err(PyDeltaTableError::from_tokio)\n\n}\n\n\n", "file_path": "python/src/lib.rs", "rank": 46, "score": 83140.90439002108 }, { "content": "// module name need to match project name\n\nfn deltalake(py: Python, m: &PyModule) -> PyResult<()> {\n\n env_logger::Builder::from_env(env_logger::Env::default().default_filter_or(\"warn\")).init();\n\n\n\n 
m.add_function(pyo3::wrap_pyfunction!(rust_core_version, m)?)?;\n\n m.add_class::<RawDeltaTable>()?;\n\n m.add_class::<RawDeltaTableMetaData>()?;\n\n m.add(\"DeltaTableError\", py.get_type::<PyDeltaTableError>())?;\n\n Ok(())\n\n}\n", "file_path": "python/src/lib.rs", "rank": 47, "score": 79944.33960301918 }, { "content": " def self.open_table(table_path)\n\n Table.new(table_path)\n\n end\n\nend\n", "file_path": "ruby/lib/deltalake.rb", "rank": 48, "score": 78138.67660720002 }, { "content": "class Schema:\n\n \"\"\"Create a DeltaTable Schema instance.\"\"\"\n\n\n\n fields: List[Field]\n\n json_value: Dict[str, Any]\n\n\n\n def __str__(self) -> str:\n\n field_strs = [str(f) for f in self.fields]\n\n return f\"Schema({', '.join(field_strs)})\"\n\n\n\n def json(self) -> Dict[str, Any]:\n\n return self.json_value\n\n\n\n @classmethod\n\n def from_json(cls, json_data: str) -> \"Schema\":\n\n \"\"\"\n\n Generate a DeltaTable Schema from a json format.\n\n\n\n :param json_data: the schema in json format\n\n :return: the DeltaTable schema\n\n \"\"\"\n\n json_value = json.loads(json_data)\n\n fields = []\n\n for json_field in json_value[\"fields\"]:\n\n if isinstance(json_field[\"type\"], str):\n\n data_type = DataType(json_field[\"type\"])\n\n else:\n\n data_type = DataType.from_dict(json_field[\"type\"])\n\n field = Field(\n\n name=json_field[\"name\"],\n\n type=data_type,\n\n nullable=json_field[\"nullable\"],\n\n metadata=json_field.get(\"metadata\"),\n\n )\n\n fields.append(field)\n", "file_path": "python/deltalake/schema.py", "rank": 49, "score": 76458.31321218194 }, { "content": "def test_table_schema():\n\n table_path = \"../rust/tests/data/simple_table\"\n\n dt = DeltaTable(table_path)\n\n schema = dt.schema()\n\n assert schema.json() == {\n\n \"fields\": [{\"metadata\": {}, \"name\": \"id\", \"nullable\": True, \"type\": \"long\"}],\n\n \"type\": \"struct\",\n\n }\n\n assert len(schema.fields) == 1\n\n field = schema.fields[0]\n\n assert field.name == \"id\"\n\n 
assert field.type == DataType(\"long\")\n\n assert field.nullable is True\n\n assert field.metadata == {}\n\n\n\n json = '{\"type\":\"struct\",\"fields\":[{\"name\":\"x\",\"type\":{\"type\":\"array\",\"elementType\":\"long\",\"containsNull\":true},\"nullable\":true,\"metadata\":{}}]}'\n\n schema = Schema.from_json(json)\n", "file_path": "python/tests/test_schema.py", "rank": 50, "score": 72703.28799396759 }, { "content": "def test_table_schema_pyarrow_020():\n\n table_path = \"../rust/tests/data/delta-0.2.0\"\n\n dt = DeltaTable(table_path)\n\n schema = dt.pyarrow_schema()\n\n field = schema.field(0)\n\n assert len(schema.types) == 1\n\n assert field.name == \"value\"\n\n assert field.type == pyarrow.int32()\n\n assert field.nullable is True\n", "file_path": "python/tests/test_schema.py", "rank": 51, "score": 71145.19028417785 }, { "content": "def test_table_schema_pyarrow_simple():\n\n table_path = \"../rust/tests/data/simple_table\"\n\n dt = DeltaTable(table_path)\n\n schema = dt.pyarrow_schema()\n\n field = schema.field(0)\n\n assert len(schema.types) == 1\n\n assert field.name == \"id\"\n\n assert field.type == pyarrow.int64()\n\n assert field.nullable is True\n", "file_path": "python/tests/test_schema.py", "rank": 52, "score": 69654.94863682841 }, { "content": " def __str__(self) -> str:\n\n field_strs = [str(f) for f in self.fields]\n", "file_path": "python/deltalake/schema.py", "rank": 53, "score": 68243.44185271783 }, { "content": " def __str__(self) -> str:\n\n return (\n\n f\"Metadata(id: {self._metadata.id}, name: {self._metadata.name}, \"\n\n f\"description: {self._metadata.description}, partitionColumns: {self._metadata.partition_columns}, \"\n\n f\"created_time: {self.created_time}, configuration={self._metadata.configuration})\"\n", "file_path": "python/deltalake/table.py", "rank": 54, "score": 67980.28056702246 }, { "content": "def test_read_simple_table_by_version_to_dict():\n\n table_path = \"../rust/tests/data/delta-0.2.0\"\n\n dt = 
DeltaTable(table_path, version=2)\n", "file_path": "python/tests/test_table_read.py", "rank": 55, "score": 66953.14805169974 }, { "content": "class StructType(DataType):\n\n \"\"\"Concrete class for struct data types.\"\"\"\n\n\n\n fields: List[\"Field\"]\n\n type: str\n\n\n\n def __init__(self, fields: List[\"Field\"]):\n\n super().__init__(\"struct\")\n\n self.fields = fields\n\n\n\n def __str__(self) -> str:\n\n field_strs = [str(f) for f in self.fields]\n", "file_path": "python/deltalake/schema.py", "rank": 56, "score": 66398.25921680551 }, { "content": " def load_version(self, version: int) -> None:\n\n \"\"\"\n\n Load a DeltaTable with a specified version.\n\n\n\n :param version: the identifier of the version of the DeltaTable to load\n\n \"\"\"\n", "file_path": "python/deltalake/table.py", "rank": 57, "score": 66023.8885001538 }, { "content": " def pyarrow_schema(self) -> pyarrow.Schema:\n\n \"\"\"\n\n Get the current schema of the DeltaTable with the Parquet PyArrow format.\n\n\n\n :return: the current Schema with the Parquet PyArrow format\n\n \"\"\"\n", "file_path": "python/deltalake/table.py", "rank": 58, "score": 65826.8383016253 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n platform_cfg::set();\n\n}\n", "file_path": "rust/build.rs", "rank": 59, "score": 65708.99692203333 }, { "content": "fn process_action(\n\n state: &mut DeltaTableState,\n\n action: Action,\n\n) -> Result<(), serde_json::error::Error> {\n\n match action {\n\n Action::add(v) => {\n\n state.files.push(v);\n\n }\n\n Action::remove(v) => {\n\n state.files.retain(|a| *a.path != v.path);\n\n state.tombstones.push(v);\n\n }\n\n Action::protocol(v) => {\n\n state.min_reader_version = v.min_reader_version;\n\n state.min_writer_version = v.min_writer_version;\n\n }\n\n Action::metaData(v) => {\n\n state.current_metadata = Some(DeltaTableMetaData::try_from(v)?);\n\n }\n\n Action::txn(v) => {\n", "file_path": "rust/src/delta.rs", "rank": 60, "score": 
63135.14142409917 }, { "content": "def get_release_version() -> str:\n\n \"\"\"\n\n Get the release version from the Cargo.toml file\n\n\n\n :return:\n\n \"\"\"\n\n cargo_content = toml.load(\"../../Cargo.toml\")\n", "file_path": "python/docs/source/conf.py", "rank": 61, "score": 62986.60387733828 }, { "content": "#[derive(thiserror::Error, Debug)]\n\nenum CheckPointLambdaError {\n\n #[error(\"Invalid table uri: {0}\")]\n\n InvalidTableUri(PathBuf),\n\n\n\n #[error(\"Invalid event structure: {0}\")]\n\n InvalidEventStructure(String),\n\n\n\n #[error(\"Failed to parse object key: {0}\")]\n\n ObjectKeyParseFailed(String),\n\n\n\n #[error(\"Failed to parse version {}\", .source)]\n\n InvalidVersionParsed {\n\n #[from]\n\n source: ParseIntError,\n\n },\n\n\n\n #[error(\"Failed to deserialize JSON {}\", source)]\n\n JSONError {\n\n #[from]\n\n source: serde_json::Error,\n", "file_path": "aws/delta-checkpoint/src/main.rs", "rank": 62, "score": 61436.30648992253 }, { "content": "#[derive(FromPyObject)]\n\nenum PartitionFilterValue<'a> {\n\n Single(&'a str),\n\n Multiple(Vec<&'a str>),\n\n}\n\n\n", "file_path": "python/src/lib.rs", "rank": 63, "score": 61270.880652719075 }, { "content": "pub trait DeltaDataframe {\n\n fn from_loaded_delta_table(table: delta::DeltaTable) -> Result<DataFrame, DeltaTableError>;\n\n fn from_delta_table(path: &str) -> Result<DataFrame, DeltaTableError>;\n\n fn from_delta_table_with_version(\n\n path: &str,\n\n version: delta::DeltaDataTypeVersion,\n\n ) -> Result<DataFrame, DeltaTableError>;\n\n}\n\n\n\nimpl DeltaDataframe for DataFrame {\n\n fn from_loaded_delta_table(\n\n delta_table: delta::DeltaTable,\n\n ) -> Result<DataFrame, DeltaTableError> {\n\n let mut batches = vec![];\n\n let mut schema = None;\n\n let table_path = Path::new(&delta_table.table_path);\n\n\n\n for fname in delta_table.get_files() {\n\n let fpath = table_path.join(fname);\n\n let file = File::open(&fpath).map_err(|e| DeltaTableError::MissingDataFile {\n", "file_path": 
"rust/src/delta_dataframe.rs", "rank": 64, "score": 59504.46539999384 }, { "content": "def test_read_simple_table_to_dict() -> None:\n\n v = rust_core_version()\n", "file_path": "python/tests/test_version.py", "rank": 65, "score": 59229.177623082556 }, { "content": "#[test]\n\nfn test_match_filters() {\n\n let partitions = vec![\n\n deltalake::DeltaTablePartition {\n\n key: \"year\",\n\n value: \"2021\",\n\n },\n\n deltalake::DeltaTablePartition {\n\n key: \"month\",\n\n value: \"12\",\n\n },\n\n ];\n\n\n\n let valid_filters = deltalake::PartitionFilter {\n\n key: \"year\",\n\n value: deltalake::PartitionValue::Equal(\"2021\"),\n\n };\n\n\n\n let valid_filter_month = deltalake::PartitionFilter {\n\n key: \"month\",\n\n value: deltalake::PartitionValue::Equal(\"12\"),\n", "file_path": "rust/tests/read_delta_partitions_test.rs", "rank": 66, "score": 58871.97891239188 }, { "content": "#[test]\n\nfn test_match_partition() {\n\n let partition_2021 = deltalake::DeltaTablePartition {\n\n key: \"year\",\n\n value: \"2021\",\n\n };\n\n let partition_2020 = deltalake::DeltaTablePartition {\n\n key: \"year\",\n\n value: \"2020\",\n\n };\n\n let partition_2019 = deltalake::DeltaTablePartition {\n\n key: \"year\",\n\n value: \"2019\",\n\n };\n\n\n\n let partition_year_2020_filter = deltalake::PartitionFilter {\n\n key: \"year\",\n\n value: deltalake::PartitionValue::Equal(\"2020\"),\n\n };\n\n let partition_month_12_filter = deltalake::PartitionFilter {\n\n key: \"month\",\n", "file_path": "rust/tests/read_delta_partitions_test.rs", "rank": 67, "score": 58871.97891239188 }, { "content": "/// Return path relative to parent_path\n\nfn extract_rel_path<'a, 'b>(\n\n parent_path: &'b str,\n\n path: &'a str,\n\n) -> Result<&'a str, DeltaTableError> {\n\n if path.starts_with(&parent_path) {\n\n // plus one to account for path separator\n\n Ok(&path[parent_path.len() + 1..])\n\n } else {\n\n Err(DeltaTableError::Generic(format!(\n\n \"Parent path `{}` is not a prefix of path 
`{}`\",\n\n parent_path, path\n\n )))\n\n }\n\n}\n\n\n\n/// In memory representation of a Delta Table\n\npub struct DeltaTable {\n\n /// The version of the table as of the most recent loaded Delta log entry.\n\n pub version: DeltaDataTypeVersion,\n\n /// The URI the DeltaTable was loaded from.\n", "file_path": "rust/src/delta.rs", "rank": 68, "score": 56511.60000301522 }, { "content": "fn tx2_actions() -> Vec<action::Action> {\n\n vec![\n\n action::Action::add(action::Add {\n\n path: String::from(\n\n \"part-00000-512e1537-8aaa-4193-b8b4-bef3de0de409-c000.snappy.parquet\",\n\n ),\n\n size: 396,\n\n partition_values: HashMap::new(),\n\n partition_values_parsed: None,\n\n modification_time: 1564524296000,\n\n data_change: true,\n\n stats: None,\n\n stats_parsed: None,\n\n tags: None,\n\n }),\n\n action::Action::add(action::Add {\n\n path: String::from(\n\n \"part-00001-4327c977-2734-4477-9507-7ccf67924649-c000.snappy.parquet\",\n\n ),\n\n size: 400,\n", "file_path": "rust/tests/simple_commit_test.rs", "rank": 69, "score": 50547.9312437716 }, { "content": "fn tx1_actions() -> Vec<action::Action> {\n\n vec![\n\n action::Action::add(action::Add {\n\n path: String::from(\n\n \"part-00000-b44fcdb0-8b06-4f3a-8606-f8311a96f6dc-c000.snappy.parquet\",\n\n ),\n\n size: 396,\n\n partition_values: HashMap::new(),\n\n partition_values_parsed: None,\n\n modification_time: 1564524294000,\n\n data_change: true,\n\n stats: None,\n\n stats_parsed: None,\n\n tags: None,\n\n }),\n\n action::Action::add(action::Add {\n\n path: String::from(\n\n \"part-00001-185eca06-e017-4dea-ae49-fc48b973e37e-c000.snappy.parquet\",\n\n ),\n\n size: 400,\n", "file_path": "rust/tests/simple_commit_test.rs", "rank": 70, "score": 50547.9312437716 }, { "content": " def to_pyarrow_table(\n\n self,\n\n partitions: Optional[List[Tuple[str, str, Any]]] = None,\n\n columns: Optional[List[str]] = None,\n\n ) -> pyarrow.Table:\n\n \"\"\"\n\n Build a PyArrow Table using data from the DeltaTable.\n\n\n\n :param 
partitions: A list of partition filters, see help(DeltaTable.files_by_partitions) for filter syntax\n\n :param columns: The columns to project. This can be a list of column names to include (order and duplicates will be preserved)\n\n :return: the PyArrow table\n\n \"\"\"\n", "file_path": "python/deltalake/table.py", "rank": 71, "score": 42944.46821922126 }, { "content": "class DeltaTable:\n\n \"\"\"Create a DeltaTable instance.\"\"\"\n\n\n\n def __init__(self, table_uri: str, version: Optional[int] = None):\n\n \"\"\"\n\n Create the Delta Table from a path with an optional version.\n\n Multiple StorageBackends are currently supported: AWS S3, Azure Data Lake Storage Gen2 and local URI.\n\n\n\n :param table_uri: the path of the DeltaTable\n\n :param version: version of the DeltaTable\n\n \"\"\"\n\n self._table = RawDeltaTable(table_uri, version=version)\n\n self._metadata = Metadata(self._table)\n\n\n\n def version(self) -> int:\n\n \"\"\"\n\n Get the version of the DeltaTable.\n\n\n\n :return: The current version of the DeltaTable\n\n \"\"\"\n\n return self._table.version()\n\n\n\n def files(self) -> List[str]:\n\n \"\"\"\n\n Get the .parquet files of the DeltaTable.\n\n\n\n :return: list of the .parquet files referenced for the current version of the DeltaTable\n\n \"\"\"\n\n return self._table.files()\n\n\n\n def files_by_partitions(\n\n self, partition_filters: List[Tuple[str, str, Any]]\n\n ) -> List[str]:\n\n \"\"\"\n\n Get the files that match a given list of partitions filters.\n\n Partitions which do not match the filter predicate will be removed from scanned data.\n\n Predicates are expressed in disjunctive normal form (DNF), like [(\"x\", \"=\", \"a\"), ...].\n\n DNF allows arbitrary boolean logical combinations of single partition predicates.\n\n The innermost tuples each describe a single partition predicate.\n\n The list of inner predicates is interpreted as a conjunction (AND), forming a more selective and multiple partition predicates.\n\n Each 
tuple has format: (key, op, value) and compares the key with the value.\n\n The supported op are: `=`, `!=`, `in`, and `not in`.\n\n If the op is in or not in, the value must be a collection such as a list, a set or a tuple.\n\n The supported type for value is str.\n\n\n\n Examples:\n\n (\"x\", \"=\", \"a\")\n\n (\"x\", \"!=\", \"a\")\n\n (\"y\", \"in\", [\"a\", \"b\", \"c\"])\n\n (\"z\", \"not in\", [\"a\",\"b\"])\n\n\n\n :param partition_filters: the partition filters that will be used for getting the matched files\n\n :return: list of the .parquet files after applying the partition filters referenced for the current version of the DeltaTable.\n\n \"\"\"\n\n try:\n\n return self._table.files_by_partitions(partition_filters)\n\n except TypeError:\n\n raise ValueError(\n\n \"Only the type String is currently allowed inside the partition filters.\"\n\n )\n\n\n\n def file_paths(self) -> List[str]:\n\n \"\"\"\n\n Get the list of files with an absolute path.\n\n\n\n :return: list of the .parquet files with an absolute URI referenced for the current version of the DeltaTable\n\n \"\"\"\n\n warnings.warn(\n\n \"Call to deprecated method file_paths. 
Please use file_uris instead.\",\n\n category=DeprecationWarning,\n\n stacklevel=2,\n\n )\n\n return self.file_uris()\n\n\n\n def file_uris(self) -> List[str]:\n\n \"\"\"\n\n Get the list of files with an absolute path.\n\n\n\n :return: list of the .parquet files with an absolute URI referenced for the current version of the DeltaTable\n\n \"\"\"\n\n return self._table.file_uris()\n\n\n\n def load_version(self, version: int) -> None:\n\n \"\"\"\n\n Load a DeltaTable with a specified version.\n\n\n\n :param version: the identifier of the version of the DeltaTable to load\n\n \"\"\"\n\n self._table.load_version(version)\n\n\n\n def schema(self) -> Schema:\n\n \"\"\"\n\n Get the current schema of the DeltaTable.\n\n\n\n :return: the current Schema registered in the transaction log\n\n \"\"\"\n\n return Schema.from_json(self._table.schema_json())\n\n\n\n def metadata(self) -> Metadata:\n\n \"\"\"\n\n Get the current metadata of the DeltaTable.\n\n\n\n :return: the current Metadata registered in the transaction log\n\n \"\"\"\n\n return self._metadata\n\n\n\n def vacuum(self, retention_hours: int, dry_run: bool = True) -> List[str]:\n\n \"\"\"\n\n Run the Vacuum command on the Delta Table: list and delete files no longer referenced by the Delta table and are older than the retention threshold.\n\n\n\n :param retention_hours: the retention threshold in hours\n\n :param dry_run: when activated, list only the files, delete otherwise\n\n :return: the list of files no longer referenced by the Delta Table and are older than the retention threshold.\n\n \"\"\"\n\n if retention_hours < 0:\n\n raise ValueError(\"The retention periods should be positive.\")\n\n\n\n return self._table.vacuum(dry_run, retention_hours)\n\n\n\n def pyarrow_schema(self) -> pyarrow.Schema:\n\n \"\"\"\n\n Get the current schema of the DeltaTable with the Parquet PyArrow format.\n\n\n\n :return: the current Schema with the Parquet PyArrow format\n\n \"\"\"\n\n return 
pyarrow_schema_from_json(self._table.arrow_schema_json())\n\n\n\n def to_pyarrow_dataset(\n\n self, partitions: Optional[List[Tuple[str, str, Any]]] = None\n\n ) -> pyarrow.dataset.Dataset:\n\n \"\"\"\n\n Build a PyArrow Dataset using data from the DeltaTable.\n\n\n\n :param partitions: A list of partition filters, see help(DeltaTable.files_by_partitions) for filter syntax\n\n :return: the PyArrow dataset in PyArrow\n\n \"\"\"\n\n if partitions is None:\n\n file_paths = self._table.file_uris()\n\n else:\n\n file_paths = self._table.files_by_partitions(partitions)\n\n paths = [urlparse(curr_file) for curr_file in file_paths]\n\n\n\n empty_delta_table = len(paths) == 0\n\n if empty_delta_table:\n\n return dataset(\n\n [],\n\n schema=self.pyarrow_schema(),\n\n partitioning=partitioning(flavor=\"hive\"),\n\n )\n\n\n\n # Decide based on the first file, if the file is on cloud storage or local\n\n if paths[0].netloc:\n\n query_str = \"\"\n\n # pyarrow doesn't properly support the AWS_ENDPOINT_URL environment variable\n\n # for non-AWS S3 like resources. 
This is a slight hack until such a\n\n # point when pyarrow learns about AWS_ENDPOINT_URL\n\n endpoint_url = os.environ.get(\"AWS_ENDPOINT_URL\")\n\n if endpoint_url is not None:\n\n endpoint = urlparse(endpoint_url)\n\n # This format specific to the URL schema inference done inside\n\n # of pyarrow, consult their tests/dataset.py for examples\n\n query_str += (\n\n f\"?scheme={endpoint.scheme}&endpoint_override={endpoint.netloc}\"\n\n )\n\n\n\n keys = [curr_file.path for curr_file in paths]\n\n return dataset(\n\n keys,\n\n schema=self.pyarrow_schema(),\n\n filesystem=f\"{paths[0].scheme}://{paths[0].netloc}{query_str}\",\n\n partitioning=partitioning(flavor=\"hive\"),\n\n )\n\n else:\n\n return dataset(\n\n file_paths,\n\n schema=self.pyarrow_schema(),\n\n format=\"parquet\",\n\n partitioning=partitioning(flavor=\"hive\"),\n\n )\n\n\n\n def to_pyarrow_table(\n\n self,\n\n partitions: Optional[List[Tuple[str, str, Any]]] = None,\n\n columns: Optional[List[str]] = None,\n\n ) -> pyarrow.Table:\n\n \"\"\"\n\n Build a PyArrow Table using data from the DeltaTable.\n\n\n\n :param partitions: A list of partition filters, see help(DeltaTable.files_by_partitions) for filter syntax\n\n :param columns: The columns to project. This can be a list of column names to include (order and duplicates will be preserved)\n\n :return: the PyArrow table\n\n \"\"\"\n\n return self.to_pyarrow_dataset(partitions).to_table(columns=columns)\n\n\n\n def to_pandas(\n\n self,\n\n partitions: Optional[List[Tuple[str, str, Any]]] = None,\n\n columns: Optional[List[str]] = None,\n\n ) -> \"pandas.DataFrame\":\n\n \"\"\"\n\n Build a pandas dataframe using data from the DeltaTable.\n\n\n\n :param partitions: A list of partition filters, see help(DeltaTable.files_by_partitions) for filter syntax\n\n :param columns: The columns to project. 
This can be a list of column names to include (order and duplicates will be preserved)\n\n :return: a pandas dataframe\n\n \"\"\"\n", "file_path": "python/deltalake/table.py", "rank": 72, "score": 42936.15362282704 }, { "content": "def pyarrow_schema_from_json(json_data: str) -> pyarrow.Schema:\n\n \"\"\"\n\n Create a Schema in PyArrow format from a Schema in json format.\n\n\n\n :param json_data: the field in json format\n\n :return: the Schema in PyArrow format\n\n \"\"\"\n\n schema_json = json.loads(json_data)\n\n arrow_fields = [pyarrow_field_from_dict(field) for field in schema_json[\"fields\"]]\n", "file_path": "python/deltalake/schema.py", "rank": 73, "score": 42480.18515253187 }, { "content": " def read_table():\n\n b.wait()\n\n t = DeltaTable(\"s3://deltars/simple\")\n\n assert t.files() == [\n\n \"part-00000-c1777d7d-89d9-4790-b38a-6ee7e24456b1-c000.snappy.parquet\",\n\n \"part-00001-7891c33d-cedc-47c3-88a6-abcfb049d3b4-c000.snappy.parquet\",\n\n \"part-00004-315835fe-fb44-4562-98f6-5e6cfa3ae45d-c000.snappy.parquet\",\n\n \"part-00007-3a0e4727-de0d-41b6-81ef-5223cf40f025-c000.snappy.parquet\",\n\n \"part-00000-2befed33-c358-4768-a43c-3eda0d2a499d-c000.snappy.parquet\",\n", "file_path": "python/tests/test_table_read.py", "rank": 74, "score": 41379.74966730767 }, { "content": "def test_schema_pyarrow_types():\n\n field_name = \"column1\"\n\n metadata = {b\"metadata_k\": b\"metadata_v\"}\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"nullable\": False,\n\n \"metadata\": metadata,\n\n \"type\": {\"name\": \"int\", \"bitWidth\": 8, \"isSigned\": True},\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.int8()\n\n assert dict(pyarrow_field.metadata) == metadata\n\n assert pyarrow_field.nullable is False\n\n\n\n field_name = \"column_timestamp_no_unit\"\n\n metadata = {b\"metadata_k\": b\"metadata_v\"}\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n 
\"nullable\": False,\n\n \"metadata\": metadata,\n\n \"type\": {\"name\": \"timestamp\"},\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.timestamp(\"ns\")\n\n assert dict(pyarrow_field.metadata) == metadata\n\n assert pyarrow_field.nullable is False\n\n\n\n field_name = \"column_timestamp_with_unit\"\n\n metadata = {b\"metadata_k\": b\"metadata_v\"}\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"nullable\": False,\n\n \"metadata\": metadata,\n\n \"type\": {\"name\": \"timestamp\", \"unit\": \"MICROSECOND\"},\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.timestamp(\"us\")\n\n assert dict(pyarrow_field.metadata) == metadata\n\n assert pyarrow_field.nullable is False\n\n\n\n field_name = \"date_with_day_unit\"\n\n metadata = {b\"metadata_k\": b\"metadata_v\"}\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"nullable\": False,\n\n \"metadata\": metadata,\n\n \"type\": {\"name\": \"date\", \"unit\": \"DAY\"},\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.date32()\n\n assert dict(pyarrow_field.metadata) == metadata\n\n assert pyarrow_field.nullable is False\n\n\n\n field_name = \"simple_list\"\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"nullable\": False,\n\n \"metadata\": metadata,\n\n \"type\": {\"name\": \"list\"},\n\n \"children\": [{\"type\": {\"name\": \"int\", \"bitWidth\": 32, \"isSigned\": True}}],\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.list_(\n\n pyarrow.field(\"element\", pyarrow.int32())\n\n )\n\n assert pyarrow_field.metadata == metadata\n\n assert pyarrow_field.nullable is False\n\n\n\n field_name = \"dictionary\"\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"nullable\": False,\n\n \"metadata\": 
metadata,\n\n \"type\": {\"name\": \"int\", \"bitWidth\": 32, \"isSigned\": True},\n\n \"children\": [],\n\n \"dictionary\": {\n\n \"id\": 0,\n\n \"indexType\": {\"name\": \"int\", \"bitWidth\": 16, \"isSigned\": True},\n\n },\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.map_(pyarrow.int16(), pyarrow.int32())\n\n assert pyarrow_field.metadata == metadata\n\n assert pyarrow_field.nullable is False\n\n\n\n field_name = \"struct_array\"\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"nullable\": False,\n\n \"metadata\": metadata,\n\n \"type\": {\"name\": \"list\"},\n\n \"children\": [],\n\n \"dictionary\": {\n\n \"id\": 0,\n\n \"indexType\": {\"name\": \"int\", \"bitWidth\": 32, \"isSigned\": True},\n\n },\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.map_(\n\n pyarrow.int32(),\n\n pyarrow.list_(\n\n pyarrow.field(\n\n \"element\",\n\n pyarrow.struct(\n\n [pyarrow.field(\"val\", pyarrow.int32(), False, metadata)]\n\n ),\n\n )\n\n ),\n\n )\n\n assert pyarrow_field.metadata == metadata\n\n assert pyarrow_field.nullable is False\n\n\n\n field_name = \"simple_dictionary\"\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"metadata\": {\"metadata_k\": \"metadata_v\"},\n\n \"nullable\": False,\n\n \"type\": {\"name\": \"dictionary\"},\n\n \"dictionary\": {\"indexType\": {\"type\": {\"name\": \"int\", \"bitWidth\": 8}}},\n\n \"children\": [{\"type\": {\"name\": \"int\", \"bitWidth\": 32}}],\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.map_(pyarrow.int8(), pyarrow.int32())\n\n assert pyarrow_field.metadata == metadata\n\n assert pyarrow_field.nullable is False\n\n\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"type\": {\"name\": \"struct\"},\n\n \"children\": [\n\n {\n\n \"name\": \"x\",\n\n \"type\": {\"name\": \"int\", 
\"bitWidth\": 64},\n\n \"nullable\": True,\n\n \"metadata\": {},\n\n }\n\n ],\n\n \"metadata\": {\"metadata_k\": \"metadata_v\"},\n\n \"nullable\": False,\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.struct(\n\n [pyarrow.field(\"x\", pyarrow.int64(), True, {})]\n\n )\n\n assert pyarrow_field.metadata == metadata\n", "file_path": "python/tests/test_schema.py", "rank": 75, "score": 40961.95867965236 }, { "content": "def test_schema_delta_types():\n\n field_name = \"column1\"\n\n metadata = {\"metadata_k\": \"metadata_v\"}\n\n delta_field = Field(\n\n name=field_name,\n\n type=DataType.from_dict({\"type\": \"integer\"}),\n\n metadata={\"metadata_k\": \"metadata_v\"},\n\n nullable=False,\n\n )\n\n assert delta_field.name == field_name\n\n assert delta_field.type == DataType(\"integer\")\n\n assert delta_field.metadata == metadata\n\n assert delta_field.nullable is False\n\n\n\n delta_field = Field(\n\n name=field_name,\n\n type=DataType.from_dict(\n\n {\"type\": \"array\", \"elementType\": {\"type\": \"integer\"}, \"containsNull\": True}\n\n ),\n\n metadata={\"metadata_k\": \"metadata_v\"},\n\n nullable=False,\n\n )\n\n assert delta_field.name == field_name\n\n assert delta_field.type == ArrayType(DataType(\"integer\"), True)\n\n assert delta_field.metadata == metadata\n\n assert delta_field.nullable is False\n\n\n\n delta_field = Field(\n\n name=field_name,\n\n type=DataType.from_dict(\n\n {\n\n \"type\": \"map\",\n\n \"keyType\": \"integer\",\n\n \"valueType\": \"integer\",\n\n \"valueContainsNull\": True,\n\n }\n\n ),\n\n metadata={\"metadata_k\": \"metadata_v\"},\n\n nullable=False,\n\n )\n\n assert delta_field.name == field_name\n\n key_type = DataType(\"integer\")\n\n value_type = DataType(\"integer\")\n\n assert delta_field.type == MapType(key_type, value_type, True)\n\n assert delta_field.metadata == metadata\n\n assert delta_field.nullable is False\n\n\n\n delta_field = Field(\n\n name=field_name,\n\n 
type=DataType.from_dict(\n\n {\n\n \"type\": \"struct\",\n\n \"fields\": [\n\n {\n\n \"name\": \"x\",\n\n \"type\": {\"type\": \"integer\"},\n\n \"nullable\": True,\n\n \"metadata\": {},\n\n }\n\n ],\n\n }\n\n ),\n\n metadata={\"metadata_k\": \"metadata_v\"},\n\n nullable=False,\n\n )\n\n assert delta_field.name == field_name\n\n assert delta_field.type == StructType([Field(\"x\", DataType(\"integer\"), True, {})])\n\n assert delta_field.metadata == metadata\n", "file_path": "python/tests/test_schema.py", "rank": 76, "score": 40961.95867965236 }, { "content": "def test_delta_table_to_pandas():\n\n table_path = \"../rust/tests/data/simple_table\"\n\n dt = DeltaTable(table_path)\n", "file_path": "python/tests/test_table_read.py", "rank": 77, "score": 39935.825067831945 }, { "content": " /// Variant representing a struct.\n\n r#struct(SchemaTypeStruct),\n\n /// Variant representing an array.\n\n array(SchemaTypeArray),\n\n /// Variant representing a map.\n\n map(SchemaTypeMap),\n\n}\n\n\n\n/// Represents the schema of the delta table.\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Schema {\n\n r#type: String,\n\n fields: Vec<SchemaField>,\n\n}\n\n\n\nimpl Schema {\n\n /// Returns the list of fields that make up the schema definition of the table.\n\n pub fn get_fields(&self) -> &Vec<SchemaField> {\n\n &self.fields\n\n }\n\n}\n", "file_path": "rust/src/schema.rs", "rank": 78, "score": 39911.71626625517 }, { "content": "#![allow(non_snake_case, non_camel_case_types)]\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n/// Type alias for a string expected to match a GUID/UUID format\n\npub type Guid = String;\n\n/// Type alias for i64/Delta long\n\npub type DeltaDataTypeLong = i64;\n\n/// Type alias representing the expected type (i64) of a Delta table version.\n\npub type DeltaDataTypeVersion = DeltaDataTypeLong;\n\n/// Type alias representing the expected type (i64/ms since Unix epoch) of a Delta timestamp.\n\npub type 
DeltaDataTypeTimestamp = DeltaDataTypeLong;\n\n/// Type alias for i32/Delta int\n\npub type DeltaDataTypeInt = i32;\n\n\n\n/// Represents a struct field defined in the Delta table schema.\n\n// https://github.com/delta-io/delta/blob/master/PROTOCOL.md#Schema-Serialization-Format\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Default, Clone)]\n\npub struct SchemaTypeStruct {\n", "file_path": "rust/src/schema.rs", "rank": 79, "score": 39910.97088650303 }, { "content": " // type field is always the string \"struct\", so we are ignoring it here\n\n r#type: String,\n\n fields: Vec<SchemaField>,\n\n}\n\n\n\nimpl SchemaTypeStruct {\n\n /// Returns the list of fields contained within the column struct.\n\n pub fn get_fields(&self) -> &Vec<SchemaField> {\n\n &self.fields\n\n }\n\n}\n\n\n\n/// Describes a specific field of the Delta table schema.\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct SchemaField {\n\n // Name of this (possibly nested) column\n\n name: String,\n\n r#type: SchemaDataType,\n\n // Boolean denoting whether this field can be null\n\n nullable: bool,\n", "file_path": "rust/src/schema.rs", "rank": 80, "score": 39910.92829716548 }, { "content": " // A JSON map containing information about this column. Keys prefixed with Delta are reserved\n\n // for the implementation.\n\n metadata: HashMap<String, String>,\n\n}\n\n\n\nimpl SchemaField {\n\n /// The column name of the schema field.\n\n pub fn get_name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n /// The data type of the schema field. 
SchemaDataType defines the possible values.\n\n pub fn get_type(&self) -> &SchemaDataType {\n\n &self.r#type\n\n }\n\n\n\n /// Whether the column/field is nullable.\n\n pub fn is_nullable(&self) -> bool {\n\n self.nullable\n\n }\n", "file_path": "rust/src/schema.rs", "rank": 81, "score": 39905.63504299928 }, { "content": "\n\n /// Additional metadata about the column/field.\n\n pub fn get_metadata(&self) -> &HashMap<String, String> {\n\n &self.metadata\n\n }\n\n}\n\n\n\n/// Schema definition for array type fields.\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct SchemaTypeArray {\n\n // type field is always the string \"array\", so we are ignoring it here\n\n r#type: String,\n\n // The type of element stored in this array represented as a string containing the name of a\n\n // primitive type, a struct definition, an array definition or a map definition\n\n elementType: Box<SchemaDataType>,\n\n // Boolean denoting whether this array can contain one or more null values\n\n containsNull: bool,\n\n}\n\n\n\nimpl SchemaTypeArray {\n", "file_path": "rust/src/schema.rs", "rank": 82, "score": 39904.6293739616 }, { "content": "impl SchemaTypeMap {\n\n /// The type of element used for the key of this map, represented as a string containing the\n\n /// name of a primitive type, a struct definition, an array definition or a map definition\n\n pub fn get_key_type(&self) -> &SchemaDataType {\n\n &self.keyType\n\n }\n\n\n\n /// The type of element contained in the value of this map, represented as a string containing the\n\n /// name of a primitive type, a struct definition, an array definition or a map definition\n\n pub fn get_value_type(&self) -> &SchemaDataType {\n\n &self.valueType\n\n }\n\n\n\n /// Whether the value field is allowed to contain null elements.\n\n pub fn get_value_contains_null(&self) -> bool {\n\n self.valueContainsNull\n\n }\n\n}\n\n\n\n/*\n", "file_path": "rust/src/schema.rs", "rank": 83, "score": 39903.905398062576 }, { 
"content": " * List of primitive types:\n\n * string: utf8\n\n * long // undocumented, i64?\n\n * integer: i32\n\n * short: i16\n\n * byte: i8\n\n * float: f32\n\n * double: f64\n\n * boolean: bool\n\n * binary: a sequence of binary data\n\n * date: A calendar date, represented as a year-month-day triple without a timezone\n\n * timestamp: Microsecond precision timestamp without a timezone\n\n */\n\n/// Enum with variants for each top level schema data type.\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\n#[serde(untagged)]\n\npub enum SchemaDataType {\n\n /// Variant representing non-array, non-map, non-struct fields. Wrapped value will contain the\n\n /// the string name of the primitive type.\n\n primitive(String),\n", "file_path": "rust/src/schema.rs", "rank": 84, "score": 39901.80657054171 }, { "content": " /// The data type of each element contained in the array.\n\n pub fn get_element_type(&self) -> &SchemaDataType {\n\n &self.elementType\n\n }\n\n\n\n /// Whether the column/field is allowed to contain null elements.\n\n pub fn contains_null(&self) -> bool {\n\n self.containsNull\n\n }\n\n}\n\n\n\n/// Schema definition for map type fields.\n\n#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]\n\npub struct SchemaTypeMap {\n\n r#type: String,\n\n keyType: Box<SchemaDataType>,\n\n valueType: Box<SchemaDataType>,\n\n valueContainsNull: bool,\n\n}\n\n\n", "file_path": "rust/src/schema.rs", "rank": 85, "score": 39901.10281595611 }, { "content": "def test_schema_pyarrow_from_decimal_and_floating_types():\n\n field_name = \"decimal_test\"\n\n metadata = {b\"metadata_k\": b\"metadata_v\"}\n\n precision = 20\n\n scale = 2\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"nullable\": False,\n\n \"metadata\": metadata,\n\n \"type\": {\"name\": \"decimal\", \"precision\": precision, \"scale\": scale},\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == 
pyarrow.decimal128(precision=precision, scale=scale)\n\n assert dict(pyarrow_field.metadata) == metadata\n\n assert pyarrow_field.nullable is False\n\n\n\n field_name = \"floating_test\"\n\n metadata = {b\"metadata_k\": b\"metadata_v\"}\n\n pyarrow_field = pyarrow_field_from_dict(\n\n {\n\n \"name\": field_name,\n\n \"nullable\": False,\n\n \"metadata\": metadata,\n\n \"type\": {\"name\": \"floatingpoint\", \"precision\": \"HALF\"},\n\n }\n\n )\n\n assert pyarrow_field.name == field_name\n\n assert pyarrow_field.type == pyarrow.float16()\n\n assert dict(pyarrow_field.metadata) == metadata\n", "file_path": "python/tests/test_schema.py", "rank": 86, "score": 39557.12317494523 }, { "content": "def test_read_multiple_tables_from_s3(s3cred):\n\n \"\"\"\n\n Should be able to create multiple cloud storage based DeltaTable instances\n\n without blocking on async rust function calls.\n\n \"\"\"\n\n for path in [\"s3://deltars/simple\", \"s3://deltars/simple\"]:\n\n t = DeltaTable(path)\n\n assert t.files() == [\n\n \"part-00000-c1777d7d-89d9-4790-b38a-6ee7e24456b1-c000.snappy.parquet\",\n\n \"part-00001-7891c33d-cedc-47c3-88a6-abcfb049d3b4-c000.snappy.parquet\",\n\n \"part-00004-315835fe-fb44-4562-98f6-5e6cfa3ae45d-c000.snappy.parquet\",\n\n \"part-00007-3a0e4727-de0d-41b6-81ef-5223cf40f025-c000.snappy.parquet\",\n\n \"part-00000-2befed33-c358-4768-a43c-3eda0d2a499d-c000.snappy.parquet\",\n", "file_path": "python/tests/test_table_read.py", "rank": 87, "score": 39262.752937014906 }, { "content": "def test_get_files_partitioned_table():\n\n table_path = \"../rust/tests/data/delta-0.8.0-partitioned\"\n\n dt = DeltaTable(table_path)\n\n partition_filters = [(\"day\", \"=\", \"3\")]\n\n assert dt.files_by_partitions(partition_filters=partition_filters) == [\n\n f\"{table_path}/year=2020/month=2/day=3/part-00000-94d16827-f2fd-42cd-a060-f67ccc63ced9.c000.snappy.parquet\"\n\n ]\n\n partition_filters = [(\"day\", \"!=\", \"3\")]\n\n assert 
dt.files_by_partitions(partition_filters=partition_filters) == [\n\n f\"{table_path}/year=2020/month=1/day=1/part-00000-8eafa330-3be9-4a39-ad78-fd13c2027c7e.c000.snappy.parquet\",\n\n f\"{table_path}/year=2020/month=2/day=5/part-00000-89cdd4c8-2af7-4add-8ea3-3990b2f027b5.c000.snappy.parquet\",\n\n f\"{table_path}/year=2021/month=12/day=20/part-00000-9275fdf4-3961-4184-baa0-1c8a2bb98104.c000.snappy.parquet\",\n\n f\"{table_path}/year=2021/month=12/day=4/part-00000-6dc763c0-3e8b-4d52-b19e-1f92af3fbb25.c000.snappy.parquet\",\n\n f\"{table_path}/year=2021/month=4/day=5/part-00000-c5856301-3439-4032-a6fc-22b7bc92bebb.c000.snappy.parquet\",\n\n ]\n\n partition_filters = [(\"day\", \"in\", [\"3\", \"20\"])]\n\n assert dt.files_by_partitions(partition_filters=partition_filters) == [\n\n f\"{table_path}/year=2020/month=2/day=3/part-00000-94d16827-f2fd-42cd-a060-f67ccc63ced9.c000.snappy.parquet\",\n\n f\"{table_path}/year=2021/month=12/day=20/part-00000-9275fdf4-3961-4184-baa0-1c8a2bb98104.c000.snappy.parquet\",\n\n ]\n\n partition_filters = [(\"day\", \"not in\", [\"3\", \"20\"])]\n\n assert dt.files_by_partitions(partition_filters=partition_filters) == [\n\n f\"{table_path}/year=2020/month=1/day=1/part-00000-8eafa330-3be9-4a39-ad78-fd13c2027c7e.c000.snappy.parquet\",\n\n f\"{table_path}/year=2020/month=2/day=5/part-00000-89cdd4c8-2af7-4add-8ea3-3990b2f027b5.c000.snappy.parquet\",\n\n f\"{table_path}/year=2021/month=12/day=4/part-00000-6dc763c0-3e8b-4d52-b19e-1f92af3fbb25.c000.snappy.parquet\",\n\n f\"{table_path}/year=2021/month=4/day=5/part-00000-c5856301-3439-4032-a6fc-22b7bc92bebb.c000.snappy.parquet\",\n\n ]\n\n partition_filters = [(\"day\", \"not in\", [\"3\", \"20\"]), (\"year\", \"=\", \"2021\")]\n\n assert dt.files_by_partitions(partition_filters=partition_filters) == [\n\n f\"{table_path}/year=2021/month=12/day=4/part-00000-6dc763c0-3e8b-4d52-b19e-1f92af3fbb25.c000.snappy.parquet\",\n\n 
f\"{table_path}/year=2021/month=4/day=5/part-00000-c5856301-3439-4032-a6fc-22b7bc92bebb.c000.snappy.parquet\",\n\n ]\n\n partition_filters = [(\"invalid_operation\", \"=>\", \"3\")]\n\n with pytest.raises(Exception) as exception:\n\n dt.files_by_partitions(partition_filters=partition_filters)\n\n assert (\n\n str(exception.value)\n\n == 'Invalid partition filter found: (\"invalid_operation\", \"=>\", \"3\").'\n\n )\n\n\n\n partition_filters = [(\"invalid_operation\", \"=\", [\"3\", \"20\"])]\n\n with pytest.raises(Exception) as exception:\n\n dt.files_by_partitions(partition_filters=partition_filters)\n\n assert (\n\n str(exception.value)\n\n == 'Invalid partition filter found: (\"invalid_operation\", \"=\", [\"3\", \"20\"]).'\n\n )\n\n\n\n partition_filters = [(\"day\", \"=\", 3)]\n\n with pytest.raises(Exception) as exception:\n\n dt.files_by_partitions(partition_filters=partition_filters)\n\n assert (\n\n str(exception.value)\n\n == \"Only the type String is currently allowed inside the partition filters.\"\n\n )\n\n\n\n partition_filters = [(\"unknown\", \"=\", \"3\")]\n", "file_path": "python/tests/test_table_read.py", "rank": 88, "score": 39251.00350688285 }, { "content": "def test_read_partitioned_table_metadata():\n\n table_path = \"../rust/tests/data/delta-0.8.0-partitioned\"\n\n dt = DeltaTable(table_path)\n\n metadata = dt.metadata()\n\n assert metadata.id == \"fe5a3c11-30d4-4dd7-b115-a1c121e66a4e\"\n\n assert metadata.name is None\n\n assert metadata.description is None\n\n assert metadata.partition_columns == [\"year\", \"month\", \"day\"]\n\n assert metadata.created_time == 1615555644515\n", "file_path": "python/tests/test_table_read.py", "rank": 89, "score": 39251.00350688285 }, { "content": "def test_read_partitioned_table_to_dict():\n\n table_path = \"../rust/tests/data/delta-0.8.0-partitioned\"\n\n dt = DeltaTable(table_path)\n\n expected = {\n\n \"value\": [\"1\", \"2\", \"3\", \"6\", \"7\", \"5\", \"4\"],\n\n \"year\": [\"2020\", \"2020\", 
\"2020\", \"2021\", \"2021\", \"2021\", \"2021\"],\n\n \"month\": [\"1\", \"2\", \"2\", \"12\", \"12\", \"12\", \"4\"],\n\n \"day\": [\"1\", \"3\", \"5\", \"20\", \"20\", \"4\", \"5\"],\n\n }\n", "file_path": "python/tests/test_table_read.py", "rank": 90, "score": 39251.00350688285 }, { "content": "def test_read_simple_table_to_dict():\n\n table_path = \"../rust/tests/data/simple_table\"\n\n dt = DeltaTable(table_path)\n", "file_path": "python/tests/test_table_read.py", "rank": 91, "score": 39251.00350688285 }, { "content": "def test_read_table_with_column_subset():\n\n table_path = \"../rust/tests/data/delta-0.8.0-partitioned\"\n\n dt = DeltaTable(table_path)\n\n expected = {\n\n \"value\": [\"1\", \"2\", \"3\", \"6\", \"7\", \"5\", \"4\"],\n\n \"day\": [\"1\", \"3\", \"5\", \"20\", \"20\", \"4\", \"5\"],\n\n }\n\n assert (\n\n dt.to_pyarrow_dataset().to_table(columns=[\"value\", \"day\"]).to_pydict()\n\n == expected\n", "file_path": "python/tests/test_table_read.py", "rank": 92, "score": 39251.00350688285 }, { "content": " use super::Version;\n\n use std::process::Command;\n\n\n\n use regex::Regex;\n\n\n\n // glibc version is taken from std/sys/unix/os.rs\n\n pub fn get_version() -> Result<Version, String> {\n\n let output = Command::new(\"ldd\")\n\n .args(&[\"--version\"])\n\n .output()\n\n .expect(\"failed to execute ldd\");\n\n let output_str = std::str::from_utf8(&output.stdout).unwrap();\n\n let version_str = ldd_output_to_version_str(output_str)?;\n\n\n\n parse_glibc_version(version_str)\n\n .ok_or_else(|| format!(\"Invalid version string from ldd output: {}\", version_str,))\n\n }\n\n\n\n fn ldd_output_to_version_str(output_str: &str) -> Result<&str, String> {\n\n let version_reg = Regex::new(r#\"ldd \\(.+\\) ([0-9]+\\.[0-9]+)\"#).unwrap();\n", "file_path": "glibc_version/src/lib.rs", "rank": 93, "score": 38817.51046719355 }, { "content": " warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n\n Written by Roland McGrath and Ulrich 
Drepper.\"#,\n\n )\n\n .unwrap();\n\n assert_eq!(ver_str, \"2.31\");\n\n }\n\n }\n\n}\n\n\n\n#[cfg(not(all(target_os = \"linux\", target_env = \"gnu\")))]\n\nmod imp {\n\n use super::Version;\n\n pub fn get_version() -> Result<Version, String> {\n\n unimplemented!();\n\n }\n\n}\n\n\n\n#[inline]\n", "file_path": "glibc_version/src/lib.rs", "rank": 94, "score": 38814.06262378907 }, { "content": " /// Adds a file to the table state.\n\n add(Add),\n\n /// Removes a file from the table state.\n\n remove(Remove),\n\n /// Used by streaming systems to track progress externally with application specific version\n\n /// identifiers.\n\n txn(Txn),\n\n /// Describes the minimum reader and writer versions required to read or write to the table.\n\n protocol(Protocol),\n\n /// Describes commit provenance information for the table.\n\n commitInfo(Value),\n\n}\n\n\n\nimpl Action {\n\n /// Returns an action from the given parquet Row. Used when deserializing delta log parquet\n\n /// checkpoints.\n\n pub fn from_parquet_record(\n\n schema: &parquet::schema::types::Type,\n\n record: &parquet::record::Row,\n\n ) -> Result<Self, ActionError> {\n", "file_path": "rust/src/action.rs", "rank": 97, "score": 37.00910994639022 } ]
Rust
kernel/src/net/netmapping.rs
lqd/chocolate_milk
ad7fc8f99721d70c06d825a30d8ecfd82263c5fd
use core::ops::{Deref, DerefMut}; use core::alloc::Layout; use core::convert::TryInto; use alloc::boxed::Box; use alloc::borrow::Cow; use noodle::*; use falktp::ServerMessage; use page_table::{VirtAddr, PageType, PhysMem}; use page_table::{PAGE_NX, PAGE_WRITE, PAGE_PRESENT}; use lockcell::LockCell; use crate::core_locals::LockInterrupts; use crate::mm::{self, PhysicalMemory}; use crate::net::{NetDevice, UdpAddress, UdpBind}; use crate::interrupts::{register_fault_handler, FaultReg, PageFaultHandler}; pub struct NetMapHandler { vaddr: VirtAddr, udp: UdpBind, file_id: u64, size: usize, server: UdpAddress, read_only: bool, handling: LockCell<(), LockInterrupts>, } impl PageFaultHandler for NetMapHandler { unsafe fn page_fault(&mut self, fault_addr: VirtAddr, code: u64) -> bool { let end = VirtAddr(self.vaddr.0 + (self.size as u64 - 1)); if self.read_only && (code & (1 << 1)) != 0 { return false; } if fault_addr >= self.vaddr && fault_addr <= end { let _lock = self.handling.lock(); { let mut pmem = PhysicalMemory; let mut page_table = core!().boot_args.page_table.lock(); let page_table = page_table.as_mut().unwrap(); if page_table.translate(&mut pmem, VirtAddr(fault_addr.0 & !0xfff)) .map(|x| x.page).flatten().is_some() { return true; } } let offset = ((fault_addr.0 & !0xfff) - self.vaddr.0) as usize; let page = { let mut pmem = PhysicalMemory; pmem.alloc_phys(Layout::from_size_align(4096, 4096).unwrap()) }; let new_page = mm::slice_phys_mut(page, 4096); let to_recv = core::cmp::min(4096, self.size - offset); new_page[to_recv..].iter_mut().for_each(|x| *x = 0); let mut retries = 0; 'retry: loop { retries += 1; if retries > 100 { panic!("Failed to download backing page"); } let mut packet = self.udp.device().allocate_packet(); { let mut pkt = packet.create_udp(&self.server); ServerMessage::Read { id: self.file_id, offset: offset, size: to_recv, }.serialize(&mut pkt).unwrap(); } self.udp.device().send(packet, true); if self.udp.recv_timeout(100_000, |_, udp| { let mut 
ptr = &udp.payload[..]; match ServerMessage::deserialize(&mut ptr)? { ServerMessage::ReadOk => Some(()), ServerMessage::ReadErr => panic!("Could not satisfy network mapping read"), _ => unreachable!(), } }).is_none() { continue 'retry; } let mut recv_off = 0; while recv_off < to_recv { if self.udp.recv_timeout(100_000, |_, udp| { assert!(udp.payload.len() <= to_recv - recv_off, "Whoa, larger packet than expected"); new_page[recv_off..recv_off + udp.payload.len()] .copy_from_slice(&udp.payload); recv_off += udp.payload.len(); Some(()) }).is_none() { continue 'retry; } } break; } let mut pmem = PhysicalMemory; let mut page_table = core!().boot_args.page_table.lock(); let page_table = page_table.as_mut().unwrap(); page_table.map_raw(&mut pmem, VirtAddr(fault_addr.0 & !0xfff), PageType::Page4K, page.0 | PAGE_NX | if self.read_only { 0 } else { PAGE_WRITE } | PAGE_PRESENT) .expect("Failed to map in network mapped memory"); true } else { false } } } pub struct NetMapping<'a> { backing: &'a mut [u8], _fault_reg: FaultReg, read_only: bool, } impl<'a> NetMapping<'a> { pub fn new(server: &str, filename: &str, read_only: bool) -> Option<Self> { let netdev = NetDevice::get()?; let udp = NetDevice::bind_udp(netdev.clone())?; let server = UdpAddress::resolve( &netdev, udp.port(), server) .expect("Couldn't resolve target address"); let mut packet = netdev.allocate_packet(); { let mut pkt = packet.create_udp(&server); ServerMessage::GetFileId(Cow::Borrowed(filename)) .serialize(&mut pkt).unwrap(); } netdev.send(packet, true); let (file_id, size) = udp.recv_timeout(5_000_000, |_, udp| { let mut ptr = &udp.payload[..]; let msg = ServerMessage::deserialize(&mut ptr) .expect("Failed to deserialize File ID response"); match msg { ServerMessage::FileId { id, size } => Some(Some((id, size))), ServerMessage::FileIdErr => Some(None), _ => unreachable!(), } })??; if size <= 0 { return None; } let size_align = size.checked_add(0xfff)? 
& !0xfff; let virt_addr = crate::mm::alloc_virt_addr_4k(size_align as u64); let handler = Box::new(NetMapHandler { vaddr: virt_addr, file_id: file_id, udp: udp, size: size, server: server, read_only: read_only, handling: LockCell::new_no_preempt(()), }); Some(NetMapping { backing: unsafe { core::slice::from_raw_parts_mut(virt_addr.0 as *mut u8, size.try_into().ok()?) }, _fault_reg: register_fault_handler(handler), read_only, }) } } impl<'a> Deref for NetMapping<'a> { type Target = [u8]; fn deref(&self) -> &Self::Target { self.backing } } impl<'a> DerefMut for NetMapping<'a> { fn deref_mut(&mut self) -> &mut Self::Target { assert!(!self.read_only, "Attempted write access to read-only network mapping"); self.backing } }
use core::ops::{Deref, DerefMut}; use core::alloc::Layout; use core::convert::TryInto; use alloc::boxed::Box; use alloc::borrow::Cow; use noodle::*; use falktp::ServerMessage; use page_table::{VirtAddr, PageType, PhysMem}; use page_table::{PAGE_NX, PAGE_WRITE, PAGE_PRESENT}; use lockcell::LockCell; use crate::core_locals::LockInterrupts; use crate::mm::{self, PhysicalMemory}; use crate::net::{NetDevice, UdpAddress, UdpBind}; use crate::interrupts::{register_fault_handler, FaultReg, PageFaultHandler}; pub struct NetMapHandler { vaddr: VirtAddr, udp: UdpBind, file_id: u64, size: usize, server: UdpAddress, read_only: bool, handling: LockCell<(), LockInterrupts>, } impl PageFaultHandler for NetMapHandler { unsafe fn page_fault(&mut self, fault_addr: VirtAddr, code: u64) -> bool { let end = VirtAddr(self.vaddr.0 + (self.size as u64 - 1)); if self.read_only && (code & (1 << 1)) != 0 { return false; } if fault_addr >= self.vaddr && fault_addr <= end { let _lock = self.handling.lock(); { let mut pmem = PhysicalMemory; let mut page_table = core!().boot_args.page_table.lock(); let page_table = page_table.as_mut().unwrap(); if page_table.translate(&mut pmem, VirtAddr(fault_addr.0 & !0xfff)) .map(|x| x.page).flatten().is_some() { return true; } } let offset = ((fault_addr.0 & !0xfff) - self.vaddr.0) as usize; let page = { let mut pmem = PhysicalMemory; pmem.alloc_phys(Layout::from_size_align(4096, 4096).unwrap()) }; let new_page = mm::slice_phys_mut(page, 4096); let to_recv = core::cmp::min(4096, self.size - offset); new_page[to_recv..].iter_mut().for_each(|x| *x = 0); let mut retries = 0; 'retry: loop { retries += 1; if retries > 100 { panic!("Failed to download backing page"); } let mut packet = self.udp.device().allocate_packet(); { let mut pkt = packet.create_udp(&self.server); ServerMessage::Read { id: self.file_id, offset: offset, size: to_recv, }.serialize(&mut pkt).unwrap(); } self.udp.device().send(packet, true); if self.udp.recv_timeout(100_000, |_, udp| { let mut 
ptr = &udp.payload[..]; match ServerMessage::deserialize(&mut ptr)? { ServerMessage::ReadOk => Some(()), ServerMessage::ReadErr => panic!("Could not satisfy network mapping read"), _ => unreachable!(), } }).is_none() { continue 'retry; } let mut recv_off = 0; while recv_off < to_recv { if self.udp.recv_timeout(100_000, |_, udp| { assert!(udp.payload.len() <= to_recv - recv_off, "Whoa, larger packet than expected"); new_page[recv_off..recv_off + udp.payload.len()] .copy_from_slice(&udp.payload); recv_off += udp.payload.len(); Some(()) }).is_none() { continue 'retry; } } break; } let mut pmem = PhysicalMemory; let mut page_table = core!().boot_args.page_table.lock(); let page_table = page_table.as_mut().unwrap(); page_table.map_raw(&mut pmem, VirtAddr(fault_addr.0 & !0xfff), PageType::Page4K, page.0 | PAGE_NX | if self.read_only { 0 } else { PAGE_WRITE } | PAGE_PRESENT) .expect("Failed to map in network mapped memory"); true } else { false } } } pub struct NetMapping<'a> { backing: &'a mut [u8], _fault_reg: FaultReg, read_only: bool, } impl<'a> NetMapping<'a> { pub fn new(server: &str, filename: &str, read_only: bool) -> Option<Self> { let netdev = NetDevice::get()?; let udp = NetDevice::bind_udp(netdev.clone())?; let server = UdpAddress::resolve( &netdev, udp.port(), server) .expect("Couldn't resolve target address"); let mut packet = netdev.allocate_packet(); { let mut pkt = packet.create_udp(&server); ServerMessage::GetFileId(Cow::Borrowed(filename)) .serialize(&mut pkt).unwrap(); } netdev.send(packet, true); let (file_id, size) = udp.recv_timeout(5_000_000, |_, udp| { let mut ptr = &udp.payload[..]; let msg = ServerMessage::deserialize(&mut ptr) .expect("Failed to deserialize File ID response"); match msg { ServerMessage::FileId { id, size } => Some(Some((id, size))), ServerMessage::FileIdErr => Some(None), _ => unreachable!(), } })??; if size <= 0 { return None; } let size_align = size.checked_add(0xfff)? 
& !0xfff; let virt_addr = crate::mm::alloc_virt_addr_4k(size_align as u64); let handler = Box::new(NetMapHandler { vaddr: virt_addr, file_id: file_id, udp: udp, size: size, server: server, read_only: read_only, handling: LockCell::new_no_preempt(()), });
} } impl<'a> Deref for NetMapping<'a> { type Target = [u8]; fn deref(&self) -> &Self::Target { self.backing } } impl<'a> DerefMut for NetMapping<'a> { fn deref_mut(&mut self) -> &mut Self::Target { assert!(!self.read_only, "Attempted write access to read-only network mapping"); self.backing } }
Some(NetMapping { backing: unsafe { core::slice::from_raw_parts_mut(virt_addr.0 as *mut u8, size.try_into().ok()?) }, _fault_reg: register_fault_handler(handler), read_only, })
call_expression
[ { "content": "/// Download a file with the `filename` over TFTP with the PXE 16-bit API\n\npub fn download<P: AsRef<[u8]>>(filename: P) -> Option<Vec<u8>> {\n\n // Lock access to PXE\n\n let _guard = PXE_GUARD.lock();\n\n\n\n // Convert the filename to a slice of bytes\n\n let filename: &[u8] = filename.as_ref();\n\n\n\n // Invoke the PXE installation check with int 0x1a\n\n let mut regs = RegisterState::default();\n\n regs.eax = 0x5650;\n\n unsafe { invoke_realmode(0x1a, &mut regs); }\n\n\n\n // Check that the PXE API responded as expected and CF has been cleared\n\n if regs.eax as u16 != 0x564e || (regs.efl & 1) != 0 {\n\n return None;\n\n }\n\n\n\n // Get the linear address to the PXENV+ structure\n\n let pxenv = segoff_to_linear(regs.es, regs.ebx as u16);\n\n let pxenv = unsafe {\n", "file_path": "bootloader/src/pxe.rs", "rank": 0, "score": 201029.88657041083 }, { "content": "/// Find a free region of virtual memory that can hold `size` bytes and return\n\n/// the virtual address\n\n///\n\n/// This is only valid for virtual requests for 4 KiB mappings\n\npub fn alloc_virt_addr_4k(size: u64) -> VirtAddr {\n\n /// Base address for virtual allocations\n\n static NEXT_FREE_VADDR: AtomicU64 = AtomicU64::new(KERNEL_VMEM_BASE);\n\n\n\n /// Gap between virtual allocations\n\n const GUARD_PAGE_SIZE: u64 = 32 * 1024;\n\n\n\n assert!(size > 0 && (size & 0xfff) == 0,\n\n \"Invalid size for virtual region allocation\");\n\n\n\n // Compute the amount of virtual memory to reserve, including the guard\n\n // size.\n\n let reserve_size = GUARD_PAGE_SIZE.checked_add(size as u64)\n\n .expect(\"Integer overflow on virtual region size\");\n\n \n\n // Get a new virtual region that is free\n\n let ret = VirtAddr(\n\n NEXT_FREE_VADDR.fetch_add(reserve_size, Ordering::SeqCst)\n\n );\n\n\n", "file_path": "kernel/src/mm.rs", "rank": 1, "score": 200287.01797958853 }, { "content": "#[inline]\n\npub fn rdtsc() -> u64 {\n\n let val_lo: u32;\n\n let val_hi: u32;\n\n\n\n unsafe {\n\n 
llvm_asm!(\"rdtsc\" : \"={edx}\"(val_hi), \"={eax}\"(val_lo) ::\n\n \"memory\" : \"volatile\", \"intel\");\n\n }\n\n\n\n ((val_hi as u64) << 32) | val_lo as u64\n\n}\n\n\n", "file_path": "shared/lockcell/src/lib.rs", "rank": 2, "score": 196863.89447740425 }, { "content": "#[inline]\n\npub fn read_cr4() -> u64 {\n\n let val: u64;\n\n unsafe {\n\n llvm_asm!(\"mov $0, cr4\" : \"=r\"(val) :: \"memory\" : \"volatile\", \"intel\");\n\n }\n\n val\n\n}\n\n\n\n/// Write to `cr4`\n\n#[inline]\n\npub unsafe fn write_cr4(val: u64) {\n\n llvm_asm!(\"mov cr4, $0\" :: \"r\"(val) : \"memory\" : \"volatile\", \"intel\");\n\n}\n\n\n\n/// Gets the ES selector value\n\n#[inline]\n\npub unsafe fn read_es() -> u16 {\n\n let ret;\n\n llvm_asm!(\"mov $0, es\" : \"=r\"(ret) ::: \"intel\", \"volatile\");\n\n ret\n", "file_path": "shared/cpu/src/lib.rs", "rank": 3, "score": 191761.29106786422 }, { "content": "#[inline]\n\npub fn read_cr3() -> u64 {\n\n let val: u64;\n\n unsafe {\n\n llvm_asm!(\"mov $0, cr3\" : \"=r\"(val) :: \"memory\" : \"volatile\", \"intel\");\n\n }\n\n val\n\n}\n\n\n\n/// Write to `cr3`\n\n#[inline]\n\npub unsafe fn write_cr3(val: u64) {\n\n llvm_asm!(\"mov cr3, $0\" :: \"r\"(val) : \"memory\" : \"volatile\", \"intel\");\n\n}\n\n\n\n/// Read `cr4`\n", "file_path": "shared/cpu/src/lib.rs", "rank": 4, "score": 191761.29106786422 }, { "content": "#[inline]\n\npub fn read_cr0() -> u64 {\n\n let val: u64;\n\n unsafe {\n\n llvm_asm!(\"mov $0, cr0\" : \"=r\"(val) :: \"memory\" : \"volatile\", \"intel\");\n\n }\n\n val\n\n}\n\n\n\n/// Write to `cr0`\n\n#[inline]\n\npub unsafe fn write_cr0(val: u64) {\n\n llvm_asm!(\"mov cr0, $0\" :: \"r\"(val) : \"memory\" : \"volatile\", \"intel\");\n\n}\n\n\n\n/// Read `cr2`\n", "file_path": "shared/cpu/src/lib.rs", "rank": 5, "score": 191761.29106786422 }, { "content": "#[inline]\n\npub fn read_cr2() -> u64 {\n\n let val: u64;\n\n unsafe {\n\n llvm_asm!(\"mov $0, cr2\" : \"=r\"(val) :: \"memory\" : \"volatile\", \"intel\");\n\n }\n\n 
val\n\n}\n\n\n\n/// Write to `cr2`\n\n#[inline]\n\npub unsafe fn write_cr2(val: u64) {\n\n llvm_asm!(\"mov cr2, $0\" :: \"r\"(val) : \"memory\" : \"volatile\", \"intel\");\n\n}\n\n\n\n/// Read `cr3`\n", "file_path": "shared/cpu/src/lib.rs", "rank": 6, "score": 191761.29106786422 }, { "content": "#[inline]\n\npub fn canonicalize_address(addr: u64) -> u64 {\n\n (((addr as i64) << 16) >> 16) as u64\n\n}\n\n\n\n/// Performs cpuid passing in eax and ecx as parameters. Returns a tuple\n\n/// containing the resulting (eax, ebx, ecx, edx)\n\n#[inline]\n\npub unsafe fn cpuid(eax: u32, ecx: u32) -> (u32, u32, u32, u32) {\n\n let (oeax, oebx, oecx, oedx);\n\n\n\n llvm_asm!(\"cpuid\" :\n\n \"={eax}\"(oeax), \"={ebx}\"(oebx), \"={ecx}\"(oecx), \"={edx}\"(oedx) :\n\n \"{eax}\"(eax), \"{ecx}\"(ecx) :: \"volatile\", \"intel\");\n\n\n\n (oeax, oebx, oecx, oedx)\n\n}\n\n\n\n/// Read `cr0`\n", "file_path": "shared/cpu/src/lib.rs", "rank": 7, "score": 190968.70925226095 }, { "content": "/// Register a page fault handler\n\npub fn register_fault_handler(handler: Box<dyn PageFaultHandler>) -> FaultReg {\n\n let ptr = &*handler as *const dyn PageFaultHandler;\n\n PAGE_FAULT_HANDLERS.lock().push(handler);\n\n FaultReg(ptr)\n\n}\n\n\n", "file_path": "kernel/src/interrupts.rs", "rank": 8, "score": 176867.15725259663 }, { "content": "/// Returns the TSC value upon a future time in microseconds\n\npub fn future(microseconds: u64) -> u64 {\n\n\tcpu::rdtsc() + (microseconds * RDTSC_MHZ.load(Ordering::SeqCst))\n\n}\n\n\n", "file_path": "kernel/src/time.rs", "rank": 9, "score": 167239.0294902903 }, { "content": "#[inline]\n\npub fn rdtsc() -> u64 {\n\n let val_lo: u32;\n\n let val_hi: u32;\n\n\n\n unsafe {\n\n llvm_asm!(\"rdtsc\" : \"={edx}\"(val_hi), \"={eax}\"(val_lo) ::\n\n \"memory\" : \"volatile\", \"intel\");\n\n }\n\n\n\n ((val_hi as u64) << 32) | val_lo as u64\n\n}\n\n\n\n/// Get the GS base\n\n#[inline]\n\npub unsafe fn gs_base() -> u64 {\n\n rdmsr(IA32_GS_BASE)\n\n}\n\n\n\n/// Set the 
GS base\n\n#[inline]\n", "file_path": "shared/cpu/src/lib.rs", "rank": 10, "score": 162838.10328922342 }, { "content": "/// Get the TSC rate in MHz\n\npub fn tsc_mhz() -> u64 {\n\n RDTSC_MHZ.load(Ordering::SeqCst)\n\n}\n\n\n", "file_path": "kernel/src/time.rs", "rank": 11, "score": 162838.10328922342 }, { "content": "/// Create a DHCP rquest packet\n\nfn create_dhcp_packet(packet: &mut Packet, xid: u32,\n\n mac: [u8; 6], options: &[u8]) {\n\n // Initialize the packet for a UDP DHCP packet\n\n let addr = UdpAddress {\n\n src_eth: mac,\n\n dst_eth: [0xff; 6],\n\n src_ip: 0.into(),\n\n dst_ip: (!0).into(),\n\n src_port: 68,\n\n dst_port: 67,\n\n };\n\n let mut pkt = packet.create_udp(&addr);\n\n\n\n // Reserve room in the packet for the header and the DHCP options\n\n let dhcp_header = pkt.reserve(size_of::<Header>() + options.len())\n\n .unwrap();\n\n\n\n {\n\n // Cast the header to a DHCP header\n\n let header: &mut Header = unsafe {\n", "file_path": "kernel/src/net/dhcp.rs", "rank": 12, "score": 157372.31569824493 }, { "content": "/// Busy sleep for a given number of microseconds\n\npub fn sleep(microseconds: u64) {\n\n let waitval = future(microseconds);\n\n while cpu::rdtsc() < waitval {\n\n core::sync::atomic::spin_loop_hint();\n\n }\n\n}\n\n\n\n/// Using the PIT, determine the frequency of rdtsc. 
Round this frequency to\n\n/// the nearest 100MHz and return it.\n\npub unsafe fn calibrate() {\n\n // Store off the current rdtsc value\n\n let start = cpu::rdtsc();\n\n RDTSC_START.store(start, Ordering::SeqCst);\n\n\n\n // Check if we already calibrated\n\n if let Some(tsc_freq) = *core!().persist_store().rdtsc_freq.lock() {\n\n RDTSC_MHZ.store(tsc_freq, Ordering::SeqCst);\n\n return;\n\n }\n\n\n", "file_path": "kernel/src/time.rs", "rank": 13, "score": 156167.87150326633 }, { "content": "/// Deserialize a buffer, creating a Some(`Self`) if the deserialization\n\n/// succeeds, otherwise a `None` is returned.\n\n///\n\n/// If deserialization fails at any point, all intermediate objects created\n\n/// will be destroyed and `None` will be returned.\n\npub trait Deserialize: Sized {\n\n fn deserialize<R: Reader>(reader: &mut R) -> Option<Self>;\n\n}\n\n\n\n/// Implement `Serialize` trait for types which provide `to_le_bytes()`\n\nmacro_rules! serialize_le {\n\n // Serialize `$input_type` as an `$wire_type` by using `to_le_bytes()`\n\n // and `from_le_bytes()`. 
The `$input_type` gets converted to an\n\n // `$wire_type` via `TryInto`\n\n ($input_type:ty, $wire_type:ty) => {\n\n impl Serialize for $input_type {\n\n fn serialize<W: Writer>(&self, writer: &mut W) -> Option<()> {\n\n let wire: $wire_type = (*self).try_into()\n\n .expect(\"Should never happen, input type to wire type\");\n\n writer.write(&wire.to_le_bytes())\n\n }\n\n }\n\n\n\n impl Deserialize for $input_type {\n\n fn deserialize<R: Reader>(reader: &mut R) -> Option<Self> {\n", "file_path": "shared/noodle/src/lib.rs", "rank": 14, "score": 154588.37918515765 }, { "content": "#[inline]\n\n#[cfg(target_arch = \"x86_64\")]\n\npub fn delay(cycles: u64) {\n\n if cycles <= 0 { return; }\n\n\n\n unsafe {\n\n llvm_asm!(r#\"\n\n mov rax, $0\n\n 2:\n\n dec rax\n\n jnz 2b\n\n \"# :: \"r\"(cycles) : \"rax\", \"memory\", \"cc\" : \"volatile\", \"intel\");\n\n }\n\n}\n\n\n\n/// Structure representing the various CPU features which are supported on this\n\n/// system. These can be detected with the `get_cpu_features` function\n\n#[derive(Default, Debug)]\n\npub struct CPUFeatures {\n\n pub max_cpuid: u32,\n\n pub max_extended_cpuid: u32,\n\n\n", "file_path": "shared/cpu/src/lib.rs", "rank": 15, "score": 152318.97070213844 }, { "content": "/// Returns true if the entirity of `a` is contained inside `b`, else\n\n/// returns false.\n\nfn contains(mut a: Range, mut b: Range) -> bool {\n\n // Make sure range `a` is always lowest to biggest\n\n if a.start > a.end {\n\n core::mem::swap(&mut a.end, &mut a.start);\n\n }\n\n\n\n // Make sure range `b` is always lowest to biggest\n\n if b.start > b.end {\n\n core::mem::swap(&mut b.end, &mut b.start);\n\n }\n\n\n\n a.start >= b.start && a.end <= b.end\n\n}\n\n\n", "file_path": "shared/rangeset/src/lib.rs", "rank": 16, "score": 149357.12357468656 }, { "content": "#[repr(C, packed)]\n\nstruct TablePtr(u16, u64);\n\n\n\n/// A 64-bit TSS data structure\n\n#[repr(C, packed)]\n\n#[derive(Clone, Copy, Default)]\n\npub struct Tss 
{\n\n\treserved1: u32,\n\n\trsp: [u64; 3],\n\n\treserved2: u64,\n\n\tist: [u64; 7],\n\n\treserved3: u64,\n\n\treserved4: u16,\n\n\tiopb_offset: u16,\n\n}\n\n\n\n/// A raw IDT entry, which is valid when placed in an IDT in this \n\n/// representation\n\n#[derive(Clone, Copy)]\n\n#[repr(C, align(16))]\n\npub struct IdtEntry(u32, u32, u32, u32);\n", "file_path": "kernel/src/interrupts.rs", "rank": 17, "score": 143695.84606173146 }, { "content": "/// Return number of seconds elapsed since a prior TSC value\n\npub fn elapsed(start_time: u64) -> f64 {\n\n (cpu::rdtsc() - start_time) as f64 /\n\n RDTSC_MHZ.load(Ordering::SeqCst) as f64 / 1_000_000.0\n\n}\n\n\n", "file_path": "kernel/src/time.rs", "rank": 18, "score": 143388.46905289622 }, { "content": "/// Parse a DHCP packet\n\nfn parse_dhcp_packet<'a>(xid: u32, udp: Udp<'a>) ->\n\n Option<(Header, Vec<DhcpOption<'a>>)> {\n\n // Get the UDP message\n\n let message = udp.payload;\n\n\n\n // Cast the header to a DHCP header\n\n let header = message.get(..size_of::<Header>())?;\n\n let header = unsafe { &*(header.as_ptr() as *const Header) };\n\n\n\n // XID did not match expected\n\n if header.xid != xid {\n\n return None;\n\n }\n\n\n\n // Sanity check some parts of the DHCP message\n\n if header.op != Opcode::Reply as u8 ||\n\n header.htype != HardwareType::Ethernet as u8 ||\n\n header.hlen != 6 ||\n\n header.cookie != DHCP_COOKIE.to_be() {\n\n return None;\n", "file_path": "kernel/src/net/dhcp.rs", "rank": 19, "score": 135740.2141739212 }, { "content": "/// Get the preferred memory range for the currently running APIC. 
Returns\n\n/// `None` if we have no valid APIC ID yet, or we do not have NUMA knowledge\n\n/// of the current APIC ID\n\npub fn memory_range() -> Option<Range> {\n\n // Check to see if the `APIC_TO_MEMORY_RANGE` has been initialized\n\n let atmr = APIC_TO_MEMORY_RANGE.load(Ordering::SeqCst);\n\n if atmr.is_null() {\n\n return None;\n\n }\n\n\n\n // Cast the memory range structure to something we can access\n\n let atmr = unsafe { &*atmr };\n\n\n\n // Based on our current APIC ID look up the memory range\n\n core!().apic_id().and_then(|x| atmr[x as usize])\n\n}\n\n\n\n/// Establish the `APIC_TO_MEMORY_RANGE` global with the APIC IDs to their\n\n/// corresponding NUMA-local memory regions\n\npub unsafe fn register_numa_nodes(apic_to_domain: BTreeMap<u32, u32>,\n\n domain_to_mem: BTreeMap<u32, (PhysAddr, u64)>) {\n\n // Create a heap-based database\n\n let mut apic_mappings = Box::new([None; MAX_CORES]);\n", "file_path": "kernel/src/mm.rs", "rank": 20, "score": 133841.03774145799 }, { "content": "/// Implemented for structures which may be registered as page fault handlers.\n\n/// These handlers can be used to hook page faults and potentially lazily map\n\n/// in pages when needed.\n\npub trait PageFaultHandler {\n\n /// Invoked when a page fault occurs with the contents for `cr2`, the\n\n /// faulting address. 
If the fault was handled this should return `true`\n\n /// and thus execution will return back to where the exception originally\n\n /// occurred.\n\n ///\n\n /// The `code` is the error code pushed onto a stack during a page fault\n\n unsafe fn page_fault(&mut self, vaddr: VirtAddr, code: u64) -> bool;\n\n}\n\n\n", "file_path": "kernel/src/interrupts.rs", "rank": 21, "score": 124888.5603941166 }, { "content": "type VmExitFilter<'a> = fn(&mut Worker<'a>, &VmExit) -> bool;\n\n\n\n/// A session for multiple workers to fuzz a shared job\n\npub struct FuzzSession<'a> {\n\n /// Master VM state\n\n master_vm: Arc<Worker<'a>>,\n\n\n\n /// Timeout for each fuzz case\n\n timeout: Option<u64>,\n\n\n\n /// Callback to invoke before every fuzz case, for the fuzzer to inject\n\n /// information into the VM\n\n inject: Option<InjectCallback<'a>>,\n\n\n\n /// Callback to invoke when VM exits are hit to allow a user to handle VM\n\n /// exits to re-enter the VM\n\n vmexit_filter: Option<VmExitFilter<'a>>,\n\n \n\n /// All observed coverage information\n\n coverage: Aht<CoverageRecord<'a>, (), 65536>,\n", "file_path": "kernel/src/snapshotted_app.rs", "rank": 22, "score": 120987.29691630515 }, { "content": "/// Gets the APIC state for a given APIC ID\n\npub fn core_state(apic_id: u32) -> ApicState {\n\n // Get the current state and convert it into an `ApicState`\n\n APICS[apic_id as usize].load(Ordering::SeqCst).into()\n\n}\n\n\n", "file_path": "kernel/src/acpi.rs", "rank": 23, "score": 120290.92917925821 }, { "content": "/// Initialize the physical memory manager. 
Here we get the memory map from the\n\n/// BIOS via E820 and put it into a `RangeSet` for tracking and allocation.\n\n/// We also subtract off the first 1 MiB of memory to prevent BIOS data\n\n/// structures from being overwritten.\n\npub fn init() {\n\n // Create a `RangeSet` to hold the memory that is marked free by the\n\n // BIOS\n\n let mut pmem = unsafe { BOOT_ARGS.free_memory_ref().lock() };\n\n\n\n // If physical memory has already been initialized, just return out!\n\n if pmem.is_some() {\n\n return;\n\n }\n\n\n\n // Create a new empty `RangeSet` for tracking free physical memory\n\n let mut free_memory = RangeSet::new();\n\n\n\n // Loop through the memory the BIOS reports twice. The first time we\n\n // accumulate all of the memory that is marked as free. The second pass\n\n // we remove all ranges that are not marked as free.\n\n // This sanitizes the BIOS memory map, and makes sure that any memory\n\n // marked both free and non-free, is not marked free at all.\n\n for &add_free_mem in &[true, false] {\n\n // Allocate a register state to use when doing the E820 call\n", "file_path": "bootloader/src/mm.rs", "rank": 24, "score": 119414.29514986393 }, { "content": "/// Switch to a kernel-based GDT, load a TSS with a critical stack for\n\n/// #DF, #MC, and NMI interrupts. 
Then set up a IDT with all interrupts passing\n\n/// through to the `interrupt_handler` Rust function.\n\npub fn init() {\n\n let mut interrupts = unsafe { core!().interrupts().lock() };\n\n assert!(interrupts.is_none(), \"Interrupts have already been initialized\");\n\n\n\n // Create a new, empty TSS\n\n\tlet mut tss: Box<Tss> = Box::new(Tss::default());\n\n\n\n // Create a 32 KiB critical stack for use during #DF, #MC, and NMI\n\n let crit_stack: ManuallyDrop<Vec<u8>> = ManuallyDrop::new(\n\n Vec::with_capacity(32 * 1024));\n\n tss.ist[0] = crit_stack.as_ptr() as u64 + crit_stack.capacity() as u64;\n\n \n\n // Create GDT in the kernel context\n\n let mut gdt: Vec<u64> = vec![\n\n\t 0x0000000000000000, // 0x0000 | Null descriptor\n\n\t 0x00009a007c00ffff, // 0x0008 | 16-bit, present, code, base 0x7c00\n\n\t 0x000092000000ffff, // 0x0010 | 16-bit, present, data, base 0\n\n\t 0x00cf9a000000ffff, // 0x0018 | 32-bit, present, code, base 0\n\n\t 0x00cf92000000ffff, // 0x0020 | 32-bit, present, data, base 0\n\n\t 0x00209a0000000000, // 0x0028 | 64-bit, present, code, base 0\n", "file_path": "kernel/src/interrupts.rs", "rank": 25, "score": 119409.12726340181 }, { "content": "/// Check if a command is working and returning the expected results.\n\nfn check_install(command: &str, args: &[&str],\n\n expected: &[&str]) -> Option<()> {\n\n // Invoke the command\n\n let result = Command::new(command).args(args).output().ok()?;\n\n \n\n // Check if the command was successful\n\n if !result.status.success() { return None; }\n\n\n\n // Convert the stdout bytes to a string\n\n let stdout = std::str::from_utf8(&result.stdout).ok()?;\n\n\n\n // Make sure `stdout` contains everything we expected\n\n if expected.iter().all(|x| stdout.contains(x)) {\n\n Some(())\n\n } else { \n\n None\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 118796.03028175532 }, { "content": "/// Check in that the current core has booted\n\npub fn core_checkin() {\n\n /// Number of cores 
which have checked in\n\n static CORES_CHECKED_IN: AtomicU32 = AtomicU32::new(0);\n\n\n\n // Transition from launched to online\n\n let old_state = APICS[core!().apic_id().unwrap() as usize]\n\n .compare_and_swap(ApicState::Launched as u8,\n\n ApicState::Online as u8,\n\n Ordering::SeqCst);\n\n\n\n if core!().id == 0 {\n\n // BSP should already be marked online\n\n assert!(old_state == ApicState::Online as u8,\n\n \"BSP not marked online in APIC state\");\n\n } else {\n\n // Make sure that we only ever go from launched to online, any other\n\n // transition is invalid\n\n assert!(old_state == ApicState::Launched as u8,\n\n \"Invalid core state transition\");\n\n }\n\n\n\n // Check in!\n\n CORES_CHECKED_IN.fetch_add(1, Ordering::SeqCst);\n\n\n\n // Wait for all cores to be checked in\n\n while CORES_CHECKED_IN.load(Ordering::SeqCst) != num_cores() {}\n\n}\n\n\n\n/// Get the total number of cores present on this system\n", "file_path": "kernel/src/acpi.rs", "rank": 27, "score": 116392.4029715058 }, { "content": "pub fn fuzz() {\n\n //if core!().id != 0 { cpu::halt(); }\n\n\n\n static SESSION:\n\n LockCell<Option<Arc<FuzzSession>>, LockInterrupts> =\n\n LockCell::new(None);\n\n\n\n // Create the master sessionshot, and fork from it for all cores\n\n let session = {\n\n let mut session = SESSION.lock();\n\n if session.is_none() {\n\n *session = Some(\n\n Arc::new(FuzzSession::new(\"192.168.101.1:1911\",\n\n \"falkdump_pid_00000000000000007500_tid_00000000000000007560\")\n\n .init_master_vm(|worker| {\n\n worker.vm.guest_regs.rip = 0x00007FF62C3D6E10;\n\n worker.write_from(\n\n VirtAddr(worker.vm.guest_regs.rip as u64),\n\n b\"\\x48\\x89\\x54\\x24\\x10\\x48\\x89\\x4c\\x24\\x08\\\n\n \\x48\\x83\\x7c\\x24\\x10\\x05\").unwrap();\n", "file_path": "kernel/src/test_fuzzer.rs", "rank": 28, "score": 116392.4029715058 }, { "content": "#[inline]\n\npub fn halt() -> ! 
{\n\n unsafe {\n\n loop {\n\n llvm_asm!(r#\"\n\n hlt\n\n \"# :::: \"volatile\", \"intel\");\n\n }\n\n }\n\n}\n\n\n\n/// Canonicalize an address\n", "file_path": "shared/cpu/src/lib.rs", "rank": 29, "score": 116392.4029715058 }, { "content": "/// A trait that allows generic access to physical memory\n\n///\n\n/// This allows the user of the page table to handle the physical to virtual\n\n/// translations that the page table uses during walks.\n\n///\n\n/// This also allows the user to provide mechanisms for the page table code\n\n/// to allocate and free physical memory such that page tables and pages can\n\n/// be freed when they are unmapped.\n\n///\n\n/// A user can control the physical translations such that this can be used to\n\n/// perform nested paging lookups given the `PhysMem` implementation for the\n\n/// guest `cr3` correctly uses the EPT for the VM to provide guest physical to\n\n/// host physical translations.\n\npub trait PhysMem {\n\n /// Provide a virtual address to memory which contains the raw physical\n\n /// memory at `paddr` for `size` bytes\n\n unsafe fn translate(&mut self, paddr: PhysAddr, size: usize) -> *mut u8;\n\n \n\n /// Allocate physical memory with a requested layout\n\n fn alloc_phys(&mut self, layout: Layout) -> PhysAddr;\n\n\n\n /// Free physical memory\n\n fn free_phys(&mut self, paddr: PhysAddr, size: u64);\n\n\n\n /// Same as `alloc_phys` but the memory will be zeroed\n\n fn alloc_phys_zeroed(&mut self, layout: Layout) -> PhysAddr {\n\n // Create an allocation\n\n let alc = self.alloc_phys(layout);\n\n\n\n // Zero it out\n\n unsafe {\n\n let bytes = self.translate(alc, layout.size());\n\n core::ptr::write_bytes(bytes, 0, layout.size());\n", "file_path": "shared/page_table/src/lib.rs", "rank": 30, "score": 112267.22838014306 }, { "content": "/// Initialize the locals for this core\n\npub fn init(boot_args: PhysAddr, core_id: u32) {\n\n unsafe {\n\n // Temporaraly set GS base to the core ID for early locks\n\n 
cpu::set_gs_base(core_id as u64);\n\n }\n\n\n\n /// Dummy structure to allow early `LockCell` access prior to having\n\n /// the `core!()` macro set up\n\n struct DummyLockInterrupts;\n\n\n\n // This dummy interrupt state implementation always reports no interrupts\n\n // or exceptions, as this code is run during early boot prior to interrupts\n\n impl lockcell::InterruptState for DummyLockInterrupts {\n\n fn in_interrupt() -> bool { false }\n\n fn in_exception() -> bool { false }\n\n fn core_id() -> u32 { unsafe { cpu::gs_base() as u32 } }\n\n fn enter_lock() {}\n\n fn exit_lock() {}\n\n }\n\n\n", "file_path": "kernel/src/core_locals.rs", "rank": 31, "score": 111785.27843519756 }, { "content": "/// Returns system uptime in seconds as a float\n\npub fn uptime() -> f64 {\n\n elapsed(RDTSC_START.load(Ordering::SeqCst))\n\n}\n\n\n", "file_path": "kernel/src/time.rs", "rank": 32, "score": 110772.42577538297 }, { "content": "#[allow(unused)]\n\npub fn num_cores() -> u32 {\n\n let count = TOTAL_CORES.load(Ordering::SeqCst);\n\n assert!(count > 0, \"total_cores() not ready until ACPI is initialized\");\n\n count\n\n}\n\n\n\n/// In-memory representation of an RSDP ACPI structure\n", "file_path": "kernel/src/acpi.rs", "rank": 33, "score": 107992.2688911498 }, { "content": "/// Returns a bitmask of the interrupts which are handled with EOIs in the\n\n/// current state. 
This is racey as we're going to construct the bitmaps from\n\n/// the list of atomic bools, and thus must be only used in situations where\n\n/// the `EOI_REQUIRED` table is not changing, or the code is not sensitive to\n\n/// the correctness of this output.\n\n///\n\n/// This is a safe function as it doesn't do anything dangerous, it's just some\n\n/// data.\n\npub fn eoi_required() -> [u128; 2] {\n\n let mut ret = [0; 2];\n\n\n\n // Accumulate the EOI required states into two 128 bits representing the\n\n // entire interrupt vector space\n\n for ii in 0..256 {\n\n let idx = ii / 128;\n\n let bit = ii % 128;\n\n let val = EOI_REQUIRED[ii as usize].load(Ordering::SeqCst);\n\n ret[idx] |= (val as u128) << bit;\n\n }\n\n\n\n ret\n\n}\n\n\n", "file_path": "kernel/src/interrupts.rs", "rank": 34, "score": 103613.26998983463 }, { "content": "fn inject(worker: &mut Worker) {\n\n // rcx points to the input buffer\n\n // rdx is the length of the input buffer\n\n \n\n // Create an empty input\n\n let mut input = worker.fuzz_input.take().unwrap();\n\n input.clear();\n\n\n\n if let (0, Some(old)) = (worker.rng.rand() % 2, worker.rand_input()) {\n\n // Use an existing input from the corpus\n\n input.extend_from_slice(old);\n\n } else {\n\n // Pick a random input size\n\n let input_size = worker.rng.rand() % (128 + 1);\n\n input.resize(input_size, 0u8);\n\n }\n\n \n\n // Set the input size\n\n worker.vm.guest_regs.rdx = input.len() as u64;\n\n\n", "file_path": "kernel/src/test_fuzzer.rs", "rank": 35, "score": 101872.83198049766 }, { "content": "/// Determines overlap of `a` and `b`. 
If there is overlap, returns the range\n\n/// of the overlap\n\n///\n\n/// In this overlap, returns:\n\n///\n\n/// [a.start -------------- a.end]\n\n/// [b.start -------------- b.end]\n\n/// | |\n\n/// ^-----------------^\n\n/// [ Return value ]\n\n///\n\nfn overlaps(mut a: Range, mut b: Range) -> Option<Range> {\n\n // Make sure range `a` is always lowest to biggest\n\n if a.start > a.end {\n\n core::mem::swap(&mut a.end, &mut a.start);\n\n }\n\n\n\n // Make sure range `b` is always lowest to biggest\n\n if b.start > b.end {\n\n core::mem::swap(&mut b.end, &mut b.start);\n\n }\n\n\n\n // Check if there is overlap\n\n if a.start <= b.end && b.start <= a.end {\n\n Some(Range {\n\n start: core::cmp::max(a.start, b.start),\n\n end: core::cmp::min(a.end, b.end)\n\n })\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "shared/rangeset/src/lib.rs", "rank": 36, "score": 101701.35726591278 }, { "content": "#[panic_handler]\n\npub fn panic(info: &PanicInfo) -> ! {\n\n // Disable interrupts, we're never coming back from this point.\n\n unsafe { core!().disable_interrupts(); }\n\n\n\n if core!().id == 0 {\n\n // If we had a panic on the BSP, we handle it quite uniquely. We'll\n\n // shut down all other processors by sending them NMIs and waiting for\n\n // them to check into a halted state.\n\n \n\n let our_info: *const PanicInfo = info;\n\n\n\n let other_info: *const PanicInfo =\n\n PANIC_PENDING.load(Ordering::SeqCst);\n\n \n\n let apic = unsafe {\n\n // Forcibly get access to the current APIC. 
This is likely safe in\n\n // almost every situation as the APIC is not very stateful.\n\n let apic = &mut *core!().apic().shatter();\n\n let apic = apic.as_mut().unwrap();\n\n \n", "file_path": "kernel/src/panic.rs", "rank": 37, "score": 101034.98106477024 }, { "content": "/// Get set of CPU features\n\npub fn get_cpu_features() -> CPUFeatures {\n\n let mut features: CPUFeatures = Default::default();\n\n\n\n unsafe {\n\n features.max_cpuid = cpuid(0, 0).0;\n\n features.max_extended_cpuid = cpuid(0x80000000, 0).0;\n\n\n\n if features.max_cpuid >= 1 {\n\n let cpuid_1 = cpuid(1, 0);\n\n features.fpu = ((cpuid_1.3 >> 0) & 1) == 1;\n\n features.vme = ((cpuid_1.3 >> 1) & 1) == 1;\n\n features.de = ((cpuid_1.3 >> 2) & 1) == 1;\n\n features.pse = ((cpuid_1.3 >> 3) & 1) == 1;\n\n features.tsc = ((cpuid_1.3 >> 4) & 1) == 1;\n\n features.apic = ((cpuid_1.3 >> 9) & 1) == 1;\n\n features.mmx = ((cpuid_1.3 >> 23) & 1) == 1;\n\n features.fxsr = ((cpuid_1.3 >> 24) & 1) == 1;\n\n features.sse = ((cpuid_1.3 >> 25) & 1) == 1;\n\n features.sse2 = ((cpuid_1.3 >> 26) & 1) == 1;\n\n features.htt = ((cpuid_1.3 >> 28) & 1) == 1;\n", "file_path": "shared/cpu/src/lib.rs", "rank": 38, "score": 100839.64165024305 }, { "content": "struct Client<'a> {\n\n /// \"Unique\" session ID for the client. 
Used to track when a client reboots\n\n /// and comes back with the same ip:port, but with a new session\n\n session_id: u64,\n\n\n\n /// Unique core IDs of the workers on this session\n\n workers: BTreeSet<u32>,\n\n\n\n /// Time of the first packet receieved from this client\n\n first_packet: Instant,\n\n\n\n /// Time of the last packet reciept from this client\n\n last_packet: Instant,\n\n\n\n /// Number of fuzz cases performed on this client\n\n fuzz_cases: u64,\n\n\n\n /// Set of coverage for this client\n\n coverage: BTreeSet<CoverageRecord<'a>>,\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 39, "score": 100798.51695962394 }, { "content": "/// Network backed VM memory information\n\nstruct NetBacking<'a> {\n\n /// Mapping of valid pages to their offsets in the `memory` buffer\n\n virt_to_offset: BTreeMap<VirtAddr, usize>,\n\n\n\n /// Raw memory backing the snasphot\n\n memory: NetMapping<'a>,\n\n}\n\n\n\npub struct Worker<'a> {\n\n /// Master worker that we are forked from\n\n master: Option<Arc<Worker<'a>>>,\n\n\n\n /// Network mapped memory for the VM\n\n network_mem: Option<Arc<NetBacking<'a>>>,\n\n\n\n /// The fuzz session this worker belongs to\n\n session: Option<Arc<FuzzSession<'a>>>,\n\n\n\n /// Raw virtual machine that this worker uses\n\n pub vm: Vm,\n", "file_path": "kernel/src/snapshotted_app.rs", "rank": 40, "score": 94561.02529743663 }, { "content": "#[inline]\n\npub fn get_core_locals() -> &'static CoreLocals {\n\n unsafe {\n\n let ptr: usize;\n\n\n\n // Get the first `u64` from `CoreLocals`, which given we don't change\n\n // the structure shape, should be the address of the core locals.\n\n llvm_asm!(\"mov $0, gs:[0]\" :\n\n \"=r\"(ptr) :: \"memory\" : \"volatile\", \"intel\");\n\n\n\n &*(ptr as *const CoreLocals)\n\n }\n\n}\n\n\n", "file_path": "kernel/src/core_locals.rs", "rank": 41, "score": 94382.44078459346 }, { "content": "type InjectCallback<'a> = fn(&mut Worker<'a>);\n\n\n", "file_path": "kernel/src/snapshotted_app.rs", 
"rank": 42, "score": 92938.66978975217 }, { "content": "/// Read the contents of `self` into `buf`. Used to allow for custom adapters\n\n/// during serialization. Return `None` `buf` cannot fully be filled\n\npub trait Reader {\n\n fn read_exact(&mut self, buf: &mut [u8]) -> Option<()>;\n\n}\n\n\n\n/// Basic `Writer` implementation for vectors of bytes\n\nimpl Writer for Vec<u8> {\n\n fn write(&mut self, buf: &[u8]) -> Option<()> {\n\n self.extend_from_slice(buf);\n\n Some(())\n\n }\n\n}\n\n\n\n/// Basic `Reader` implementation for slices of bytes\n\nimpl Reader for &[u8] {\n\n fn read_exact(&mut self, buf: &mut [u8]) -> Option<()> {\n\n buf.copy_from_slice(self.get(..buf.len())?);\n\n *self = &self[buf.len()..];\n\n Some(())\n\n }\n\n}\n\n\n", "file_path": "shared/noodle/src/lib.rs", "rank": 43, "score": 91976.06059282475 }, { "content": "/// Write the contents of `buf` into `self`. Used to allow custom adapters for\n\n/// writing during serialization. Return `None` if `buf` cannot be fully\n\n/// serialized\n\npub trait Writer {\n\n fn write(&mut self, buf: &[u8]) -> Option<()>;\n\n}\n\n\n", "file_path": "shared/noodle/src/lib.rs", "rank": 44, "score": 91971.44442628544 }, { "content": "/// Serialize a `self` into an existing vector\n\npub trait Serialize {\n\n fn serialize<W: Writer>(&self, writer: &mut W) -> Option<()>;\n\n}\n\n\n", "file_path": "shared/noodle/src/lib.rs", "rank": 45, "score": 91958.8556547549 }, { "content": "/// Trait that allows access to OS-level constructs defining interrupt state,\n\n/// exception state, unique core IDs, and enter/exit lock (for interrupt\n\n/// disabling and enabling) primitives.\n\npub trait InterruptState {\n\n /// Returns `true` if we're currently in an interrupt\n\n fn in_interrupt() -> bool;\n\n \n\n /// Returns `true` if we're currently in an exception. 
Which indicates that\n\n /// a lock cannot be held as we may have pre-empted a non-preemptable lock\n\n fn in_exception() -> bool;\n\n\n\n /// Gets the ID of the running core. It's required that this core ID is\n\n /// unique to the core, and cannot be `!0`\n\n fn core_id() -> u32;\n\n\n\n /// A lock which does not allow interrupting was taken, and thus interrupts\n\n /// must be disabled. It's up to the callee to handle the nesting of the\n\n /// interrupt status. Eg. using a refcount of number of interrupt disable\n\n /// requests\n\n fn enter_lock();\n\n\n\n /// A lock which does not allow interrupting was released, and thus\n\n /// interrupts can be enabled. It's up to the callee to handle the nesting\n", "file_path": "shared/lockcell/src/lib.rs", "rank": 46, "score": 90961.41843116851 }, { "content": "fn main() -> io::Result<()> {\n\n // Map from file IDs to the modified time and their contents\n\n let mut file_db: HashMap<u64, (SystemTime, Vec<u8>)> = HashMap::new();\n\n\n\n // Coverage records\n\n let coverage: Arc<Mutex<BTreeSet<CoverageRecord>>> = Default::default();\n\n\n\n // Clients\n\n let clients: Arc<Mutex<HashMap<SocketAddr, Client>>> = Default::default();\n\n\n\n // Create a new coverage file\n\n let mut coverage_file = File::create(\"coverage.txt\")?;\n\n\n\n // Get the current directory\n\n let cur_dir = std::fs::canonicalize(\"files\")?;\n\n\n\n // Bind to all network devices on UDP port 1911\n\n let socket = UdpSocket::bind(\"0.0.0.0:1911\")?;\n\n\n\n // Buffer for sending packets (reused to prevent allocations)\n", "file_path": "server/src/main.rs", "rank": 47, "score": 90678.59893575637 }, { "content": "/// Trait to allow conversion of slices of bytes to primitives and back\n\n/// generically\n\npub trait Primitive: Sized {\n\n fn cast(buf: &[u8]) -> Self;\n\n}\n\n\n\nmacro_rules! 
primitive {\n\n ($ty:ty) => {\n\n impl Primitive for $ty {\n\n fn cast(buf: &[u8]) -> Self {\n\n <$ty>::from_ne_bytes(buf.try_into().unwrap())\n\n }\n\n }\n\n }\n\n}\n\n\n\nprimitive!(u8);\n\nprimitive!(u16);\n\nprimitive!(u32);\n\nprimitive!(u64);\n\nprimitive!(u128);\n\nprimitive!(i8);\n", "file_path": "kernel/src/snapshotted_app.rs", "rank": 48, "score": 88183.0270098324 }, { "content": "pub fn get_lease(device: Arc<NetDevice>) -> Option<Lease> {\n\n // Get a \"unique\" transaction ID\n\n let xid = cpu::rdtsc() as u32;\n\n\n\n // Save off our devices MAC address\n\n let mac = device.mac();\n\n\n\n // Bind to UDP port 68\n\n let bind = NetDevice::bind_udp_port(device.clone(), 68)\n\n .expect(\"Could not bind to port 68 for dhcp\");\n\n\n\n // Construct the DHCP options for the discover\n\n let mut options = Vec::new();\n\n DhcpOption::MessageType(MessageType::Discover).serialize(&mut options);\n\n DhcpOption::ParameterRequestList(&[\n\n DhcpOptionId::MessageType as u8,\n\n DhcpOptionId::ServerIp as u8,\n\n ]).serialize(&mut options);\n\n DhcpOption::End.serialize(&mut options);\n\n \n", "file_path": "kernel/src/net/dhcp.rs", "rank": 49, "score": 81801.96203826716 }, { "content": "/// Checks to see if the PCI device being probed is a device that we can handle\n\n/// with our driver\n\npub fn probe(device: &PciDevice) -> Option<Arc<NetDevice>> {\n\n const E1000_REGS: NicRegisters = NicRegisters {\n\n ctrl: 0x0000,\n\n imc: 0x00d8,\n\n rdbal: 0x2800,\n\n rdbah: 0x2804,\n\n rdlen: 0x2808,\n\n rdh: 0x2810,\n\n rdt: 0x2818,\n\n tdbal: 0x3800,\n\n tdbah: 0x3804,\n\n tdlen: 0x3808,\n\n tdh: 0x3810,\n\n tdt: 0x3818,\n\n ral0: 0x5400,\n\n rah0: 0x5404,\n\n rctl: Some(0x0100),\n\n tctl: Some(0x0400),\n\n rxdctl: None,\n\n txdctl: None,\n", "file_path": "kernel/src/net/intel_nic.rs", "rank": 50, "score": 80116.35893614523 }, { "content": "/// Create a flattened PE image\n\n/// Returns a tuple (entry point vaddr, base vaddr, image, reinit data)\n\nfn flatten_pe<P: 
AsRef<Path>>(filename: P)\n\n -> Option<(u32, u32, Vec<u8>, Vec<u8>)> {\n\n let pe = std::fs::read(filename).ok()?;\n\n let pe = PeParser::parse(&pe)?;\n\n\n\n // Holds a stream of [vaddr: u32][size: u32][data to init]\n\n // This is expected to be used to re-initialize the writable data sections\n\n // in the bootloader such that a soft reboot can reset the bootloader\n\n // state to its initial states.\n\n let mut reinit = Vec::new();\n\n\n\n // Compute the bounds of the _loaded_ image\n\n let mut image_start = None;\n\n let mut image_end = None;\n\n pe.sections(|base, size, raw, _, write, _| {\n\n // Convert the size from 32-bits to 64-bits\n\n let size = size as u64;\n\n let end = base.checked_add(size.checked_sub(1)?)?;\n\n\n\n // Set up initial values\n", "file_path": "src/main.rs", "rank": 51, "score": 77622.73969867382 }, { "content": "/// Convert a 16-bit `seg:off` pointer into a linear address\n\nfn segoff_to_linear(seg: u16, off: u16) -> usize {\n\n ((seg as usize) << 4) + off as usize\n\n}\n\n\n", "file_path": "bootloader/src/pxe.rs", "rank": 52, "score": 77617.69536372759 }, { "content": "/// AUTOGENERATED BY tuple_match_gen.py DO NOT MODIFY!\n\n/// Handles matching of tuples based on the number of types they have. This\n\n/// is kinda ugly but it seems to be required as there's no way to dynamically\n\n/// construct an indentifer in macros. Since we can't make identifiers, we\n\n/// cannot construct names for tuple variants which can be used during binding.\n\n#[macro_export]\n\nmacro_rules! 
handle_serialize_tuple_match {\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident) => {\n\n if let $enumname::$enumident( ) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty) => {\n\n if let $enumname::$enumident( aa,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty) => {\n\n if let $enumname::$enumident( aa,ba,) = $self {\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 53, "score": 71346.60526722099 }, { "content": "fn stats(coverage: Arc<Mutex<BTreeSet<CoverageRecord>>>,\n\n clients: Arc<Mutex<HashMap<SocketAddr, Client>>>) {\n\n /// Time to wait between prints\n\n const PRINT_DELAY: Duration = Duration::from_millis(1000);\n\n\n\n let mut last_cases = 0;\n\n\n\n loop {\n\n std::thread::sleep(PRINT_DELAY);\n\n\n\n // Total number of fuzz cases and workers\n\n let mut total_cases = 0u64;\n\n let mut total_workers = 0usize;\n\n let mut total_clients = 0u64;\n\n\n\n let clients = clients.lock().unwrap();\n\n for (addr, client) in clients.iter() {\n\n // Compute the duration of time since the last report\n\n let tsl = Instant::now() - client.last_packet;\n\n let unresponsive = tsl > Duration::from_secs(5);\n", "file_path": "server/src/main.rs", "rank": 54, "score": 71346.2549340035 }, { "content": " Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, 
$tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 55, "score": 71336.24292385629 }, { "content": " Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 56, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty) => {\n\n if let 
$enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 57, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 58, "score": 71336.24292385629 }, { "content": " Serialize::serialize(eb, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty, $tydb:ty, $tyeb:ty, $tyfb:ty) => {\n\n if let 
$enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,db,eb,fb,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 59, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 60, "score": 71336.24292385629 }, { "content": " Serialize::serialize(oa, $buf)?;\n\n 
Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 61, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n 
Serialize::serialize(ga, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 62, "score": 71336.24292385629 }, { "content": " Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n Serialize::serialize(cb, $buf)?;\n\n Serialize::serialize(db, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty, $tydb:ty, $tyeb:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,db,eb,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 63, "score": 71336.24292385629 }, { "content": " Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n 
Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 64, "score": 71336.24292385629 }, { "content": " Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 65, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, 
$buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 66, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n Serialize::serialize(cb, $buf)?;\n\n Serialize::serialize(db, $buf)?;\n\n Serialize::serialize(eb, $buf)?;\n\n Serialize::serialize(fb, $buf)?;\n\n Serialize::serialize(gb, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty, $tydb:ty, $tyeb:ty, $tyfb:ty, $tygb:ty, $tyhb:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,db,eb,fb,gb,hb,) = $self {\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 67, "score": 71336.24292385629 }, { "content": " }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, 
$enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 68, "score": 71336.24292385629 }, { "content": " ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty, $tydb:ty, $tyeb:ty, $tyfb:ty, $tygb:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,db,eb,fb,gb,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 69, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n 
Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n Serialize::serialize(cb, $buf)?;\n\n Serialize::serialize(db, $buf)?;\n\n Serialize::serialize(eb, $buf)?;\n\n Serialize::serialize(fb, $buf)?;\n\n Serialize::serialize(gb, $buf)?;\n\n Serialize::serialize(hb, $buf)?;\n\n Serialize::serialize(ib, $buf)?;\n\n Serialize::serialize(jb, $buf)?;\n\n Serialize::serialize(kb, $buf)?;\n\n }\n\n };\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 70, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n Serialize::serialize(cb, $buf)?;\n\n Serialize::serialize(db, $buf)?;\n\n Serialize::serialize(eb, $buf)?;\n\n Serialize::serialize(fb, $buf)?;\n\n Serialize::serialize(gb, $buf)?;\n\n Serialize::serialize(hb, $buf)?;\n\n Serialize::serialize(ib, $buf)?;\n\n Serialize::serialize(jb, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty, $tydb:ty, $tyeb:ty, $tyfb:ty, $tygb:ty, $tyhb:ty, $tyib:ty, $tyjb:ty, $tykb:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,db,eb,fb,gb,hb,ib,jb,kb,) = $self {\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 71, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ka, $buf)?;\n\n 
Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 72, "score": 71336.24292385629 }, { "content": " Serialize::serialize(fa, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n", "file_path": 
"shared/noodle/src/tuple_match.rs", "rank": 73, "score": 71336.24292385629 }, { "content": " Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 74, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n Serialize::serialize(cb, $buf)?;\n\n Serialize::serialize(db, $buf)?;\n\n Serialize::serialize(eb, $buf)?;\n\n Serialize::serialize(fb, $buf)?;\n\n Serialize::serialize(gb, $buf)?;\n\n Serialize::serialize(hb, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty, $tydb:ty, $tyeb:ty, $tyfb:ty, $tygb:ty, $tyhb:ty, $tyib:ty) => {\n\n if let $enumname::$enumident( 
aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,db,eb,fb,gb,hb,ib,) = $self {\n\n Serialize::serialize($count, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 75, "score": 71336.24292385629 }, { "content": " ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty, $tydb:ty, $tyeb:ty, $tyfb:ty, $tygb:ty, $tyhb:ty, $tyib:ty, $tyjb:ty, $tykb:ty, $tylb:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,db,eb,fb,gb,hb,ib,jb,kb,lb,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 76, "score": 71336.24292385629 }, { "content": " Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n 
Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 77, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n Serialize::serialize(cb, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty, $tydb:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,db,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n", 
"file_path": "shared/noodle/src/tuple_match.rs", "rank": 78, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,) = $self {\n\n Serialize::serialize($count, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 79, "score": 71336.24292385629 }, { "content": " Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n 
Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 80, "score": 71336.24292385629 }, { "content": " Serialize::serialize(wa, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 81, "score": 71336.24292385629 }, { "content": " }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, 
$buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 82, "score": 71336.24292385629 }, { "content": " Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 83, "score": 71336.24292385629 }, { "content": " }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, 
$buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 84, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 85, "score": 71336.24292385629 }, { "content": " Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n 
Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 86, "score": 71336.24292385629 }, { "content": " }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,) = $self {\n\n Serialize::serialize($count, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 87, "score": 71336.24292385629 }, { "content": " Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n 
Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 88, "score": 71336.24292385629 }, { "content": " Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 89, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n Serialize::serialize(cb, $buf)?;\n\n Serialize::serialize(db, $buf)?;\n", 
"file_path": "shared/noodle/src/tuple_match.rs", "rank": 90, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 91, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n Serialize::serialize(cb, $buf)?;\n\n Serialize::serialize(db, $buf)?;\n\n Serialize::serialize(eb, $buf)?;\n\n Serialize::serialize(fb, $buf)?;\n\n Serialize::serialize(gb, $buf)?;\n\n Serialize::serialize(hb, $buf)?;\n\n Serialize::serialize(ib, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, 
$tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty, $tydb:ty, $tyeb:ty, $tyfb:ty, $tygb:ty, $tyhb:ty, $tyib:ty, $tyjb:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,db,eb,fb,gb,hb,ib,jb,) = $self {\n\n Serialize::serialize($count, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 92, "score": 71336.24292385629 }, { "content": " Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 93, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,) = $self {\n\n 
Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 94, "score": 71336.24292385629 }, { "content": " Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n Serialize::serialize(cb, $buf)?;\n\n Serialize::serialize(db, $buf)?;\n\n Serialize::serialize(eb, $buf)?;\n\n Serialize::serialize(fb, $buf)?;\n\n }\n\n };\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 95, "score": 71336.24292385629 }, { "content": " Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, 
$buf)?;\n\n Serialize::serialize(sa, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 96, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n\n Serialize::serialize(ea, $buf)?;\n\n Serialize::serialize(fa, $buf)?;\n\n Serialize::serialize(ga, $buf)?;\n\n Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 97, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n Serialize::serialize(sa, $buf)?;\n\n Serialize::serialize(ta, $buf)?;\n\n Serialize::serialize(ua, $buf)?;\n\n Serialize::serialize(va, $buf)?;\n\n Serialize::serialize(wa, $buf)?;\n\n Serialize::serialize(xa, $buf)?;\n\n Serialize::serialize(ya, $buf)?;\n\n Serialize::serialize(za, $buf)?;\n\n Serialize::serialize(ab, $buf)?;\n\n Serialize::serialize(bb, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty, $tyta:ty, $tyua:ty, $tyva:ty, $tywa:ty, $tyxa:ty, $tyya:ty, $tyza:ty, $tyab:ty, $tybb:ty, $tycb:ty) => {\n\n if let 
$enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,ta,ua,va,wa,xa,ya,za,ab,bb,cb,) = $self {\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 98, "score": 71336.24292385629 }, { "content": " Serialize::serialize(ha, $buf)?;\n\n Serialize::serialize(ia, $buf)?;\n\n Serialize::serialize(ja, $buf)?;\n\n Serialize::serialize(ka, $buf)?;\n\n Serialize::serialize(la, $buf)?;\n\n Serialize::serialize(ma, $buf)?;\n\n Serialize::serialize(na, $buf)?;\n\n Serialize::serialize(oa, $buf)?;\n\n Serialize::serialize(pa, $buf)?;\n\n Serialize::serialize(qa, $buf)?;\n\n Serialize::serialize(ra, $buf)?;\n\n }\n\n };\n\n ($self:ident, $count:expr, $buf:expr, $enumname:ident, $enumident:ident, $tyaa:ty, $tyba:ty, $tyca:ty, $tyda:ty, $tyea:ty, $tyfa:ty, $tyga:ty, $tyha:ty, $tyia:ty, $tyja:ty, $tyka:ty, $tyla:ty, $tyma:ty, $tyna:ty, $tyoa:ty, $typa:ty, $tyqa:ty, $tyra:ty, $tysa:ty) => {\n\n if let $enumname::$enumident( aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa,pa,qa,ra,sa,) = $self {\n\n Serialize::serialize($count, $buf)?;\n\n Serialize::serialize(aa, $buf)?;\n\n Serialize::serialize(ba, $buf)?;\n\n Serialize::serialize(ca, $buf)?;\n\n Serialize::serialize(da, $buf)?;\n", "file_path": "shared/noodle/src/tuple_match.rs", "rank": 99, "score": 71336.24292385629 } ]
Rust
roapi-http/tests/helpers.rs
zemelLeong/roapi
3ace6078fa9b31cde12e367caf88ca31938c00cb
use std::path::PathBuf; use columnq::datafusion::arrow; use columnq::table::{KeyValueSource, TableColumn, TableLoadOption, TableSchema, TableSource}; use roapi_http::config::Config; use roapi_http::startup::Application; pub fn test_data_path(relative_path: &str) -> String { let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR")); d.push("../test_data"); d.push(relative_path); d.to_string_lossy().to_string() } pub async fn test_api_app_with_tables(tables: Vec<TableSource>) -> (Application, String) { test_api_app(tables, vec![]).await } pub async fn test_api_app_with_kvstores(kvstores: Vec<KeyValueSource>) -> (Application, String) { test_api_app(vec![], kvstores).await } pub async fn test_api_app( tables: Vec<TableSource>, kvstores: Vec<KeyValueSource>, ) -> (Application, String) { let config = Config { addr: "localhost:0".to_string().into(), tables, disable_read_only: false, kvstores, }; let app = Application::build(config) .await .expect("Failed to build application config"); let port = app.port(); let address = format!("http://localhost:{}", port); (app, address) } pub async fn http_get(url: &str, accept: Option<&str>) -> reqwest::Response { let request = reqwest::Client::new().get(url); let request = if let Some(accept) = accept { request.header("Accept", accept) } else { request }; request.send().await.expect("Unable to execute GET request") } pub async fn http_post(url: &str, payload: impl Into<reqwest::Body>) -> reqwest::Response { reqwest::Client::new() .post(url) .body(payload) .send() .await .expect("Unable to execute POST request") } pub fn get_spacex_table() -> TableSource { let json_source_path = test_data_path("spacex_launches.json"); TableSource::new("spacex_launches".to_string(), json_source_path) } pub fn get_uk_cities_table() -> TableSource { TableSource::new( "uk_cities".to_string(), test_data_path("uk_cities_with_headers.csv"), ) } pub fn get_ubuntu_ami_table() -> TableSource { TableSource::new("ubuntu_ami", test_data_path("ubuntu-ami.json")) 
.with_option(TableLoadOption::json { pointer: Some("/aaData".to_string()), array_encoded: Some(true), }) .with_schema(TableSchema { columns: vec![ TableColumn { name: "zone".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "name".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "version".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "arch".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "instance_type".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "release".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "ami_id".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "aki_id".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, ], }) } pub fn get_spacex_launch_name_kvstore() -> KeyValueSource { KeyValueSource::new( "spacex_launch_name", test_data_path("spacex_launches.json"), "id", "name", ) }
use std::path::PathBuf; use columnq::datafusion::arrow; use columnq::table::{KeyValueSource, TableColumn, TableLoadOption, TableSchema, TableSource}; use roapi_http::config::Config; use roapi_http::startup::Application; pub fn test_data_path(relative_path: &str) -> String { let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR")); d.push("../test_data"); d.push(relative_path); d.to_string_lossy().to_string() } pub async fn test_api_app_with_tables(tables: Vec<TableSource>) -> (Application, String) { test_api_app(tables, vec![]).await } pub async fn test_api_app_with_kvstores(kvstores: Vec<KeyValueSource>) -> (Application, String) { test_api_app(vec![], kvstores).await } pub async fn test_api_app( tables: Vec<TableSource>, kvstores: Vec<KeyValueSource>, ) -> (Application, String) { let config = Config { addr: "localhost:0".to_string().into(), tables, disable_read_only: false, kvstores, }; let app = Application::build(config) .await .expect("Failed to build application config"); let port = app.port(); let address = format!("http://localhost:{}", port); (app, address) } pub async fn http_get(url: &str, accept: Option<&str>) -> reqwest::Response { let request = reqwest::Client::new().get(url); let request = if let Some(accept) = accept { request.header("Accept", accept) } else { request }; request.send().await.expect("Unable to execute GET request") } pub async fn http_post(url: &str, payload: impl Into<reqwest::Body>) -> reqwest::Response { reqwest::Client::new() .post(url) .body(payload) .send() .await .expect("Unable to execute POST request") } pub fn get_spacex_table() -> TableSource { let json_source_path = test_data_path("spacex_launches.json"); TableSource::new("spacex_launches".to_string(), json_source_path) } pub fn get_uk_cities_table() -> TableSource { TableSource::new( "uk_cities".to_string(), test_data_path("uk_cities_with_headers.csv"), ) } pub fn get_ubuntu_ami_table() -> TableSource { TableSource::new("ubuntu_ami", test_data_path("ubuntu-ami.json")) 
.with_option(TableLoadOption::json { pointer: Some("/aaData".to_string()), array_encoded: Some(true), }) .with_schema(TableSchema { columns: vec![ TableColumn { name: "zone".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "name".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "version".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "arch".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "instance_type".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "release".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "ami_id".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, TableColumn { name: "aki_id".to_string(), data_type: arrow::datatypes::DataType::Utf8, nullable: true, }, ], }) }
pub fn get_spacex_launch_name_kvstore() -> KeyValueSource { KeyValueSource::new( "spacex_launch_name", test_data_path("spacex_launches.json"), "id", "name", ) }
function_block-full_function
[ { "content": "pub fn column_sort_expr_asc(column: impl Into<String>) -> Expr {\n\n Expr::Sort {\n\n expr: Box::new(Expr::Column(Column::from_name(column))),\n\n asc: true,\n\n nulls_first: true,\n\n }\n\n}\n\n\n\npub mod graphql;\n\npub mod rest;\n\npub mod sql;\n", "file_path": "columnq/src/query/mod.rs", "rank": 0, "score": 224410.16996309193 }, { "content": "#[inline]\n\npub fn bytes_to_resp(bytes: Vec<u8>, content_type: &'static str) -> impl IntoResponse {\n\n let mut res = Response::new(Body::from(bytes));\n\n res.headers_mut().insert(\n\n header::CONTENT_TYPE,\n\n header::HeaderValue::from_static(content_type),\n\n );\n\n res\n\n}\n\n\n", "file_path": "roapi-http/src/api/mod.rs", "rank": 1, "score": 190469.515768284 }, { "content": "pub fn column_sort_expr_desc(column: String) -> Expr {\n\n Expr::Sort {\n\n expr: Box::new(Expr::Column(Column::from_name(column))),\n\n asc: false,\n\n nulls_first: true,\n\n }\n\n}\n\n\n", "file_path": "columnq/src/query/mod.rs", "rank": 2, "score": 188833.8041719427 }, { "content": "pub fn test_data_path(relative_path: &str) -> String {\n\n let mut d = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n d.push(\"../test_data\");\n\n d.push(relative_path);\n\n d.to_string_lossy().to_string()\n\n}\n\n\n", "file_path": "columnq/src/test_util.rs", "rank": 3, "score": 178344.91493553 }, { "content": "pub fn test_data_path(relative_path: &str) -> String {\n\n let mut d = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n d.push(\"../test_data\");\n\n d.push(relative_path);\n\n d.to_string_lossy().to_string()\n\n}\n", "file_path": "roapi-http/src/test_util.rs", "rank": 5, "score": 175430.52978347207 }, { "content": "/// For parsing table URI arg in CLI\n\npub fn parse_table_uri_arg(uri_arg: &str) -> Result<TableSource, ColumnQError> {\n\n // separate uri from table load options\n\n let mut uri_args = uri_arg.split(',');\n\n\n\n let uri = uri_args\n\n .next()\n\n .ok_or_else(|| ColumnQError::Generic(format!(\"invalid table URI argument: 
{}\", uri_arg)))?;\n\n let split = uri.splitn(2, '=').collect::<Vec<&str>>();\n\n\n\n let (table_name, uri) = match split.len() {\n\n 1 => {\n\n let uri = split[0];\n\n let table_name = match Path::new(uri).file_stem() {\n\n Some(s) => Ok(s),\n\n None => Path::new(uri)\n\n .file_name()\n\n .ok_or_else(|| ColumnQError::Generic(format!(\"invalid table URI: {}\", uri))),\n\n }?\n\n .to_str()\n\n .ok_or_else(|| ColumnQError::Generic(format!(\"invalid table URI string: {}\", uri)))?;\n", "file_path": "columnq/src/table/mod.rs", "rank": 6, "score": 172765.91719485947 }, { "content": "pub fn get_configuration() -> Result<Config, anyhow::Error> {\n\n let matches = clap::Command::new(\"roapi-http\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .author(\"QP Hou\")\n\n .about(\n\n \"Create full-fledged APIs for static datasets without writing a single line of code.\",\n\n )\n\n .arg_required_else_help(true)\n\n .args(&[address_arg(), config_arg(), read_only_arg(), table_arg()])\n\n .get_matches();\n\n\n\n let mut config: Config = match matches.value_of(\"config\") {\n\n None => Config::default(),\n\n Some(config_path) => {\n\n let config_content = fs::read_to_string(config_path)\n\n .with_context(|| format!(\"Failed to read config file: {}\", config_path))?;\n\n\n\n serde_yaml::from_str(&config_content).context(\"Failed to parse YAML config\")?\n\n }\n\n };\n", "file_path": "roapi-http/src/config.rs", "rank": 7, "score": 168526.5889724042 }, { "content": "#[inline]\n\npub fn bytes_to_json_resp(bytes: Vec<u8>) -> impl IntoResponse {\n\n bytes_to_resp(bytes, \"application/json\")\n\n}\n\n\n", "file_path": "roapi-http/src/api/mod.rs", "rank": 8, "score": 166420.0393226035 }, { "content": "fn infer_schema(rows: &[Vec<String>]) -> Schema {\n\n let mut col_types: HashMap<&str, HashSet<DataType>> = HashMap::new();\n\n\n\n let col_names = &rows[0];\n\n\n\n rows.iter().skip(1).for_each(|row| {\n\n row.iter().enumerate().for_each(|(i, col_val)| {\n\n let col_name = &col_names[i];\n\n 
let col_type = infer_value_type(col_val);\n\n let entry = col_types.entry(col_name).or_insert_with(HashSet::new);\n\n entry.insert(col_type);\n\n });\n\n });\n\n\n\n let fields: Vec<Field> = col_names\n\n .iter()\n\n .map(|col_name| {\n\n let set = col_types.entry(col_name).or_insert_with(|| {\n\n // TODO: this should never happen, maybe we should use panic instead?\n\n let mut set = HashSet::new();\n", "file_path": "columnq/src/table/google_spreadsheets.rs", "rank": 10, "score": 161436.66615221096 }, { "content": "fn sheet_values_to_record_batch(values: &[Vec<String>]) -> Result<RecordBatch, ColumnQError> {\n\n let schema = infer_schema(values);\n\n\n\n let arrays = schema\n\n .fields()\n\n .iter()\n\n .enumerate()\n\n .map(|(i, field)| {\n\n // skip header row\n\n let rows_iter = values.iter().skip(1);\n\n\n\n Ok(match field.data_type() {\n\n DataType::Boolean => Arc::new(\n\n rows_iter\n\n .map(|row| row.get(i).map(|v| parse_boolean(v)))\n\n .collect::<BooleanArray>(),\n\n ) as ArrayRef,\n\n DataType::Int64 => Arc::new(\n\n rows_iter\n\n .map(|row| {\n", "file_path": "columnq/src/table/google_spreadsheets.rs", "rank": 11, "score": 161265.75610811182 }, { "content": "pub fn parse_uri<'a>(path: &'a str) -> Result<(&'a str, &'a str), ColumnQError> {\n\n let parts: Vec<&'a str> = path.split(\"://\").collect();\n\n\n\n if parts.len() <= 1 || parts[0] != \"s3\" {\n\n return Err(ColumnQError::InvalidUri(format!(\n\n \"{} is not a valid S3 URI\",\n\n path\n\n )));\n\n }\n\n\n\n let mut path_parts = parts[1].splitn(2, '/');\n\n let bucket = path_parts\n\n .next()\n\n .ok_or_else(|| ColumnQError::InvalidUri(\"missing s3 bucket\".to_string()))?;\n\n let key = path_parts\n\n .next()\n\n .ok_or_else(|| ColumnQError::InvalidUri(\"missing s3 key\".to_string()))?;\n\n\n\n Ok((bucket, key))\n\n}\n\n\n", "file_path": "columnq/src/io/s3.rs", "rank": 12, "score": 161225.26426047916 }, { "content": "fn read_partition<R: Read>(mut r: R, batch_size: usize) -> Result<Vec<RecordBatch>, 
ColumnQError> {\n\n let mut buffer = Vec::new();\n\n r.read_to_end(&mut buffer).map_err(|_| {\n\n ColumnQError::LoadDelta(\"failed to copy parquet data into memory\".to_string())\n\n })?;\n\n\n\n let file_reader = SerializedFileReader::new(SliceableCursor::new(buffer))\n\n .map_err(ColumnQError::parquet_file_reader)?;\n\n let mut arrow_reader = ParquetFileArrowReader::new(Arc::new(file_reader));\n\n\n\n let record_batch_reader = arrow_reader\n\n .get_record_reader(batch_size)\n\n .map_err(ColumnQError::parquet_record_reader)?;\n\n\n\n Ok(record_batch_reader\n\n .into_iter()\n\n .collect::<arrow::error::Result<Vec<RecordBatch>>>()?)\n\n}\n\n\n\npub async fn to_mem_table(\n", "file_path": "columnq/src/table/delta.rs", "rank": 16, "score": 147002.6199453592 }, { "content": "pub fn register_table_properties(dfctx: &mut SessionContext) -> anyhow::Result<()> {\n\n dfctx.register_table(\"properties\", Arc::new(properties_table()?))?;\n\n Ok(())\n\n}\n\n\n\npub async fn register_table_ubuntu_ami(dfctx: &mut SessionContext) -> anyhow::Result<()> {\n\n dfctx.register_table(\"ubuntu_ami\", ubuntu_ami_table().await?)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "columnq/src/test_util.rs", "rank": 17, "score": 145507.7383219293 }, { "content": "pub fn table_query_to_df(\n\n dfctx: &datafusion::execution::context::SessionContext,\n\n table_name: &str,\n\n params: &HashMap<String, String>,\n\n) -> Result<Arc<datafusion::dataframe::DataFrame>, QueryError> {\n\n lazy_static! 
{\n\n static ref RE_REST_FILTER: Regex =\n\n Regex::new(r\"filter\\[(?P<column>.+)\\](?P<op>.+)?\").unwrap();\n\n }\n\n\n\n let mut df = dfctx\n\n .table(table_name)\n\n .map_err(|e| QueryError::invalid_table(e, table_name))?;\n\n\n\n // filter[col1]eq='foo'\n\n // filter[col2]lt=2\n\n for (key, val) in params.iter().filter(|(k, _)| k.starts_with(\"filter[\")) {\n\n match RE_REST_FILTER.captures(key) {\n\n Some(caps) => {\n\n let col_expr: Box<Expr> = Box::new(match caps.name(\"column\") {\n", "file_path": "columnq/src/query/rest.rs", "rank": 18, "score": 123269.06586867201 }, { "content": "fn parse_boolean(s: &str) -> bool {\n\n s.eq_ignore_ascii_case(\"true\")\n\n}\n\n\n", "file_path": "columnq/src/table/google_spreadsheets.rs", "rank": 19, "score": 118923.8794471056 }, { "content": "// TODO: should we support optional column?\n\nfn infer_value_type(v: &str) -> DataType {\n\n // match order matters\n\n match v {\n\n // TODO: support Date64 and Time64\n\n _ if v.parse::<i64>().is_ok() => DataType::Int64,\n\n _ if v.parse::<f64>().is_ok() => DataType::Float64,\n\n _ => match v.to_lowercase().as_str() {\n\n \"false\" | \"true\" => DataType::Boolean,\n\n _ => DataType::Utf8,\n\n },\n\n }\n\n}\n\n\n\n// util wrapper for calling google spreadsheet API\n\nasync fn gs_api_get(token: &str, url: &str) -> Result<reqwest::Response, ColumnQError> {\n\n Client::builder()\n\n .build()\n\n .map_err(|e| {\n\n ColumnQError::GoogleSpreadsheets(format!(\"Failed to initialize HTTP client: {}\", e))\n\n })?\n\n .get(url)\n\n .bearer_auth(token)\n\n .send()\n\n .await\n\n .map_err(|e| ColumnQError::GoogleSpreadsheets(format!(\"Failed to send API request: {}\", e)))\n\n}\n\n\n", "file_path": "columnq/src/table/google_spreadsheets.rs", "rank": 20, "score": 114018.57662834428 }, { "content": "pub fn register_app_routes<H: HandlerCtx>() -> Router {\n\n let mut router = Router::new()\n\n .route(\"/api/tables/:table_name\", get(api::rest::get_table::<H>))\n\n .route(\"/api/sql\", 
post(api::sql::post::<H>))\n\n .route(\"/api/kv/:kv_name/:key\", get(api::kv::get::<H>))\n\n .route(\"/api/graphql\", post(api::graphql::post::<H>))\n\n .route(\"/api/schema\", get(api::schema::schema::<H>));\n\n\n\n if H::read_only_mode() {\n\n router = router.route(\"/api/table\", post(api::register::register_table_read_only));\n\n } else {\n\n router = router.route(\"/api/table\", post(api::register::register_table::<H>));\n\n }\n\n\n\n router\n\n}\n", "file_path": "roapi-http/src/api/routes.rs", "rank": 21, "score": 107638.53986498242 }, { "content": "fn json_vec_to_partition(\n\n json_rows: Vec<Value>,\n\n provided_schema: &Option<TableSchema>,\n\n batch_size: usize,\n\n array_encoded: bool,\n\n) -> Result<(arrow::datatypes::Schema, Vec<RecordBatch>), ColumnQError> {\n\n // load schema\n\n let schema = match provided_schema {\n\n Some(s) => s.into(),\n\n None => arrow::json::reader::infer_json_schema_from_iterator(\n\n json_rows.iter().map(|v| Ok(v.clone())),\n\n )\n\n .map_err(|e| {\n\n ColumnQError::LoadJson(format!(\"Failed to infer schema from JSON data: {}\", e))\n\n })?,\n\n };\n\n\n\n // decode to arrow record batch\n\n let decoder = arrow::json::reader::Decoder::new(Arc::new(schema.clone()), batch_size, None);\n\n let mut batches = vec![];\n", "file_path": "columnq/src/table/json.rs", "rank": 22, "score": 105099.99375877179 }, { "content": "fn json_partition_to_vec(\n\n json_partition: &Value,\n\n pointer: Option<&str>,\n\n) -> Result<Vec<Value>, ColumnQError> {\n\n let mut value_ref = json_partition;\n\n\n\n if let Some(p) = pointer {\n\n match value_ref.pointer(p) {\n\n Some(v) => value_ref = v,\n\n None => {\n\n return Err(ColumnQError::LoadJson(format!(\n\n \"Invalid json pointer: {}\",\n\n p\n\n )))\n\n }\n\n }\n\n }\n\n\n\n match value_ref.as_array() {\n\n Some(arr) => Ok(arr.to_vec()),\n\n None => Err(ColumnQError::LoadJson(format!(\n\n \"{} is not an array\",\n\n pointer.unwrap_or(\"JSON data\")\n\n ))),\n\n }\n\n}\n\n\n", "file_path": 
"columnq/src/table/json.rs", "rank": 23, "score": 105099.99375877179 }, { "content": "fn address_arg() -> clap::Arg<'static> {\n\n clap::Arg::new(\"addr\")\n\n .help(\"bind address\")\n\n .required(false)\n\n .takes_value(true)\n\n .value_name(\"IP:PORT\")\n\n .long(\"addr\")\n\n .short('a')\n\n}\n\n\n", "file_path": "roapi-http/src/config.rs", "rank": 24, "score": 99568.99164536754 }, { "content": "fn table_arg() -> clap::Arg<'static> {\n\n clap::Arg::new(\"table\")\n\n .help(\"Table sources to load. Table option can be provided as optional setting as part of the table URI, for example: `blogs=s3://bucket/key,format=delta`. Set table uri to `stdin` if you want to consume table data from stdin as part of a UNIX pipe. If no table_name is provided, a table name will be derived from the filename in URI.\")\n\n .takes_value(true)\n\n .required(false)\n\n .number_of_values(1)\n\n .multiple_occurrences(true)\n\n .value_name(\"[table_name=]uri[,option_key=option_value]\")\n\n .long(\"table\")\n\n .short('t')\n\n}\n\n\n", "file_path": "roapi-http/src/config.rs", "rank": 25, "score": 97096.55307743767 }, { "content": "pub fn query_to_df(\n\n dfctx: &datafusion::execution::context::SessionContext,\n\n q: &str,\n\n) -> Result<Arc<datafusion::dataframe::DataFrame>, QueryError> {\n\n let doc = parse_query::<&str>(q)?;\n\n\n\n let def = match doc.definitions.len() {\n\n 1 => match &doc.definitions[0] {\n\n Definition::Operation(op_def) => op_def,\n\n Definition::Fragment(_) => {\n\n return Err(QueryError {\n\n error: \"invalid graphql query\".to_string(),\n\n message: \"TODO: fragment definition not supported, please file a Github issue\"\n\n .to_string(),\n\n });\n\n }\n\n },\n\n 0 => {\n\n return Err(QueryError {\n\n error: \"invalid graphql query\".to_string(),\n", "file_path": "columnq/src/query/graphql.rs", "rank": 26, "score": 97019.1289135225 }, { "content": "#[async_trait]\n\npub trait HandlerCtx: Send + Sync + 'static {\n\n fn read_only_mode() -> bool;\n\n\n\n async fn 
load_table(&self, table: &TableSource) -> Result<(), ColumnQError>;\n\n\n\n async fn schemas_json_bytes(&self) -> Result<Vec<u8>, ApiErrResp>;\n\n\n\n async fn table_schema_json_bytes(&self, table_name: &str) -> Result<Vec<u8>, ApiErrResp>;\n\n\n\n async fn query_graphql(\n\n &self,\n\n query: &str,\n\n ) -> Result<Vec<arrow::record_batch::RecordBatch>, QueryError>;\n\n\n\n async fn query_sql(\n\n &self,\n\n query: &str,\n\n ) -> Result<Vec<arrow::record_batch::RecordBatch>, QueryError>;\n\n\n\n async fn query_rest_table(\n", "file_path": "roapi-http/src/api/mod.rs", "rank": 27, "score": 95740.69148779089 }, { "content": "pub fn record_batches_to_bytes(\n\n batches: &[arrow::record_batch::RecordBatch],\n\n) -> Result<Vec<u8>, ParquetError> {\n\n let cursor = parquet::file::writer::InMemoryWriteableCursor::default();\n\n {\n\n if !batches.is_empty() {\n\n let schema = batches[0].schema();\n\n\n\n let mut writer = parquet::arrow::ArrowWriter::try_new(cursor.clone(), schema, None)?;\n\n for batch in batches {\n\n writer.write(batch)?;\n\n }\n\n writer.close()?;\n\n }\n\n }\n\n\n\n let result = cursor.into_inner().expect(\"Should not fail\");\n\n Ok(result)\n\n}\n", "file_path": "columnq/src/encoding/parquet.rs", "rank": 28, "score": 95081.45479255382 }, { "content": "pub fn record_batches_to_bytes(\n\n batches: &[arrow::record_batch::RecordBatch],\n\n) -> Result<Vec<u8>, ColumnQError> {\n\n let json_rows = arrow::json::writer::record_batches_to_json_rows(batches)?;\n\n serde_json::to_vec(&json_rows).map_err(ColumnQError::json_parse)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::sync::Arc;\n\n\n\n use datafusion::arrow::array::*;\n\n use datafusion::arrow::datatypes::*;\n\n use datafusion::arrow::record_batch::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n\n fn serialize_date_columns() {\n\n let schema = Schema::new(vec![\n", "file_path": "columnq/src/encoding/json.rs", "rank": 29, "score": 95081.45479255382 }, { "content": "pub fn 
record_batches_to_bytes(\n\n batches: &[arrow::record_batch::RecordBatch],\n\n) -> Result<Vec<u8>, ArrowError> {\n\n let mut cursor = std::io::Cursor::new(Vec::new());\n\n {\n\n let mut writer = arrow::csv::Writer::new(&mut cursor);\n\n for batch in batches {\n\n writer.write(batch)?;\n\n }\n\n }\n\n\n\n Ok(cursor.into_inner().to_vec())\n\n}\n", "file_path": "columnq/src/encoding/csv.rs", "rank": 30, "score": 95081.45479255382 }, { "content": "pub fn record_batches_to_stream_bytes(\n\n batches: &[arrow::record_batch::RecordBatch],\n\n) -> Result<Vec<u8>, ArrowError> {\n\n let mut buf = Vec::new();\n\n\n\n // TODO: write out schema regardless even for empty record batch?\n\n // see: https://issues.apache.org/jira/browse/ARROW-2119\n\n if !batches.is_empty() {\n\n let schema = batches[0].schema();\n\n let mut writer = StreamWriter::try_new(&mut buf, &schema)?;\n\n for batch in batches {\n\n writer.write(batch)?;\n\n }\n\n }\n\n\n\n Ok(buf)\n\n}\n\n\n", "file_path": "columnq/src/encoding/arrow.rs", "rank": 31, "score": 93261.34307863473 }, { "content": "pub fn encode_record_batches(\n\n content_type: encoding::ContentType,\n\n batches: &[arrow::record_batch::RecordBatch],\n\n) -> Result<impl IntoResponse, ApiErrResp> {\n\n let payload = match content_type {\n\n encoding::ContentType::Json => encoding::json::record_batches_to_bytes(batches)\n\n .map_err(ApiErrResp::json_serialization)?,\n\n encoding::ContentType::Csv => encoding::csv::record_batches_to_bytes(batches)\n\n .map_err(ApiErrResp::csv_serialization)?,\n\n encoding::ContentType::ArrowFile => encoding::arrow::record_batches_to_file_bytes(batches)\n\n .map_err(ApiErrResp::arrow_file_serialization)?,\n\n encoding::ContentType::ArrowStream => {\n\n encoding::arrow::record_batches_to_stream_bytes(batches)\n\n .map_err(ApiErrResp::arrow_stream_serialization)?\n\n }\n\n encoding::ContentType::Parquet => encoding::parquet::record_batches_to_bytes(batches)\n\n .map_err(ApiErrResp::parquet_serialization)?,\n\n 
};\n\n\n\n Ok(bytes_to_resp(payload, content_type.to_str()))\n", "file_path": "roapi-http/src/api/mod.rs", "rank": 32, "score": 93261.34307863473 }, { "content": "pub fn record_batches_to_file_bytes(\n\n batches: &[arrow::record_batch::RecordBatch],\n\n) -> Result<Vec<u8>, ArrowError> {\n\n let mut buf = Vec::new();\n\n\n\n // TODO: write out schema regardless even for empty record batch?\n\n // see: https://issues.apache.org/jira/browse/ARROW-2119\n\n if !batches.is_empty() {\n\n let schema = batches[0].schema();\n\n let mut writer = FileWriter::try_new(&mut buf, &schema)?;\n\n for batch in batches {\n\n writer.write(batch)?;\n\n }\n\n writer.finish()?;\n\n }\n\n\n\n Ok(buf)\n\n}\n", "file_path": "columnq/src/encoding/arrow.rs", "rank": 33, "score": 93261.34307863473 }, { "content": "fn invalid_query(message: String) -> QueryError {\n\n QueryError {\n\n error: \"invalid graphql query\".to_string(),\n\n message,\n\n }\n\n}\n\n\n", "file_path": "columnq/src/query/graphql.rs", "rank": 34, "score": 90087.23272454276 }, { "content": "pub fn partitions_from_uri<'a, F, T>(\n\n t: &'a TableSource,\n\n uri: URIReference<'a>,\n\n partition_reader: F,\n\n) -> Result<Vec<T>, ColumnQError>\n\nwhere\n\n F: FnMut(std::fs::File) -> Result<T, ColumnQError>,\n\n{\n\n let fs_path = uri.path().to_string();\n\n let mut file_ext = \".\".to_string();\n\n file_ext.push_str(t.extension()?);\n\n debug!(\"building file list from path {}...\", fs_path);\n\n let files = build_file_list(&fs_path, &file_ext).map_err(|e| {\n\n ColumnQError::FileStore(format!(\n\n \"Failed to build file list from path `{}`: {}\",\n\n fs_path, e\n\n ))\n\n })?;\n\n\n\n debug!(\"loading file partitions: {:?}\", files);\n\n partitions_from_iterator(files.iter().map(|s| s.as_str()), partition_reader)\n\n}\n", "file_path": "columnq/src/io/fs.rs", "rank": 35, "score": 87354.51712589321 }, { "content": "pub fn partitions_from_iterator<'a, F, T, I>(\n\n path_iter: I,\n\n mut partition_reader: F,\n\n) -> Result<Vec<T>, 
ColumnQError>\n\nwhere\n\n I: Iterator<Item = &'a str>,\n\n F: FnMut(std::fs::File) -> Result<T, ColumnQError>,\n\n{\n\n // TODO: load partitions in parallel\n\n let partitions = path_iter\n\n .map(|fpath| {\n\n debug!(\"loading file from path: {}\", fpath);\n\n let reader = fs::File::open(fpath)\n\n .map_err(|e| ColumnQError::FileStore(format!(\"open file error: {}\", e)))?;\n\n\n\n partition_reader(reader)\n\n })\n\n .collect::<Result<Vec<T>, ColumnQError>>()?;\n\n\n\n Ok(partitions)\n\n}\n\n\n", "file_path": "columnq/src/io/fs.rs", "rank": 36, "score": 85804.09421076138 }, { "content": "fn rest_query_value_to_expr(v: &str) -> Result<Expr, QueryError> {\n\n let dialect = sqlparser::dialect::GenericDialect {};\n\n let mut tokenizer = sqlparser::tokenizer::Tokenizer::new(&dialect, v);\n\n let tokens = tokenizer.tokenize().map_err(err_rest_query_value)?;\n\n\n\n let t = &tokens[0];\n\n match t {\n\n // TODO: support column expr instead of just literal\n\n sqlparser::tokenizer::Token::SingleQuotedString(s) => {\n\n Ok(Expr::Literal(ScalarValue::Utf8(Some(s.to_string()))))\n\n }\n\n sqlparser::tokenizer::Token::Number(s, _) => {\n\n if let Ok(n) = s.parse() {\n\n Ok(Expr::Literal(ScalarValue::Int64(Some(n))))\n\n } else if let Ok(n) = s.parse() {\n\n Ok(Expr::Literal(ScalarValue::Float64(Some(n))))\n\n } else {\n\n Err(QueryError {\n\n error: \"rest_query_value\".to_string(),\n\n message: format!(\"invalid REST query numeric value {}\", s),\n\n })\n\n }\n\n }\n\n _ => Err(QueryError {\n\n error: \"rest_query_value\".to_string(),\n\n message: format!(\"invalid REST query value {}\", v),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "columnq/src/query/rest.rs", "rank": 37, "score": 81743.98257559103 }, { "content": "fn json_value_from_reader<R: Read>(r: R) -> Result<Value, ColumnQError> {\n\n let reader = BufReader::new(r);\n\n serde_json::from_reader(reader).map_err(ColumnQError::json_parse)\n\n}\n\n\n", "file_path": "columnq/src/table/json.rs", "rank": 38, "score": 
81689.16850440457 }, { "content": "fn json_schema_from_reader<R: Read>(r: R) -> Result<Schema, ColumnQError> {\n\n let mut reader = BufReader::new(r);\n\n Ok(infer_json_schema(&mut reader, None)?)\n\n}\n\n\n", "file_path": "columnq/src/table/ndjson.rs", "rank": 39, "score": 81689.16850440457 }, { "content": "fn config_arg() -> clap::Arg<'static> {\n\n clap::Arg::new(\"config\")\n\n .help(\"config file path\")\n\n .required(false)\n\n .takes_value(true)\n\n .long(\"config\")\n\n .short('c')\n\n}\n\n\n", "file_path": "roapi-http/src/config.rs", "rank": 40, "score": 79333.27588479468 }, { "content": "fn properties_table() -> anyhow::Result<MemTable> {\n\n let schema = Arc::new(Schema::new(vec![\n\n Field::new(\"address\", DataType::Utf8, false),\n\n Field::new(\"landlord\", DataType::Utf8, false),\n\n Field::new(\"bed\", DataType::Int64, false),\n\n Field::new(\"bath\", DataType::Int64, false),\n\n Field::new(\"occupied\", DataType::Boolean, false),\n\n Field::new(\"monthly_rent\", DataType::Utf8, false),\n\n Field::new(\"lease_expiration_date\", DataType::Utf8, false),\n\n ]));\n\n\n\n let record_batch = RecordBatch::try_new(\n\n schema.clone(),\n\n vec![\n\n Arc::new(StringArray::from(vec![\n\n \"Bothell, WA\",\n\n \"Lynnwood, WA\",\n\n \"Kirkland, WA\",\n\n \"Kent, WA\",\n\n \"Mount Vernon, WA\",\n", "file_path": "columnq/src/test_util.rs", "rank": 41, "score": 74653.3540716269 }, { "content": "// convert order list from graphql argument to datafusion sort columns\n\n//\n\n// sort order matters, thus it's modeled as a list\n\nfn to_datafusion_sort_columns<'a, 'b>(\n\n sort_columns: &'a [Value<'b, &'b str>],\n\n) -> Result<Vec<Expr>, QueryError> {\n\n sort_columns\n\n .iter()\n\n .map(|optval| match optval {\n\n Value::Object(opt) => {\n\n let col = match opt.get(\"field\") {\n\n Some(Value::String(s)) => s,\n\n None => {\n\n return Err(invalid_query(\n\n \"sort option requires `field` argument\".to_string(),\n\n ));\n\n }\n\n _ => {\n\n return 
Err(invalid_query(format!(\n\n \"field in sort option should be a string, got: {}\",\n\n optval,\n\n )));\n\n }\n", "file_path": "columnq/src/query/graphql.rs", "rank": 42, "score": 74224.44802058027 }, { "content": "pub fn encode_type_from_hdr(headers: header::HeaderMap) -> encoding::ContentType {\n\n match headers.get(header::ACCEPT) {\n\n None => encoding::ContentType::Json,\n\n Some(hdr_value) => {\n\n encoding::ContentType::try_from(hdr_value.as_bytes()).unwrap_or(ContentType::Json)\n\n }\n\n }\n\n}\n\n\n", "file_path": "roapi-http/src/api/mod.rs", "rank": 43, "score": 74219.79437199616 }, { "content": "fn operand_to_datafusion_expr<'a, 'b>(operand: &'a Value<'b, &'b str>) -> Result<Expr, QueryError> {\n\n match operand {\n\n Value::Boolean(b) => Ok(Expr::Literal(ScalarValue::Boolean(Some(*b)))),\n\n Value::String(s) => Ok(Expr::Literal(ScalarValue::Utf8(Some(s.to_string())))),\n\n // GraphQL only supports int32 scalar input: http://spec.graphql.org/June2018/#sec-Int, but\n\n // graphql crate only supports in64.\n\n // TODO: set literal value type based on schema?\n\n Value::Int(n) => Ok(Expr::Literal(ScalarValue::Int64(Some(\n\n n.as_i64().ok_or_else(|| {\n\n invalid_query(format!(\n\n \"invalid integer number in filter predicate: {}\",\n\n operand\n\n ))\n\n })?,\n\n )))),\n\n Value::Float(f) => Ok(Expr::Literal(ScalarValue::Float64(Some(f.to_owned())))),\n\n other => Err(invalid_query(format!(\n\n \"invalid operand in filter predicate: {}\",\n\n other,\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "columnq/src/query/graphql.rs", "rank": 44, "score": 73217.37082518602 }, { "content": "pub fn assert_eq_df(df1: Arc<DataFrame>, df2: Arc<DataFrame>) {\n\n assert_eq!(\n\n format!(\"{:?}\", df1.to_logical_plan()),\n\n format!(\"{:?}\", df2.to_logical_plan())\n\n );\n\n}\n", "file_path": "columnq/src/test_util.rs", "rank": 45, "score": 72959.75353804062 }, { "content": "fn read_only_arg() -> clap::Arg<'static> {\n\n clap::Arg::new(\"disable-read-only\")\n\n 
.help(\"Start roapi-http in read write mode\")\n\n .required(false)\n\n .takes_value(false)\n\n .long(\"disable-read-only\")\n\n .short('d')\n\n}\n\n\n", "file_path": "roapi-http/src/config.rs", "rank": 46, "score": 69764.04022739027 }, { "content": "fn decode_json_from_reader<R: Read>(\n\n r: R,\n\n schema_ref: SchemaRef,\n\n batch_size: usize,\n\n) -> Result<Vec<RecordBatch>, ColumnQError> {\n\n let decoder = Decoder::new(schema_ref, batch_size, None);\n\n let mut reader = BufReader::new(r);\n\n let mut value_reader = ValueIter::new(&mut reader, None);\n\n let mut batches = vec![];\n\n while let Some(batch) = decoder.next_batch(&mut value_reader)? {\n\n batches.push(batch);\n\n }\n\n Ok(batches)\n\n}\n\n\n\npub async fn to_mem_table(\n\n t: &TableSource,\n\n) -> Result<datafusion::datasource::MemTable, ColumnQError> {\n\n let batch_size = t.batch_size;\n\n\n", "file_path": "columnq/src/table/ndjson.rs", "rank": 47, "score": 68114.96068009525 }, { "content": "#[allow(dead_code)]\n\nfn config_path() -> anyhow::Result<PathBuf> {\n\n let mut home =\n\n dirs::home_dir().ok_or_else(|| anyhow!(\"Failed to locate user home directory\"))?;\n\n\n\n home.push(\".config\");\n\n home.push(\"columnq\");\n\n\n\n Ok(home)\n\n}\n\n\n", "file_path": "columnq-cli/src/main.rs", "rank": 48, "score": 68041.10323159433 }, { "content": "fn table_arg() -> clap::Arg<'static> {\n\n clap::Arg::new(\"table\")\n\n .long_help(\"Table sources to load. Table option can be provided as optional setting as part of the table URI, for example: `blogs=s3://bucket/key,format=delta`. Set table uri to `stdin` if you want to consume table data from stdin as part of a UNIX pipe. 
If no table_name is provided, a table name will be derived from the filename in URI.\")\n\n .takes_value(true)\n\n .required(false)\n\n .number_of_values(1)\n\n .multiple_occurrences(true)\n\n .value_name(\"[table_name=]uri[,option_key=option_value]\")\n\n .long(\"table\")\n\n .short('t')\n\n}\n\n\n\nasync fn console_loop(cq: &ColumnQ) -> anyhow::Result<()> {\n\n let rl_history = history_path()?;\n\n\n\n let mut readline = Editor::<()>::new();\n\n if let Err(e) = readline.load_history(&rl_history) {\n\n debug!(\"no query history loaded: {:?}\", e);\n\n }\n\n\n", "file_path": "columnq-cli/src/main.rs", "rank": 49, "score": 67404.01300951527 }, { "content": "fn coerce_type(l: DataType, r: DataType) -> DataType {\n\n match (l, r) {\n\n (DataType::Boolean, DataType::Boolean) => DataType::Boolean,\n\n (DataType::Date32, DataType::Date32) => DataType::Date32,\n\n\n\n (DataType::Date64, DataType::Date64)\n\n | (DataType::Date64, DataType::Date32)\n\n | (DataType::Date32, DataType::Date64) => DataType::Date64,\n\n\n\n (DataType::Int64, DataType::Int64) => DataType::Int64,\n\n\n\n (DataType::Float64, DataType::Float64)\n\n | (DataType::Float64, DataType::Int64)\n\n | (DataType::Int64, DataType::Float64) => DataType::Float64,\n\n\n\n _ => DataType::Utf8,\n\n }\n\n}\n\n\n", "file_path": "columnq/src/table/google_spreadsheets.rs", "rank": 50, "score": 60131.76586128031 }, { "content": "fn new_s3_client() -> Result<rusoto_s3::S3Client, ColumnQError> {\n\n let region = rusoto_core::Region::default();\n\n let dispatcher = rusoto_core::HttpClient::new()\n\n .map_err(|_| ColumnQError::S3Store(\"Failed to create request dispatcher\".to_string()))?;\n\n\n\n let client = match std::env::var(\"AWS_WEB_IDENTITY_TOKEN_FILE\") {\n\n Ok(_) => {\n\n let provider = rusoto_sts::WebIdentityProvider::from_k8s_env();\n\n let provider =\n\n rusoto_credential::AutoRefreshingProvider::new(provider).map_err(|e| {\n\n ColumnQError::S3Store(format!(\n\n \"Failed to retrieve S3 credentials with 
message: {}\",\n\n e.message\n\n ))\n\n })?;\n\n rusoto_s3::S3Client::new_with(dispatcher, provider, region)\n\n }\n\n Err(_) => rusoto_s3::S3Client::new_with(\n\n dispatcher,\n\n rusoto_core::credential::ChainProvider::new(),\n\n region,\n\n ),\n\n };\n\n\n\n Ok(client)\n\n}\n\n\n", "file_path": "columnq/src/io/s3.rs", "rank": 51, "score": 60095.16017581187 }, { "content": "fn list_objects<'a>(\n\n bucket: &'a str,\n\n key: &'a str,\n\n) -> Result<impl futures::Stream<Item = Result<String, ColumnQError>> + 'a, ColumnQError> {\n\n struct S3ListState<'a> {\n\n bucket: &'a str,\n\n key: String,\n\n client: rusoto_s3::S3Client,\n\n continuation_token: ContinuationToken,\n\n obj_iter: std::vec::IntoIter<rusoto_s3::Object>,\n\n }\n\n\n\n // add / suffix if missing to to bused in `start_after` listing to exlude directory itself from\n\n // the list result\n\n let key = if key.ends_with('/') {\n\n key.to_string()\n\n } else {\n\n let mut path = key.to_string();\n\n path.push('/');\n\n path\n", "file_path": "columnq/src/io/s3.rs", "rank": 52, "score": 44475.35766961065 }, { "content": "// graphql filter in the format of:\n\n//\n\n// ```\n\n// table(\n\n// filter: {\n\n// col1: { eq: \"val1\" }\n\n// col2: { lt: 5, gt: 0 }\n\n// col3: \"foo\"\n\n// }\n\n// ) {\n\n// col3\n\n// col4\n\n// }\n\n// ```\n\nfn to_datafusion_predicates<'a, 'b>(\n\n col: &'b str,\n\n filter: &'a Value<'b, &'b str>,\n\n) -> Result<Vec<Expr>, QueryError> {\n\n match filter {\n\n Value::Object(obj) => obj\n\n .iter()\n\n .map(|(op, operand)| {\n\n let col_expr = Box::new(Expr::Column(Column::from_name(col.to_string())));\n\n let right_expr = Box::new(operand_to_datafusion_expr(operand)?);\n\n match *op {\n\n \"eq\" => Ok(Expr::BinaryExpr {\n\n left: col_expr,\n\n op: Operator::Eq,\n\n right: right_expr,\n\n }),\n\n \"lt\" => Ok(Expr::BinaryExpr {\n\n left: col_expr,\n\n op: Operator::Lt,\n\n right: right_expr,\n", "file_path": "columnq/src/query/graphql.rs", "rank": 53, "score": 43671.579275555996 }, 
{ "content": "fn history_path() -> anyhow::Result<PathBuf> {\n\n let mut home =\n\n dirs::home_dir().ok_or_else(|| anyhow!(\"Failed to locate user home directory\"))?;\n\n home.push(\".columnq_history\");\n\n Ok(home)\n\n}\n\n\n", "file_path": "columnq-cli/src/main.rs", "rank": 54, "score": 39222.79421682311 }, { "content": "fn bytes_to_stdout(bytes: &[u8]) -> anyhow::Result<()> {\n\n let mut out = std::io::stdout();\n\n out.write_all(bytes)?;\n\n out.flush()?;\n\n Ok(())\n\n}\n\n\n\nasync fn cmd_sql(args: &clap::ArgMatches) -> anyhow::Result<()> {\n\n let config = SessionConfig::default().with_information_schema(true);\n\n let mut cq = ColumnQ::new_with_config(config);\n\n\n\n if let Some(tables) = args.values_of(\"table\") {\n\n for v in tables {\n\n cq.load_table(&parse_table_uri_arg(v)?).await?;\n\n }\n\n }\n\n\n\n match args.value_of(\"SQL\") {\n\n Some(query) => match cq.query_sql(query).await {\n\n Ok(batches) => match args.value_of(\"output\").unwrap_or(\"table\") {\n", "file_path": "columnq-cli/src/main.rs", "rank": 55, "score": 38591.64679498171 }, { "content": "fn num_parse_err(e: std::num::ParseIntError) -> QueryError {\n\n QueryError {\n\n error: \"invalid_numeric_param\".to_string(),\n\n message: format!(\"Failed to parse numeric parameter value: {}\", e),\n\n }\n\n}\n\n\n", "file_path": "columnq/src/query/rest.rs", "rank": 56, "score": 35079.86288016758 }, { "content": "use serde_derive::Deserialize;\n\n\n\nuse anyhow::{Context, Result};\n\n\n\nuse columnq::table::parse_table_uri_arg;\n\nuse columnq::table::KeyValueSource;\n\nuse columnq::table::TableSource;\n\nuse std::fs;\n\n\n\n#[derive(Deserialize, Default)]\n\npub struct Config {\n\n pub addr: Option<String>,\n\n pub tables: Vec<TableSource>,\n\n #[serde(default)]\n\n pub disable_read_only: bool,\n\n #[serde(default)]\n\n pub kvstores: Vec<KeyValueSource>,\n\n}\n\n\n", "file_path": "roapi-http/src/config.rs", "rank": 57, "score": 35027.501068897625 }, { "content": "\n\n if let Some(tables) = 
matches.values_of(\"table\") {\n\n for v in tables {\n\n config.tables.push(parse_table_uri_arg(v)?);\n\n }\n\n }\n\n\n\n if let Some(addr) = matches.value_of(\"addr\") {\n\n config.addr = Some(addr.to_string());\n\n }\n\n\n\n if matches.is_present(\"disable-read-only\") {\n\n config.disable_read_only = true;\n\n }\n\n\n\n Ok(config)\n\n}\n", "file_path": "roapi-http/src/config.rs", "rank": 58, "score": 35021.05335544257 }, { "content": "fn invalid_selection_set(error: datafusion::error::DataFusionError) -> QueryError {\n\n QueryError {\n\n error: \"invalid_selection_set\".to_string(),\n\n message: format!(\"failed to apply selection set for query: {}\", error),\n\n }\n\n}\n\n\n", "file_path": "columnq/src/query/graphql.rs", "rank": 59, "score": 34437.49903951443 }, { "content": "fn err_rest_query_value(error: sqlparser::tokenizer::TokenizerError) -> QueryError {\n\n QueryError {\n\n error: \"rest_query_value\".to_string(),\n\n message: format!(\"invalid REST query value {:?}\", error),\n\n }\n\n}\n\n\n", "file_path": "columnq/src/query/rest.rs", "rank": 60, "score": 34437.49903951443 }, { "content": " #[inline]\n\n pub fn default_batch_size() -> usize {\n\n 8192\n\n }\n\n\n\n #[must_use]\n\n pub fn with_option(mut self, option: impl Into<TableLoadOption>) -> Self {\n\n self.option = Some(option.into());\n\n self\n\n }\n\n\n\n #[must_use]\n\n pub fn with_schema(mut self, schema: impl Into<TableSchema>) -> Self {\n\n self.schema = Some(schema.into());\n\n self\n\n }\n\n\n\n pub fn get_uri_str(&self) -> &str {\n\n match &self.io_source {\n\n TableIoSource::Uri(uri) => uri.as_str(),\n", "file_path": "columnq/src/table/mod.rs", "rank": 61, "score": 32244.68746649171 }, { "content": "impl From<TableColumn> for arrow::datatypes::Field {\n\n fn from(c: TableColumn) -> Self {\n\n // TODO: update upstream arrow::datatypes::Field::new to support taking owned string as\n\n // name argument\n\n arrow::datatypes::Field::new(&c.name, c.data_type, c.nullable)\n\n 
}\n\n}\n\n\n\n#[derive(Deserialize, Clone, Debug, Eq, PartialEq)]\n\n#[serde(deny_unknown_fields)]\n\npub struct TableSchema {\n\n pub columns: Vec<TableColumn>,\n\n}\n\n\n\nimpl From<&TableSchema> for arrow::datatypes::Schema {\n\n fn from(s: &TableSchema) -> Self {\n\n arrow::datatypes::Schema::new(\n\n s.columns\n\n .iter()\n\n .map(|c| c.into())\n", "file_path": "columnq/src/table/mod.rs", "rank": 62, "score": 32243.94479820442 }, { "content": " key: impl Into<String>,\n\n value: impl Into<String>,\n\n ) -> Self {\n\n Self {\n\n name: name.into(),\n\n key: key.into(),\n\n value: value.into(),\n\n io_source: source.into(),\n\n schema: None,\n\n option: None,\n\n }\n\n }\n\n\n\n #[must_use]\n\n pub fn with_option(mut self, option: impl Into<TableLoadOption>) -> Self {\n\n self.option = Some(option.into());\n\n self\n\n }\n\n\n\n #[must_use]\n", "file_path": "columnq/src/table/mod.rs", "rank": 63, "score": 32243.075570667952 }, { "content": "pub mod google_spreadsheets;\n\npub mod json;\n\npub mod ndjson;\n\npub mod parquet;\n\n\n\n#[derive(Deserialize, Clone, Debug, Eq, PartialEq)]\n\n#[serde(deny_unknown_fields)]\n\npub struct TableColumn {\n\n pub name: String,\n\n pub data_type: arrow::datatypes::DataType,\n\n #[serde(default)]\n\n pub nullable: bool,\n\n}\n\n\n\nimpl From<&TableColumn> for arrow::datatypes::Field {\n\n fn from(c: &TableColumn) -> Self {\n\n arrow::datatypes::Field::new(&c.name, c.data_type.clone(), c.nullable)\n\n }\n\n}\n\n\n", "file_path": "columnq/src/table/mod.rs", "rank": 64, "score": 32242.48030494337 }, { "content": " pub fn with_schema(mut self, schema: impl Into<TableSchema>) -> Self {\n\n self.schema = Some(schema.into());\n\n self\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n\n fn uri_deserialization() -> anyhow::Result<()> {\n\n let table_source: TableSource = serde_yaml::from_str(\n\n r#\"\n\nname: \"ubuntu_ami\"\n\nuri: 
\"test_data/ubuntu-ami.json\"\n\noption:\n\n format: \"json\"\n\n pointer: \"/aaData\"\n", "file_path": "columnq/src/table/mod.rs", "rank": 65, "score": 32242.29991391056 }, { "content": " .collect::<Vec<arrow::datatypes::Field>>(),\n\n )\n\n }\n\n}\n\n\n\nimpl From<TableSchema> for arrow::datatypes::Schema {\n\n fn from(s: TableSchema) -> Self {\n\n arrow::datatypes::Schema::new(\n\n s.columns\n\n .into_iter()\n\n .map(|c| c.into())\n\n .collect::<Vec<arrow::datatypes::Field>>(),\n\n )\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone, Eq, PartialEq)]\n\npub struct TableOptionGoogleSpreasheet {\n\n application_secret_path: String,\n\n sheet_title: Option<String>,\n", "file_path": "columnq/src/table/mod.rs", "rank": 66, "score": 32240.476253190114 }, { "content": "\n\n Ok(Arc::new(ListingTable::try_new(\n\n ListingTableConfig::new(df_object_store, table_path)\n\n .with_schema(file_schema)\n\n .with_listing_options(list_opt),\n\n )?))\n\n }\n\n}\n\n\n\npub async fn to_mem_table(t: &TableSource) -> Result<Arc<dyn TableProvider>, ColumnQError> {\n\n let batch_size = t.batch_size;\n\n\n\n let mut schema: Option<Schema> = None;\n\n\n\n let partitions: Vec<Vec<RecordBatch>> =\n\n partitions_from_table_source!(t, |mut r| -> Result<Vec<RecordBatch>, ColumnQError> {\n\n // TODO: this is very inefficient, we are copying the parquet data in memory twice when\n\n // it's being fetched from http store\n\n let mut buffer = Vec::new();\n\n r.read_to_end(&mut buffer).map_err(|_| {\n", "file_path": "columnq/src/table/parquet.rs", "rank": 67, "score": 32240.09937560632 }, { "content": "\n\n (table_name, uri)\n\n }\n\n 2 => (split[0], split[1]),\n\n _ => unreachable!(),\n\n };\n\n\n\n let t = if uri == \"stdin\" {\n\n let mut buffer = Vec::new();\n\n std::io::stdin().read_to_end(&mut buffer).map_err(|e| {\n\n ColumnQError::Generic(format!(\"Failed to read table data from stdin: {:?}\", e))\n\n })?;\n\n TableSource::new(table_name, TableIoSource::Memory(buffer))\n\n } else {\n\n 
TableSource::new(table_name, uri.to_string())\n\n };\n\n\n\n // parse extra options from table uri\n\n let mut option_json = serde_json::map::Map::new();\n\n for opt_str in uri_args {\n", "file_path": "columnq/src/table/mod.rs", "rank": 68, "score": 32240.045958247214 }, { "content": " .unwrap_or_else(|| TableLoadOption::delta(TableOptionDelta::default()));\n\n\n\n let TableOptionDelta { use_memory_table } = opt.as_delta()?;\n\n\n\n let uri_str = t.get_uri_str();\n\n let delta_table = deltalake::open_table(uri_str).await?;\n\n let parsed_uri = t.parsed_uri()?;\n\n let blob_type = io::BlobStoreType::try_from(parsed_uri.scheme())?;\n\n let batch_size = t.batch_size;\n\n\n\n if *use_memory_table {\n\n to_mem_table(delta_table, blob_type, batch_size).await\n\n } else {\n\n to_delta_table(delta_table, blob_type).await\n\n }\n\n}\n\n\n\npub async fn to_delta_table(\n\n delta_table: deltalake::DeltaTable,\n\n blob_type: io::BlobStoreType,\n", "file_path": "columnq/src/table/delta.rs", "rank": 69, "score": 32239.876349139126 }, { "content": " use_memory_table: true,\n\n }),\n\n ),\n\n )\n\n .await?;\n\n\n\n validate_statistics(t.scan(&None, &[], None).await?.statistics());\n\n\n\n match t.as_any().downcast_ref::<MemTable>() {\n\n Some(_) => Ok(()),\n\n None => panic!(\"must be of type datafusion::datasource::MemTable\"),\n\n }\n\n }\n\n\n\n #[tokio::test]\n\n async fn load_delta_as_delta_source() -> Result<(), ColumnQError> {\n\n let t = to_datafusion_table(\n\n &TableSource::new(\"blogs\".to_string(), test_data_path(\"blogs-delta\")).with_option(\n\n TableLoadOption::delta(TableOptionDelta {\n\n use_memory_table: false,\n", "file_path": "columnq/src/table/delta.rs", "rank": 70, "score": 32239.4715132268 }, { "content": " Ok(t)\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Clone, Debug, Eq, PartialEq)]\n\n#[serde(deny_unknown_fields)]\n\npub struct KeyValueSource {\n\n pub name: String,\n\n pub key: String,\n\n pub value: String,\n\n #[serde(flatten)]\n\n pub io_source: 
TableIoSource,\n\n pub schema: Option<TableSchema>,\n\n pub option: Option<TableLoadOption>,\n\n}\n\n\n\nimpl KeyValueSource {\n\n pub fn new(\n\n name: impl Into<String>,\n\n source: impl Into<TableIoSource>,\n", "file_path": "columnq/src/table/mod.rs", "rank": 71, "score": 32238.989258452966 }, { "content": " Self::postgres { .. } => \"postgres\",\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Clone, Debug, PartialEq, Eq)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum TableIoSource {\n\n Uri(String),\n\n Memory(Vec<u8>),\n\n}\n\n\n\nimpl<T: Into<String>> From<T> for TableIoSource {\n\n fn from(s: T) -> Self {\n\n Self::Uri(s.into())\n\n }\n\n}\n\n\n\nimpl TableIoSource {\n\n pub fn as_memory(&self) -> Result<&[u8], ColumnQError> {\n", "file_path": "columnq/src/table/mod.rs", "rank": 72, "score": 32238.875889191288 }, { "content": " }\n\n}\n\n\n\nimpl TableSource {\n\n pub fn new(name: impl Into<String>, source: impl Into<TableIoSource>) -> Self {\n\n let io_source = source.into();\n\n let option = Self::parse_option(&io_source);\n\n Self {\n\n name: name.into(),\n\n io_source,\n\n schema: None,\n\n option,\n\n batch_size: Self::default_batch_size(),\n\n }\n\n }\n\n\n\n pub fn new_with_uri(name: impl Into<String>, uri: impl Into<String>) -> Self {\n\n Self::new(name, uri.into())\n\n }\n\n\n", "file_path": "columnq/src/table/mod.rs", "rank": 73, "score": 32238.677659082587 }, { "content": " t: &TableSource,\n\n ) -> Result<datafusion::datasource::MemTable, ColumnQError> {\n\n debug!(\"loading database table data...\");\n\n let queries = &[format!(\"SELECT * FROM {}\", t.name)];\n\n let mut destination = ArrowDestination::new();\n\n match self {\n\n DatabaseLoader::MySQL => {\n\n let source = MySQLSource::<BinaryProtocol>::new(t.get_uri_str(), 2)\n\n .map_err(|e| ColumnQError::Database(e.to_string()))?;\n\n let dispatcher =\n\n Dispatcher::<\n\n MySQLSource<BinaryProtocol>,\n\n ArrowDestination,\n\n MySQLArrowTransport<BinaryProtocol>,\n\n >::new(source, &mut 
destination, queries, None);\n\n dispatcher\n\n .run()\n\n .map_err(|e| ColumnQError::Database(e.to_string()))?;\n\n }\n\n DatabaseLoader::SQLite => {\n", "file_path": "columnq/src/table/database.rs", "rank": 74, "score": 32238.51972318736 }, { "content": " };\n\n let schema_ref = destination.arrow_schema();\n\n let data: Vec<RecordBatch> = destination.arrow().unwrap();\n\n Ok(datafusion::datasource::MemTable::try_new(\n\n schema_ref,\n\n vec![data],\n\n )?)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(not(feature = \"database\"))]\n\nmod imp {\n\n use crate::error::ColumnQError;\n\n use crate::table::TableSource;\n\n\n\n use super::DatabaseLoader;\n\n\n\n impl DatabaseLoader {\n\n pub fn to_mem_table(\n", "file_path": "columnq/src/table/database.rs", "rank": 75, "score": 32238.320314043547 }, { "content": "\n\nimpl Default for TableOptionParquet {\n\n fn default() -> Self {\n\n Self {\n\n use_memory_table: Self::default_use_memory_table(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone, Eq, PartialEq)]\n\npub struct TableOptionDelta {\n\n #[serde(default = \"TableOptionDelta::default_use_memory_table\")]\n\n use_memory_table: bool,\n\n}\n\n\n\nimpl TableOptionDelta {\n\n #[inline]\n\n pub fn default_use_memory_table() -> bool {\n\n true\n\n }\n", "file_path": "columnq/src/table/mod.rs", "rank": 76, "score": 32237.619395624006 }, { "content": ") -> Result<(Option<Schema>, Vec<Vec<RecordBatch>>), ColumnQError> {\n\n let batch_size = t.batch_size;\n\n let array_encoded = match &t.option {\n\n Some(TableLoadOption::json { array_encoded, .. }) => array_encoded.unwrap_or(false),\n\n _ => false,\n\n };\n\n\n\n if array_encoded && t.schema.is_none() {\n\n return Err(ColumnQError::LoadJson(\n\n \"Array encoded option requires manually specified schema\".to_string(),\n\n ));\n\n }\n\n\n\n let pointer = match &t.option {\n\n Some(TableLoadOption::json { pointer, .. 
}) => pointer.to_owned(),\n\n _ => None,\n\n };\n\n\n\n let mut merged_schema: Option<Schema> = None;\n\n let json_partitions: Vec<Value> = partitions_from_table_source!(t, json_value_from_reader)?;\n", "file_path": "columnq/src/table/json.rs", "rank": 77, "score": 32237.17832023727 }, { "content": "\n\n let (batch_schema, partition) =\n\n json_vec_to_partition(json_rows, &t.schema, batch_size, array_encoded)?;\n\n\n\n merged_schema = Some(match &merged_schema {\n\n Some(s) if s != &batch_schema => Schema::try_merge(vec![s.clone(), batch_schema])?,\n\n _ => batch_schema,\n\n });\n\n\n\n Ok(partition)\n\n })\n\n .collect::<Result<Vec<Vec<RecordBatch>>, ColumnQError>>()?;\n\n\n\n Ok((merged_schema, partitions))\n\n}\n\n\n\npub async fn to_mem_table(\n\n t: &TableSource,\n\n) -> Result<datafusion::datasource::MemTable, ColumnQError> {\n\n let (merged_schema, partitions) = to_partitions(t).await?;\n", "file_path": "columnq/src/table/json.rs", "rank": 78, "score": 32236.862166690982 }, { "content": " delta_table: deltalake::DeltaTable,\n\n blob_type: io::BlobStoreType,\n\n batch_size: usize,\n\n) -> Result<Arc<dyn TableProvider>, ColumnQError> {\n\n if delta_table.get_files().is_empty() {\n\n return Err(ColumnQError::LoadDelta(\"empty delta table\".to_string()));\n\n }\n\n\n\n let delta_schema = delta_table.get_schema()?;\n\n\n\n let paths = delta_table.get_file_uris().collect::<Vec<String>>();\n\n let path_iter = paths.iter().map(|s| s.as_ref());\n\n\n\n let partitions: Vec<Vec<RecordBatch>> = match blob_type {\n\n io::BlobStoreType::FileSystem => io::fs::partitions_from_iterator(\n\n path_iter,\n\n |r| -> Result<Vec<RecordBatch>, ColumnQError> {\n\n read_partition::<std::fs::File>(r, batch_size)\n\n },\n\n )?,\n", "file_path": "columnq/src/table/delta.rs", "rank": 79, "score": 32236.344191787117 }, { "content": "pub enum TableLoadOption {\n\n json {\n\n // JSON query pointer following https://tools.ietf.org/html/rfc6901\n\n pointer: Option<String>,\n\n array_encoded: 
Option<bool>,\n\n },\n\n csv(TableOptionCsv),\n\n ndjson {},\n\n parquet(TableOptionParquet),\n\n google_spreadsheet(TableOptionGoogleSpreasheet),\n\n delta(TableOptionDelta),\n\n arrow {},\n\n arrows {},\n\n mysql {},\n\n sqlite {},\n\n postgres {},\n\n}\n\n\n\nimpl TableLoadOption {\n\n fn as_google_spreadsheet(&self) -> Result<&TableOptionGoogleSpreasheet, ColumnQError> {\n", "file_path": "columnq/src/table/mod.rs", "rank": 80, "score": 32236.3172793432 }, { "content": " &self,\n\n _t: &TableSource,\n\n ) -> Result<datafusion::datasource::MemTable, ColumnQError> {\n\n Err(ColumnQError::Database(\n\n \"Enable 'database' feature flag to support this\".to_string(),\n\n ))\n\n }\n\n }\n\n}\n\n\n\npub use imp::*;\n\n\n\n#[cfg(feature = \"database\")]\n\n#[cfg(test)]\n\nmod tests {\n\n use datafusion::datasource::TableProvider;\n\n use dotenv::dotenv;\n\n use std::env;\n\n\n\n use crate::table::TableSource;\n", "file_path": "columnq/src/table/database.rs", "rank": 81, "score": 32236.27779684328 }, { "content": " Self {\n\n has_header: Self::default_has_header(),\n\n delimiter: Self::default_delimiter(),\n\n projection: Self::default_projection(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone, Eq, PartialEq)]\n\npub struct TableOptionParquet {\n\n #[serde(default = \"TableOptionParquet::default_use_memory_table\")]\n\n use_memory_table: bool,\n\n}\n\n\n\nimpl TableOptionParquet {\n\n #[inline]\n\n pub fn default_use_memory_table() -> bool {\n\n true\n\n }\n\n}\n", "file_path": "columnq/src/table/mod.rs", "rank": 82, "score": 32236.231815904106 }, { "content": "pub enum DatabaseLoader {\n\n MySQL,\n\n SQLite,\n\n Postgres,\n\n}\n\n\n\n#[cfg(feature = \"database\")]\n\nmod imp {\n\n use crate::error::ColumnQError;\n\n use crate::table::TableSource;\n\n use connectorx::prelude::*;\n\n use connectorx::sources::mysql::BinaryProtocol;\n\n use datafusion::arrow::record_batch::RecordBatch;\n\n use log::debug;\n\n\n\n use super::DatabaseLoader;\n\n\n\n impl 
DatabaseLoader {\n\n pub fn to_mem_table(\n\n &self,\n", "file_path": "columnq/src/table/database.rs", "rank": 83, "score": 32235.9009107938 }, { "content": "#[serde(deny_unknown_fields)]\n\npub struct TableSource {\n\n pub name: String,\n\n #[serde(flatten)]\n\n pub io_source: TableIoSource,\n\n pub schema: Option<TableSchema>,\n\n pub option: Option<TableLoadOption>,\n\n #[serde(default = \"TableSource::default_batch_size\")]\n\n pub batch_size: usize,\n\n}\n\n\n\nimpl From<KeyValueSource> for TableSource {\n\n fn from(kv: KeyValueSource) -> Self {\n\n Self {\n\n name: kv.name,\n\n io_source: kv.io_source,\n\n schema: kv.schema,\n\n option: kv.option,\n\n batch_size: Self::default_batch_size(),\n\n }\n", "file_path": "columnq/src/table/mod.rs", "rank": 84, "score": 32235.64919046655 }, { "content": "\n\n use super::*;\n\n\n\n #[tokio::test]\n\n async fn load_mysql() -> anyhow::Result<()> {\n\n dotenv().ok();\n\n if let Ok(name) = env::var(\"TABLE_NAME\") {\n\n let t = DatabaseLoader::MySQL\n\n .to_mem_table(&TableSource::new(name, env::var(\"MYSQL_URL\")?))?;\n\n let stats = t.scan(&None, &[], None).await?.statistics();\n\n assert!(stats.num_rows.is_some());\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "columnq/src/table/database.rs", "rank": 85, "score": 32235.61560419796 }, { "content": "use std::io::Read;\n\nuse std::sync::Arc;\n\n\n\nuse crate::error::ColumnQError;\n\nuse crate::table::{TableLoadOption, TableOptionParquet, TableSource};\n\n\n\nuse datafusion::arrow;\n\nuse datafusion::arrow::datatypes::Schema;\n\nuse datafusion::arrow::record_batch::RecordBatch;\n\nuse datafusion::datafusion_data_access::object_store::local::LocalFileSystem;\n\nuse datafusion::datasource::file_format::parquet::ParquetFormat;\n\nuse datafusion::datasource::listing::{ListingOptions, ListingTable, ListingTableConfig};\n\nuse datafusion::datasource::TableProvider;\n\nuse datafusion::parquet::arrow::{ArrowReader, ParquetFileArrowReader};\n\nuse 
datafusion::parquet::file::reader::SerializedFileReader;\n\nuse datafusion::parquet::file::serialized_reader::SliceableCursor;\n\n\n\npub async fn to_datafusion_table(t: &TableSource) -> Result<Arc<dyn TableProvider>, ColumnQError> {\n\n let opt = t\n\n .option\n", "file_path": "columnq/src/table/parquet.rs", "rank": 86, "score": 32235.61170197884 }, { "content": " )?))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use datafusion::datasource::MemTable;\n\n use datafusion::physical_plan::Statistics;\n\n\n\n use deltalake::DeltaTable;\n\n\n\n use crate::error::ColumnQError;\n\n use crate::test_util::test_data_path;\n\n\n\n #[tokio::test]\n\n async fn load_delta_as_memtable() -> Result<(), ColumnQError> {\n\n let t = to_datafusion_table(\n\n &TableSource::new(\"blogs\".to_string(), test_data_path(\"blogs-delta\")).with_option(\n\n TableLoadOption::delta(TableOptionDelta {\n", "file_path": "columnq/src/table/delta.rs", "rank": 87, "score": 32235.412224421303 }, { "content": "use std::sync::Arc;\n\n\n\nuse datafusion::arrow;\n\nuse datafusion::arrow::datatypes::Schema;\n\nuse datafusion::arrow::record_batch::RecordBatch;\n\nuse log::debug;\n\n\n\nuse crate::error::ColumnQError;\n\nuse crate::table::{TableLoadOption, TableOptionCsv, TableSource};\n\n\n\npub async fn to_mem_table(\n\n t: &TableSource,\n\n) -> Result<datafusion::datasource::MemTable, ColumnQError> {\n\n let opt = t\n\n .option\n\n .clone()\n\n .unwrap_or_else(|| TableLoadOption::csv(TableOptionCsv::default()));\n\n let opt = opt.as_csv()?;\n\n\n\n let has_header = opt.has_header;\n", "file_path": "columnq/src/table/csv.rs", "rank": 88, "score": 32235.30783785076 }, { "content": " )\n\n .await?;\n\n\n\n let stats = t.scan(&None, &[], None).await?.statistics();\n\n assert_eq!(stats.num_rows, Some(37 * 3));\n\n\n\n Ok(())\n\n }\n\n\n\n #[tokio::test]\n\n async fn load_from_memory() -> anyhow::Result<()> {\n\n let csv_content = 
r#\"\n\nc1,c2,c3\n\n1,\"hello\",true\n\n2,\"world\",true\n\n100,\"!\",false\n\n\"#\n\n .to_string();\n\n\n\n let source = TableSource::new(\"test\", TableIoSource::Memory(csv_content.into_bytes()))\n", "file_path": "columnq/src/table/csv.rs", "rank": 89, "score": 32235.264418493298 }, { "content": " Ok(datafusion::datasource::MemTable::try_new(\n\n Arc::new(\n\n merged_schema\n\n .ok_or_else(|| ColumnQError::LoadJson(\"failed to load schema\".to_string()))?,\n\n ),\n\n partitions,\n\n )?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use datafusion::datasource::TableProvider;\n\n\n\n use crate::test_util::*;\n\n\n\n #[tokio::test]\n\n async fn nested_struct_and_lists() -> Result<(), ColumnQError> {\n\n let t = to_mem_table(&TableSource::new(\n", "file_path": "columnq/src/table/json.rs", "rank": 90, "score": 32235.251188368475 }, { "content": " array_encoded: true\n\nschema:\n\n columns:\n\n - name: \"zone\"\n\n data_type: \"Utf8\"\n\n - name: \"name\"\n\n data_type: \"Utf8\"\n\n\"#,\n\n )?;\n\n\n\n assert_eq!(\n\n table_source.io_source,\n\n TableIoSource::Uri(\"test_data/ubuntu-ami.json\".to_string())\n\n );\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn batch_size_deserialisation() -> anyhow::Result<()> {\n", "file_path": "columnq/src/table/mod.rs", "rank": 91, "score": 32235.224841288426 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs;\n\n\n\n use crate::table::TableLoadOption;\n\n use crate::test_util::*;\n\n\n\n #[tokio::test]\n\n async fn load_flattened_parquet() -> Result<(), ColumnQError> {\n\n let t = to_datafusion_table(\n\n &TableSource::new(\n\n \"blogs\".to_string(),\n\n test_data_path(\"blogs_flattened.parquet\"),\n\n )\n\n .with_option(TableLoadOption::parquet(TableOptionParquet {\n\n use_memory_table: false,\n\n })),\n", "file_path": "columnq/src/table/parquet.rs", "rank": 92, "score": 32235.124669681558 }, { "content": "use std::convert::TryFrom;\n\nuse std::ffi::OsStr;\n\nuse 
std::io::Read;\n\nuse std::path::Path;\n\nuse std::sync::Arc;\n\n\n\nuse datafusion::datasource::TableProvider;\n\n\n\nuse datafusion::arrow;\n\nuse serde::de::{Deserialize, Deserializer};\n\nuse serde_derive::Deserialize;\n\nuse uriparse::URIReference;\n\n\n\nuse crate::error::ColumnQError;\n\n\n\npub mod arrow_ipc_file;\n\npub mod arrow_ipc_stream;\n\npub mod csv;\n\npub mod database;\n\npub mod delta;\n", "file_path": "columnq/src/table/mod.rs", "rank": 93, "score": 32234.833687149003 }, { "content": "use std::convert::{TryFrom, TryInto};\n\nuse std::io::Read;\n\nuse std::sync::Arc;\n\n\n\nuse datafusion::arrow;\n\nuse datafusion::arrow::record_batch::RecordBatch;\n\nuse datafusion::datasource::TableProvider;\n\nuse datafusion::parquet::arrow::{ArrowReader, ParquetFileArrowReader};\n\nuse datafusion::parquet::file::reader::SerializedFileReader;\n\nuse datafusion::parquet::file::serialized_reader::SliceableCursor;\n\n\n\nuse crate::error::ColumnQError;\n\nuse crate::io;\n\nuse crate::table::{TableLoadOption, TableOptionDelta, TableSource};\n\nuse deltalake;\n\n\n\npub async fn to_datafusion_table(t: &TableSource) -> Result<Arc<dyn TableProvider>, ColumnQError> {\n\n let opt = t\n\n .option\n\n .clone()\n", "file_path": "columnq/src/table/delta.rs", "rank": 94, "score": 32234.58748270736 }, { "content": " #[inline]\n\n pub fn default_delimiter() -> u8 {\n\n b','\n\n }\n\n\n\n #[inline]\n\n pub fn default_projection() -> Option<Vec<usize>> {\n\n None\n\n }\n\n\n\n #[inline]\n\n #[must_use]\n\n pub fn with_delimiter(mut self, d: u8) -> Self {\n\n self.delimiter = d;\n\n self\n\n }\n\n\n\n #[inline]\n\n #[must_use]\n\n pub fn with_has_header(mut self, has_header: bool) -> Self {\n", "file_path": "columnq/src/table/mod.rs", "rank": 95, "score": 32234.585672260826 }, { "content": " async fn load_partitions() -> anyhow::Result<()> {\n\n let tmp_dir = tempdir::TempDir::new(\"columnq.test.parquet_partitions\")?;\n\n let tmp_dir_path = tmp_dir.path();\n\n\n\n let 
source_path = test_data_path(\"blogs.parquet\");\n\n assert!(fs::copy(&source_path, tmp_dir_path.join(\"2020-01-01.parquet\"))? > 0);\n\n assert!(fs::copy(&source_path, tmp_dir_path.join(\"2020-01-02.parquet\"))? > 0);\n\n assert!(fs::copy(&source_path, tmp_dir_path.join(\"2020-01-03.parquet\"))? > 0);\n\n\n\n let t = to_mem_table(\n\n &TableSource::new_with_uri(\"blogs\", tmp_dir_path.to_string_lossy())\n\n .with_option(TableLoadOption::parquet(TableOptionParquet::default())),\n\n )\n\n .await?;\n\n\n\n assert_eq!(\n\n t.schema()\n\n .metadata()\n\n .get(\"writer.model.name\")\n\n .map(|s| s.as_str()),\n", "file_path": "columnq/src/table/parquet.rs", "rank": 96, "score": 32234.547796153176 }, { "content": " \"spacex_launches\".to_string(),\n\n test_data_path(\"spacex_launches.json\"),\n\n ))\n\n .await?;\n\n\n\n let schema = t.schema();\n\n let fields = schema.fields();\n\n\n\n let mut obj_keys = fields.iter().map(|f| f.name()).collect::<Vec<_>>();\n\n obj_keys.sort();\n\n let mut expected_obj_keys = vec![\n\n \"fairings\",\n\n \"links\",\n\n \"static_fire_date_utc\",\n\n \"static_fire_date_unix\",\n\n \"tbd\",\n\n \"net\",\n\n \"window\",\n\n \"rocket\",\n\n \"success\",\n", "file_path": "columnq/src/table/json.rs", "rank": 97, "score": 32234.26960009975 }, { "content": " let mut parts = opt_str.splitn(2, '=');\n\n let opt_key = parts\n\n .next()\n\n .ok_or_else(|| ColumnQError::Generic(format!(\"invalid table option: {:?}\", opt_str)))?;\n\n let opt_value = parts\n\n .next()\n\n .ok_or_else(|| ColumnQError::Generic(format!(\"invalid table option: {:?}\", opt_str)))?;\n\n option_json.insert(\n\n opt_key.to_string(),\n\n serde_json::from_str(opt_value).unwrap_or_else(|_| opt_value.into()),\n\n );\n\n }\n\n\n\n if !option_json.is_empty() {\n\n let opt: TableLoadOption = serde_json::from_value(serde_json::Value::Object(option_json))\n\n .map_err(|e| {\n\n ColumnQError::Generic(format!(\"Failed to parse table option: {:?}\", e))\n\n })?;\n\n 
Ok(t.with_option(opt))\n\n } else {\n", "file_path": "columnq/src/table/mod.rs", "rank": 98, "score": 32234.133258758113 }, { "content": "\n\n assert_eq!(obj_keys, expected_obj_keys);\n\n\n\n Ok(())\n\n }\n\n\n\n #[tokio::test]\n\n async fn test_multiple_batches() -> Result<(), ColumnQError> {\n\n let mut source = TableSource::new(\n\n \"spacex_launches\".to_string(),\n\n test_data_path(\"spacex_launches.json\"),\n\n );\n\n source.batch_size = 1;\n\n let (_, p) = to_partitions(&source).await?;\n\n assert_eq!(p.len(), 1);\n\n assert_eq!(p[0][0].num_rows(), source.batch_size);\n\n assert_eq!(p[0].len(), 132);\n\n Ok(())\n\n }\n\n}\n", "file_path": "columnq/src/table/json.rs", "rank": 99, "score": 32234.06174219813 } ]
Rust
src/testdrive/src/action/verify_timestamp_compaction.rs
ruchirK/materialize
94a022c1bca35726b0b1efa7516200a41d4a12d9
use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::Arc; use std::time::Duration; use async_trait::async_trait; use coord::catalog::Catalog; use coord::session::Session; use ore::now::NOW_ZERO; use ore::result::ResultExt; use ore::retry::Retry; use sql::catalog::SessionCatalog; use sql::names::PartialName; use crate::action::{Action, State}; use crate::parser::BuiltinCommand; pub struct VerifyTimestampsAction { source: String, max_size: usize, permit_progress: bool, } pub fn build_verify_timestamp_compaction( mut cmd: BuiltinCommand, ) -> Result<VerifyTimestampsAction, String> { let source = cmd.args.string("source")?; let max_size = cmd .args .opt_string("max-size") .map(|s| s.parse::<usize>().expect("unable to parse usize")) .unwrap_or(3); let permit_progress = cmd .args .opt_bool("permit-progress") .expect("require valid bool if specified") .unwrap_or(false); cmd.args.done()?; Ok(VerifyTimestampsAction { source, max_size, permit_progress, }) } #[async_trait] impl Action for VerifyTimestampsAction { async fn undo(&self, _: &mut State) -> Result<(), String> { Ok(()) } async fn redo(&self, state: &mut State) -> Result<(), String> { if let Some(path) = &state.materialized_catalog_path { let initial_highest_base = Arc::new(AtomicU64::new(u64::MAX)); Retry::default() .initial_backoff(Duration::from_secs(1)) .max_duration(Duration::from_secs(10)) .retry(|retry_state| { let initial_highest = initial_highest_base.clone(); async move { let mut catalog = Catalog::open_debug(path, NOW_ZERO.clone()) .await .map_err_to_string()?; let item_id = catalog .for_session(&Session::dummy()) .resolve_item(&PartialName { database: None, schema: None, item: self.source.clone(), }) .map_err_to_string()? 
.id(); let bindings = catalog .load_timestamp_bindings(item_id) .map_err_to_string()?; let progress = if retry_state.i == 0 { initial_highest.store( bindings.iter().map(|(_, ts, _)| ts).fold(u64::MIN, |a, &b| a.max(b)), Ordering::SeqCst, ); false } else { self.permit_progress && (bindings.iter().map(|(_, ts, _)| ts).fold(u64::MAX, |a, &b| a.min(b)) >= initial_highest.load(Ordering::SeqCst)) }; println!( "Verifying timestamp binding compaction for {:?}. Found {:?} vs expected {:?}. Progress: {:?}", self.source, bindings.len(), self.max_size, progress, ); if bindings.len() <= self.max_size || progress { Ok(()) } else { Err(format!( "There are {:?} bindings compared to max size {:?}", bindings.len(), self.max_size, )) } } }).await } else { println!( "Skipping timestamp binding compaction verification for {:?}.", self.source ); Ok(()) } } }
use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::Arc; use std::time::Duration; use async_trait::async_trait; use coord::catalog::Catalog; use coord::session::Session; use ore::now::NOW_ZERO; use ore::result::ResultExt; use ore::retry::Retry; use sql::catalog::SessionCatalog; use sql::names::PartialName; use crate::action::{Action, State}; use crate::parser::BuiltinCommand; pub struct VerifyTimestampsAction { source: String, max_size: usize, permit_progress: bool, } pub fn build_verify_timestamp_compaction( mut cmd: BuiltinCommand, ) -> Result<VerifyTimestampsAction, String> { let source = cmd.args.string("source")?; let max_size = cmd .args .opt_string("max-size") .map(|s| s.parse::<usize>().expect("unable to parse usize")) .unwrap_or(3);
cmd.args.done()?; Ok(VerifyTimestampsAction { source, max_size, permit_progress, }) } #[async_trait] impl Action for VerifyTimestampsAction { async fn undo(&self, _: &mut State) -> Result<(), String> { Ok(()) } async fn redo(&self, state: &mut State) -> Result<(), String> { if let Some(path) = &state.materialized_catalog_path { let initial_highest_base = Arc::new(AtomicU64::new(u64::MAX)); Retry::default() .initial_backoff(Duration::from_secs(1)) .max_duration(Duration::from_secs(10)) .retry(|retry_state| { let initial_highest = initial_highest_base.clone(); async move { let mut catalog = Catalog::open_debug(path, NOW_ZERO.clone()) .await .map_err_to_string()?; let item_id = catalog .for_session(&Session::dummy()) .resolve_item(&PartialName { database: None, schema: None, item: self.source.clone(), }) .map_err_to_string()? .id(); let bindings = catalog .load_timestamp_bindings(item_id) .map_err_to_string()?; let progress = if retry_state.i == 0 { initial_highest.store( bindings.iter().map(|(_, ts, _)| ts).fold(u64::MIN, |a, &b| a.max(b)), Ordering::SeqCst, ); false } else { self.permit_progress && (bindings.iter().map(|(_, ts, _)| ts).fold(u64::MAX, |a, &b| a.min(b)) >= initial_highest.load(Ordering::SeqCst)) }; println!( "Verifying timestamp binding compaction for {:?}. Found {:?} vs expected {:?}. Progress: {:?}", self.source, bindings.len(), self.max_size, progress, ); if bindings.len() <= self.max_size || progress { Ok(()) } else { Err(format!( "There are {:?} bindings compared to max size {:?}", bindings.len(), self.max_size, )) } } }).await } else { println!( "Skipping timestamp binding compaction verification for {:?}.", self.source ); Ok(()) } } }
let permit_progress = cmd .args .opt_bool("permit-progress") .expect("require valid bool if specified") .unwrap_or(false);
assignment_statement
[ { "content": "pub fn build_compression(cmd: &mut BuiltinCommand) -> Result<Compression, String> {\n\n match cmd.args.opt_string(\"compression\") {\n\n Some(s) => s.parse(),\n\n None => Ok(Compression::None),\n\n }\n\n}\n\n\n", "file_path": "src/testdrive/src/action/file.rs", "rank": 0, "score": 408778.4309590295 }, { "content": "pub fn build_append(mut cmd: BuiltinCommand) -> Result<AppendAction, String> {\n\n let path = build_path(&mut cmd)?;\n\n let compression = build_compression(&mut cmd)?;\n\n let trailing_newline = cmd.args.opt_bool(\"trailing-newline\")?.unwrap_or(true);\n\n cmd.args.done()?;\n\n let mut contents = vec![];\n\n for line in cmd.input {\n\n contents.extend(bytes::unescape(line.as_bytes())?);\n\n contents.push(b'\\n');\n\n }\n\n if !trailing_newline {\n\n contents.pop();\n\n }\n\n Ok(AppendAction {\n\n path,\n\n contents,\n\n compression,\n\n })\n\n}\n\n\n", "file_path": "src/testdrive/src/action/file.rs", "rank": 1, "score": 403851.17489575414 }, { "content": "pub fn build_execute(mut cmd: BuiltinCommand) -> Result<ExecuteAction, String> {\n\n let command = cmd.args.string(\"command\")?;\n\n Ok(ExecuteAction {\n\n command,\n\n expected_output: cmd.input.join(\"\\n\"),\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ExecuteAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let output = Command::new(\"psql\")\n\n .args(&[\n\n \"--pset\",\n\n \"footer=off\",\n\n \"--command\",\n", "file_path": "src/testdrive/src/action/psql.rs", "rank": 2, "score": 403851.17489575414 }, { "content": "pub fn build_sleep(mut cmd: BuiltinCommand) -> Result<SleepAction, String> {\n\n let arg = cmd.args.string(\"duration\")?;\n\n let duration = repr::util::parse_duration(&arg).map_err_to_string()?;\n\n Ok(SleepAction {\n\n duration,\n\n random: false,\n\n })\n\n}\n\n\n\nimpl SyncAction for SleepAction {\n\n fn undo(&self, _: &mut State) -> 
Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n fn redo(&self, _: &mut State) -> Result<(), String> {\n\n let sleep = if self.random {\n\n let mut rng = rand::thread_rng();\n\n rng.gen_range(Duration::from_secs(0)..self.duration)\n\n } else {\n\n self.duration\n\n };\n\n println!(\"Sleeping for {:?}\", sleep);\n\n thread::sleep(sleep);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/testdrive/src/action/sleep.rs", "rank": 3, "score": 403851.1748957542 }, { "content": "pub fn build_delete(mut cmd: BuiltinCommand) -> Result<DeleteAction, String> {\n\n let path = build_path(&mut cmd)?;\n\n cmd.args.done()?;\n\n Ok(DeleteAction { path })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for DeleteAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let path = state.temp_path.join(&self.path);\n\n println!(\"Deleting file {}\", path.display());\n\n tokio::fs::remove_file(&path).await.map_err_to_string()\n\n }\n\n}\n", "file_path": "src/testdrive/src/action/file.rs", "rank": 4, "score": 403851.1748957542 }, { "content": "pub fn build_request(mut cmd: BuiltinCommand) -> Result<RequestAction, String> {\n\n Ok(RequestAction {\n\n url: cmd.args.string(\"url\")?,\n\n method: cmd.args.string(\"method\")?.to_ascii_uppercase(),\n\n content_type: cmd.args.opt_parse(\"content-type\")?,\n\n body: cmd.input.join(\"\\n\"),\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for RequestAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, _: &mut State) -> Result<(), String> {\n\n println!(\"$ http-request {} {}\\n{}\", self.method, self.url, self.body);\n\n\n\n let client = reqwest::Client::new();\n\n\n", "file_path": "src/testdrive/src/action/http.rs", "rank": 5, "score": 403851.1748957542 }, { "content": "pub fn build_ingest(mut cmd: BuiltinCommand) -> Result<IngestAction, String> {\n\n let topic_prefix = 
format!(\"testdrive-{}\", cmd.args.string(\"topic\")?);\n\n let partition = cmd.args.opt_parse::<i32>(\"partition\")?;\n\n let start_iteration = cmd.args.opt_parse::<isize>(\"start-iteration\")?.unwrap_or(0);\n\n let repeat = cmd.args.opt_parse::<isize>(\"repeat\")?.unwrap_or(1);\n\n let publish = cmd.args.opt_bool(\"publish\")?.unwrap_or(false);\n\n let format = match cmd.args.string(\"format\")?.as_str() {\n\n \"avro\" => Format::Avro {\n\n schema: cmd.args.string(\"schema\")?,\n\n confluent_wire_format: cmd.args.opt_bool(\"confluent-wire-format\")?.unwrap_or(true),\n\n },\n\n \"protobuf\" => {\n\n let descriptor_file = cmd.args.string(\"descriptor-file\")?;\n\n // This was introduced after the avro format's confluent-wire-format, so it defaults to\n\n // false\n\n let message = cmd.args.string(\"message\")?;\n\n validate_protobuf_message_name(&message)?;\n\n\n\n Format::Protobuf {\n\n descriptor_file,\n", "file_path": "src/testdrive/src/action/kafka/ingest.rs", "rank": 6, "score": 399139.67122464115 }, { "content": "pub fn build_append(mut cmd: BuiltinCommand) -> Result<AppendAction, String> {\n\n let path = cmd.args.string(\"path\")?;\n\n let records = cmd.input;\n\n cmd.args.done()?;\n\n if path.contains(path::MAIN_SEPARATOR) {\n\n // The goal isn't security, but preventing mistakes.\n\n return Err(\"separators in paths are forbidden\".into());\n\n }\n\n Ok(AppendAction { path, records })\n\n}\n\n\n\nimpl SyncAction for AppendAction {\n\n fn undo(&self, _state: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let path = state.temp_path.join(&self.path);\n\n println!(\"Appending to {}\", path.display());\n\n let file = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .open(path)\n\n .map_err_to_string()?;\n\n let mut writer = Writer::append_to(file).map_err_to_string()?;\n\n write_records(&mut writer, &self.records)?;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": 
"src/testdrive/src/action/avro_ocf.rs", "rank": 7, "score": 399139.67122464115 }, { "content": "pub fn build_random_sleep(mut cmd: BuiltinCommand) -> Result<SleepAction, String> {\n\n let arg = cmd.args.string(\"duration\")?;\n\n let duration = repr::util::parse_duration(&arg).map_err_to_string()?;\n\n Ok(SleepAction {\n\n duration,\n\n random: true,\n\n })\n\n}\n\n\n", "file_path": "src/testdrive/src/action/sleep.rs", "rank": 8, "score": 399139.67122464115 }, { "content": "pub fn build_connect(mut cmd: BuiltinCommand) -> Result<ConnectAction, String> {\n\n let name = cmd.args.string(\"name\")?;\n\n if name.starts_with(\"postgres://\") {\n\n return Err(\"connection name can not be url\".into());\n\n }\n\n\n\n let url = cmd.args.string(\"url\")?;\n\n cmd.args.done()?;\n\n Ok(ConnectAction { name, url })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ConnectAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let client = postgres_client(&self.url).await?;\n\n state.postgres_clients.insert(self.name.clone(), client);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/testdrive/src/action/postgres/connect.rs", "rank": 9, "score": 399139.67122464115 }, { "content": "pub fn build_ingest(mut cmd: BuiltinCommand) -> Result<IngestAction, String> {\n\n let stream_prefix = format!(\"testdrive-{}\", cmd.args.string(\"stream\")?);\n\n match cmd.args.string(\"format\")?.as_str() {\n\n \"bytes\" => (),\n\n f => return Err(format!(\"unsupported message format for Kinesis: {}\", f)),\n\n }\n\n cmd.args.done()?;\n\n\n\n Ok(IngestAction {\n\n stream_prefix,\n\n rows: cmd.input,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for IngestAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n", "file_path": "src/testdrive/src/action/kinesis/ingest.rs", "rank": 10, "score": 399139.67122464115 }, { "content": "pub fn build_write(mut cmd: 
BuiltinCommand) -> Result<WriteAction, String> {\n\n let path = cmd.args.string(\"path\")?;\n\n let schema = cmd.args.string(\"schema\")?;\n\n let codec = cmd.args.opt_parse(\"codec\")?;\n\n\n\n let records = cmd.input;\n\n cmd.args.done()?;\n\n if path.contains(path::MAIN_SEPARATOR) {\n\n // The goal isn't security, but preventing mistakes.\n\n return Err(\"separators in paths are forbidden\".into());\n\n }\n\n Ok(WriteAction {\n\n path,\n\n schema,\n\n records,\n\n codec,\n\n })\n\n}\n\n\n\nimpl SyncAction for WriteAction {\n", "file_path": "src/testdrive/src/action/avro_ocf.rs", "rank": 11, "score": 399139.67122464115 }, { "content": "pub fn build_execute(mut cmd: BuiltinCommand) -> Result<ExecuteAction, String> {\n\n let connection = cmd.args.string(\"connection\")?;\n\n cmd.args.done()?;\n\n Ok(ExecuteAction {\n\n connection,\n\n queries: cmd.input,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ExecuteAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let client;\n\n let client = if self.connection.starts_with(\"postgres://\") {\n\n client = postgres_client(&self.connection).await?;\n\n &client\n", "file_path": "src/testdrive/src/action/postgres/execute.rs", "rank": 12, "score": 399139.67122464115 }, { "content": "pub fn build_verify(mut cmd: BuiltinCommand) -> Result<VerifyAction, String> {\n\n let stream_prefix = cmd.args.string(\"stream\")?;\n\n let expected_records: HashSet<String> = cmd.input.into_iter().collect();\n\n\n\n cmd.args.done()?;\n\n\n\n Ok(VerifyAction {\n\n stream_prefix,\n\n expected_records,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for VerifyAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let stream_name = format!(\"testdrive-{}-{}\", self.stream_prefix, state.seed);\n", "file_path": 
"src/testdrive/src/action/kinesis/verify.rs", "rank": 13, "score": 399139.67122464115 }, { "content": "pub fn build_publish(mut cmd: BuiltinCommand) -> Result<PublishAction, String> {\n\n let subject = cmd.args.string(\"subject\")?;\n\n let schema_type = match cmd.args.string(\"schema-type\")?.as_str() {\n\n \"avro\" => SchemaType::Avro,\n\n \"json\" => SchemaType::Json,\n\n \"protobuf\" => SchemaType::Protobuf,\n\n s => return Err(format!(\"unknown schema type: {}\", s)),\n\n };\n\n let references = match cmd.args.opt_string(\"references\") {\n\n None => vec![],\n\n Some(s) => s.split(',').map(|s| s.into()).collect(),\n\n };\n\n Ok(PublishAction {\n\n subject,\n\n schema: cmd.input.join(\"\\n\"),\n\n schema_type,\n\n references,\n\n })\n\n}\n\n\n", "file_path": "src/testdrive/src/action/schema_registry.rs", "rank": 14, "score": 399139.67122464115 }, { "content": "pub fn build_execute(mut cmd: BuiltinCommand) -> Result<ExecuteAction, String> {\n\n let name = cmd.args.string(\"name\")?;\n\n cmd.args.done()?;\n\n Ok(ExecuteAction {\n\n name,\n\n queries: cmd.input,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ExecuteAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let client = state\n\n .sql_server_clients\n\n .get_mut(&self.name)\n\n .ok_or(format!(\"connection '{}' not found\", &self.name))?;\n", "file_path": "src/testdrive/src/action/sql_server/execute.rs", "rank": 15, "score": 394630.05265512294 }, { "content": "pub fn build_delete_object(mut cmd: BuiltinCommand) -> Result<DeleteObjectAction, String> {\n\n let bucket_prefix = format!(\"testdrive-{}\", cmd.args.string(\"bucket\")?);\n\n cmd.args.done()?;\n\n Ok(DeleteObjectAction {\n\n bucket_prefix,\n\n keys: cmd.input,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for DeleteObjectAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async 
fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let bucket = format!(\"{}-{}\", self.bucket_prefix, state.seed);\n\n println!(\"Deleting S3 objects {}: {}\", bucket, self.keys.join(\", \"));\n\n let result = state\n\n .s3_client\n", "file_path": "src/testdrive/src/action/s3.rs", "rank": 16, "score": 394630.05265512294 }, { "content": "pub fn build_connect(mut cmd: BuiltinCommand) -> Result<ConnectAction, String> {\n\n let name = cmd.args.string(\"name\")?;\n\n cmd.args.done()?;\n\n\n\n let ado_string = cmd.input.join(\"\\n\");\n\n\n\n let config =\n\n Config::from_ado_string(&ado_string).map_err(|e| format!(\"parsing ADO string: {}\", e))?;\n\n\n\n Ok(ConnectAction { name, config })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ConnectAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let tcp = TcpStream::connect(self.config.get_addr())\n", "file_path": "src/testdrive/src/action/sql_server/connect.rs", "rank": 17, "score": 394630.05265512294 }, { "content": "pub fn build_create_bucket(mut cmd: BuiltinCommand) -> Result<CreateBucketAction, String> {\n\n let bucket_prefix = format!(\"testdrive-{}\", cmd.args.string(\"bucket\")?);\n\n cmd.args.done()?;\n\n Ok(CreateBucketAction { bucket_prefix })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for CreateBucketAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let bucket = format!(\"{}-{}\", self.bucket_prefix, state.seed);\n\n println!(\"Creating S3 bucket {}\", bucket);\n\n\n\n match state\n\n .s3_client\n\n .create_bucket()\n\n .bucket(&bucket)\n", "file_path": "src/testdrive/src/action/s3.rs", "rank": 18, "score": 394630.05265512294 }, { "content": "pub fn build_add_notifications(mut cmd: BuiltinCommand) -> Result<AddBucketNotifications, String> {\n\n let bucket_prefix = 
format!(\"testdrive-{}\", cmd.args.string(\"bucket\")?);\n\n let queue_prefix = format!(\"testdrive-{}\", cmd.args.string(\"queue\")?);\n\n let events = cmd\n\n .args\n\n .opt_string(\"events\")\n\n .map(|a| a.split(',').map(|s| s.to_string()).collect())\n\n .unwrap_or_else(|| vec![\"s3:ObjectCreated:*\".to_string()]);\n\n let sqs_validation_timeout = cmd\n\n .args\n\n .opt_string(\"sqs-validation-timeout\")\n\n .map(|t| repr::util::parse_duration(&t).map_err_to_string())\n\n .transpose()?;\n\n cmd.args.done()?;\n\n Ok(AddBucketNotifications {\n\n bucket_prefix,\n\n queue_prefix,\n\n events,\n\n sqs_validation_timeout,\n\n })\n", "file_path": "src/testdrive/src/action/s3.rs", "rank": 19, "score": 394630.05265512294 }, { "content": "pub fn build_put_object(mut cmd: BuiltinCommand) -> Result<PutObjectAction, String> {\n\n let bucket_prefix = format!(\"testdrive-{}\", cmd.args.string(\"bucket\")?);\n\n let key = cmd.args.string(\"key\")?;\n\n let compression = build_compression(&mut cmd)?;\n\n let contents = cmd.input.join(\"\\n\");\n\n cmd.args.done()?;\n\n Ok(PutObjectAction {\n\n bucket_prefix,\n\n key,\n\n compression,\n\n contents,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for PutObjectAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n", "file_path": "src/testdrive/src/action/s3.rs", "rank": 20, "score": 394630.05265512294 }, { "content": "pub fn build_add_partitions(mut cmd: BuiltinCommand) -> Result<AddPartitionsAction, String> {\n\n let topic_prefix = format!(\"testdrive-{}\", cmd.args.string(\"topic\")?);\n\n let partitions = cmd.args.opt_parse(\"total-partitions\")?.unwrap_or(1);\n\n cmd.args.done()?;\n\n\n\n Ok(AddPartitionsAction {\n\n topic_prefix,\n\n partitions,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for AddPartitionsAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let 
topic_name = format!(\"{}-{}\", self.topic_prefix, state.seed);\n\n println!(\n", "file_path": "src/testdrive/src/action/kafka/add_partitions.rs", "rank": 21, "score": 386166.69955773954 }, { "content": "pub fn build_create_topic(mut cmd: BuiltinCommand) -> Result<CreateTopicAction, String> {\n\n let topic_prefix = format!(\"testdrive-{}\", cmd.args.string(\"topic\")?);\n\n let partitions = cmd.args.opt_parse(\"partitions\")?.unwrap_or(1);\n\n let replication_factor = cmd.args.opt_parse(\"replication-factor\")?.unwrap_or(1);\n\n let compression = cmd\n\n .args\n\n .opt_string(\"compression\")\n\n .unwrap_or_else(|| \"producer\".into());\n\n let compaction = cmd.args.opt_parse(\"compaction\")?.unwrap_or(false);\n\n cmd.args.done()?;\n\n\n\n Ok(CreateTopicAction {\n\n topic_prefix,\n\n partitions,\n\n replication_factor,\n\n compression,\n\n compaction,\n\n })\n\n}\n\n\n", "file_path": "src/testdrive/src/action/kafka/create_topic.rs", "rank": 22, "score": 386166.6995577395 }, { "content": "pub fn build_create_stream(mut cmd: BuiltinCommand) -> Result<CreateStreamAction, String> {\n\n let stream_name = format!(\"testdrive-{}\", cmd.args.string(\"stream\")?);\n\n let shard_count = cmd.args.parse(\"shards\")?;\n\n cmd.args.done()?;\n\n\n\n Ok(CreateStreamAction {\n\n stream_name,\n\n shard_count,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for CreateStreamAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let stream_name = format!(\"{}-{}\", self.stream_name, state.seed);\n\n println!(\"Creating Kinesis stream {}\", stream_name);\n", "file_path": "src/testdrive/src/action/kinesis/create_stream.rs", "rank": 23, "score": 386166.6995577395 }, { "content": "pub fn build_verify_slot(mut cmd: BuiltinCommand) -> Result<VerifySlotAction, String> {\n\n let connection = cmd.args.string(\"connection\")?;\n\n let slot = cmd.args.string(\"slot\")?;\n\n let 
active: bool = cmd.args.parse(\"active\")?;\n\n cmd.args.done()?;\n\n Ok(VerifySlotAction {\n\n connection,\n\n slot,\n\n active,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for VerifySlotAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let (client, conn) = tokio_postgres::connect(&self.connection, NoTls)\n", "file_path": "src/testdrive/src/action/postgres/verify_slot.rs", "rank": 24, "score": 386166.69955773954 }, { "content": "pub fn build_update_shards(mut cmd: BuiltinCommand) -> Result<UpdateShardCountAction, String> {\n\n let stream_name = format!(\"testdrive-{}\", cmd.args.string(\"stream\")?);\n\n let target_shard_count = cmd.args.parse(\"shards\")?;\n\n cmd.args.done()?;\n\n\n\n Ok(UpdateShardCountAction {\n\n stream_name,\n\n target_shard_count,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for UpdateShardCountAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n let stream_name = format!(\"{}-{}\", self.stream_name, state.seed);\n\n println!(\n", "file_path": "src/testdrive/src/action/kinesis/update_shards.rs", "rank": 25, "score": 382190.58423790336 }, { "content": "fn build_path(cmd: &mut BuiltinCommand) -> Result<String, String> {\n\n let path = cmd.args.string(\"path\")?;\n\n if path.contains(path::MAIN_SEPARATOR) {\n\n // The goal isn't security, but preventing mistakes.\n\n Err(\"separators in paths are forbidden\".into())\n\n } else {\n\n Ok(path)\n\n }\n\n}\n\n\n", "file_path": "src/testdrive/src/action/file.rs", "rank": 26, "score": 376048.34865553793 }, { "content": "pub fn build_verify(mut cmd: BuiltinCommand, context: Context) -> Result<VerifyAction, String> {\n\n let format = match cmd.args.string(\"format\")?.as_str() {\n\n \"avro\" => SinkFormat::Avro,\n\n \"json\" => SinkFormat::Json {\n\n key: 
cmd.args.parse(\"key\")?,\n\n },\n\n f => return Err(format!(\"unknown format: {}\", f)),\n\n };\n\n let sink = cmd.args.string(\"sink\")?;\n\n let consistency = match cmd.args.opt_string(\"consistency\").as_deref() {\n\n Some(\"debezium\") => Some(SinkConsistencyFormat::Debezium),\n\n Some(s) => return Err(format!(\"unknown sink consistency format {}\", s)),\n\n None => None,\n\n };\n\n\n\n let sort_messages = cmd.args.opt_bool(\"sort-messages\")?.unwrap_or(false);\n\n let expected_messages = cmd.input;\n\n if expected_messages.len() == 0 {\n\n // verify with 0 messages doesn't check that no messages have been written -\n\n // it 'verifies' 0 messages and trivially returns true\n", "file_path": "src/testdrive/src/action/kafka/verify.rs", "rank": 27, "score": 372097.5541524192 }, { "content": "pub fn build_verify(mut cmd: BuiltinCommand, context: Context) -> Result<VerifyAction, String> {\n\n let sink = cmd.args.string(\"sink\")?;\n\n let expected = cmd.input;\n\n cmd.args.done()?;\n\n if sink.contains(path::MAIN_SEPARATOR) {\n\n // The goal isn't security, but preventing mistakes.\n\n return Err(\"separators in file sink names are forbidden\".into());\n\n }\n\n Ok(VerifyAction {\n\n sink,\n\n expected,\n\n context,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for VerifyAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n", "file_path": "src/testdrive/src/action/avro_ocf.rs", "rank": 28, "score": 372097.5541524192 }, { "content": "pub fn build_wait(mut cmd: BuiltinCommand, context: Context) -> Result<WaitSchemaAction, String> {\n\n let schema = cmd.args.string(\"schema\")?;\n\n cmd.args.done()?;\n\n Ok(WaitSchemaAction { schema, context })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for WaitSchemaAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<(), String> {\n\n Retry::default()\n\n 
.initial_backoff(Duration::from_millis(50))\n\n .factor(1.5)\n\n .max_duration(self.context.timeout)\n\n .retry(|_| async {\n\n state\n\n .ccsr_client\n\n .get_schema_by_subject(&self.schema)\n\n .await\n\n .map_err(|e| format!(\"fetching schema: {}\", e))\n\n .and(Ok(()))\n\n })\n\n .await\n\n }\n\n}\n", "file_path": "src/testdrive/src/action/schema_registry.rs", "rank": 29, "score": 367954.6383306588 }, { "content": "pub fn build_sql(mut cmd: SqlCommand, context: Context) -> Result<SqlAction, String> {\n\n let stmts = sql_parser::parser::parse_statements(&cmd.query)\n\n .map_err(|e| format!(\"unable to parse SQL: {}: {}\", cmd.query, e))?;\n\n if stmts.len() != 1 {\n\n return Err(format!(\"expected one statement, but got {}\", stmts.len()));\n\n }\n\n if let SqlOutput::Full { expected_rows, .. } = &mut cmd.expected_output {\n\n // TODO(benesch): one day we'll support SQL queries where order matters.\n\n expected_rows.sort();\n\n }\n\n Ok(SqlAction {\n\n cmd,\n\n stmt: stmts.into_element(),\n\n context,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for SqlAction {\n\n async fn undo(&self, state: &mut State) -> Result<(), String> {\n", "file_path": "src/testdrive/src/action/sql.rs", "rank": 30, "score": 347517.5515747516 }, { "content": "/// Construct a Batch that depends on `state`\n\n///\n\n/// In particular this will have somewhat sensible values for all fields, and\n\n/// will be the next time slice after `state.last_time`, incrementing `last_time` to now\n\npub fn random_batch(rng: &mut impl Rng, state: &mut RecordState) -> Batch {\n\n let id = Uuid::new_v4();\n\n\n\n let dur_val = rng.gen_range(15..1_000);\n\n let dur = chrono::Duration::seconds(dur_val);\n\n let interval_start_time = state.last_time.clone();\n\n let interval_start = protobuf_timestamp(state.last_time);\n\n state.last_time = state.last_time.checked_add_signed(dur).unwrap();\n\n let interval_end = protobuf_timestamp(state.last_time);\n\n\n\n let mut records = vec![];\n\n\n\n for _ in 
0..rng.gen_range(1..50) {\n\n records.push(random_record(rng, interval_start_time, dur_val));\n\n }\n\n\n\n let mut batch = Batch::new();\n\n batch.id = id.to_string();\n\n batch.interval_start = MessageField::some(interval_start);\n\n batch.interval_end = MessageField::some(interval_end);\n\n batch.records = records;\n\n\n\n batch\n\n}\n\n\n", "file_path": "demo/billing/src/randomizer.rs", "rank": 31, "score": 330279.78301324917 }, { "content": "/// Writes a boolean value into `buf`.\n\n///\n\n/// `true` is encoded as the char `'t'` and `false` is encoded as the char\n\n/// `'f'`.\n\npub fn format_bool<F>(buf: &mut F, b: bool) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n buf.write_str(format_bool_static(b));\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 32, "score": 318660.8624405764 }, { "content": "/// Given some stack traces, generate a map of addresses to their\n\n/// corresponding symbols.\n\n///\n\n/// Each address could correspond to more than one symbol, becuase\n\n/// of inlining. (E.g. 
if 0x1234 comes from \"g\", which is inlined in \"f\", the corresponding vec of symbols will be [\"f\", \"g\"].)\n\npub fn symbolicate(profile: &StackProfile) -> HashMap<usize, Vec<String>> {\n\n let mut all_addrs = vec![];\n\n for (stack, _annotation) in profile.stacks.iter() {\n\n all_addrs.extend(stack.addrs.iter().cloned());\n\n }\n\n // Sort so addresses from the same images are together,\n\n // to avoid thrashing `backtrace::resolve`'s cache of\n\n // parsed images.\n\n all_addrs.sort_unstable();\n\n all_addrs.dedup();\n\n all_addrs\n\n .into_iter()\n\n .map(|addr| {\n\n let mut syms = vec![];\n\n backtrace::resolve(addr as *mut c_void, |sym| {\n\n let name = sym\n\n .name()\n\n .map(|sn| sn.to_string())\n\n .unwrap_or_else(|| \"???\".to_string());\n\n syms.push(name);\n\n });\n\n syms.reverse();\n\n (addr, syms)\n\n })\n\n .collect()\n\n}\n", "file_path": "src/prof/src/lib.rs", "rank": 33, "score": 318311.0358047228 }, { "content": "/// Generates a visitor for a mutable AST.\n\n///\n\n/// Returns a string of Rust code that should be compiled alongside the module\n\n/// from which it was generated.\n\npub fn gen_visit_mut(ir: &Ir) -> String {\n\n gen_visit_root(&VisitConfig { mutable: true }, ir)\n\n}\n\n\n", "file_path": "src/walkabout/src/gen.rs", "rank": 34, "score": 304384.92504569533 }, { "content": "pub fn indent(s: &str, n: usize) -> String {\n\n let space = \" \".repeat(n);\n\n let s = s.replace(\"\\n\", &format!(\"\\n{}\", space));\n\n space + &s\n\n}\n", "file_path": "src/sqllogictest/src/util.rs", "rank": 35, "score": 302244.77694600215 }, { "content": "fn lex_to_adjacent_string(buf: &mut LexBuf) -> bool {\n\n // Adjacent string literals that are separated by whitespace are\n\n // concatenated if and only if that whitespace contains at least one newline\n\n // character. 
This bizzare rule matches PostgreSQL and the SQL standard.\n\n let whitespace = buf.take_while(|ch| ch.is_ascii_whitespace());\n\n whitespace.contains(&['\\n', '\\r'][..]) && buf.consume('\\'')\n\n}\n\n\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 36, "score": 295967.86545533844 }, { "content": "/// Changes the `name` used in an item's `CREATE` statement. To complete a\n\n/// rename operation, you must also call `create_stmt_rename_refs` on all dependent\n\n/// items.\n\npub fn create_stmt_rename(create_stmt: &mut Statement<Raw>, to_item_name: String) {\n\n // TODO(sploiselle): Support renaming schemas and databases.\n\n match create_stmt {\n\n Statement::CreateIndex(CreateIndexStatement { name, .. }) => {\n\n *name = Some(Ident::new(to_item_name));\n\n }\n\n Statement::CreateSink(CreateSinkStatement { name, .. })\n\n | Statement::CreateSource(CreateSourceStatement { name, .. })\n\n | Statement::CreateView(CreateViewStatement {\n\n definition: ViewDefinition { name, .. },\n\n ..\n\n })\n\n | Statement::CreateTable(CreateTableStatement { name, .. }) => {\n\n // The last name in an ObjectName is the item name. The item name\n\n // does not have a fixed index.\n\n // TODO: https://github.com/MaterializeInc/materialize/issues/5591\n\n let object_name_len = name.0.len() - 1;\n\n name.0[object_name_len] = Ident::new(to_item_name);\n\n }\n\n _ => unreachable!(\"Internal error: only catalog items can be renamed\"),\n\n }\n\n}\n\n\n", "file_path": "src/sql/src/ast/transform.rs", "rank": 37, "score": 292919.1771908814 }, { "content": "fn pcf_map(schema: &Map<String, serde_json::Value>) -> String {\n\n // Look for the namespace variant up front.\n\n let ns = schema.get(\"namespace\").and_then(|v| v.as_str());\n\n let mut fields = Vec::new();\n\n for (k, v) in schema {\n\n // Reduce primitive types to their simple form. 
([PRIMITIVE] rule)\n\n if schema.len() == 1 && k == \"type\" {\n\n // Invariant: function is only callable from a valid schema, so this is acceptable.\n\n if let serde_json::Value::String(s) = v {\n\n return pcf_string(s);\n\n }\n\n }\n\n\n\n // Strip out unused fields ([STRIP] rule)\n\n if field_ordering_position(k).is_none() {\n\n continue;\n\n }\n\n\n\n // Fully qualify the name, if it isn't already ([FULLNAMES] rule).\n\n if k == \"name\" {\n", "file_path": "src/avro/src/schema.rs", "rank": 38, "score": 282517.0816931255 }, { "content": "/// Transforms a vector containing indexes of needed columns into one containing\n\n/// the \"skips\" an iterator over a Row would need to perform to see those values.\n\n///\n\n/// This function requires that all of the elements in `indexes` are strictly\n\n/// increasing.\n\n/// E.g. [3, 6, 10, 15] turns into [3, 3, 4, 5]\n\npub fn convert_indexes_to_skips(mut indexes: Vec<usize>) -> Vec<usize> {\n\n for i in 1..indexes.len() {\n\n soft_assert_or_log!(\n\n indexes[i - 1] < indexes[i],\n\n \"convert_indexes_to_skip needs indexes to be strictly increasing. Received: {:?}\",\n\n indexes,\n\n );\n\n }\n\n\n\n for i in (1..indexes.len()).rev() {\n\n indexes[i] -= indexes[i - 1];\n\n indexes[i] -= 1;\n\n }\n\n\n\n indexes\n\n}\n\n\n", "file_path": "src/dataflow-types/src/plan/reduce.rs", "rank": 39, "score": 280848.8901833212 }, { "content": "fn slurp_one(line_reader: &mut LineReader) -> Option<(usize, String)> {\n\n while let Some((_, line)) = line_reader.peek() {\n\n match line.chars().next() {\n\n Some('#') => {\n\n // Comment line. 
Skip.\n\n let _ = line_reader.next();\n\n }\n\n Some('$') | Some('>') | Some('!') | Some('?') => return None,\n\n Some('\\\\') => {\n\n return line_reader.next().map(|(pos, mut line)| {\n\n line.remove(0);\n\n (pos, line)\n\n })\n\n }\n\n _ => return line_reader.next(),\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/testdrive/src/parser.rs", "rank": 40, "score": 278930.7011602391 }, { "content": "pub fn build_fail_sql(cmd: FailSqlCommand, context: Context) -> Result<FailSqlAction, String> {\n\n let stmts = sql_parser::parser::parse_statements(&cmd.query)\n\n .map_err(|e| format!(\"unable to parse SQL: {}: {}\", cmd.query, e));\n\n\n\n // Allow for statements that could not be parsed.\n\n // This way such statements can be used for negative testing in .td files\n\n let stmt = match stmts {\n\n Ok(s) => {\n\n if s.len() != 1 {\n\n return Err(format!(\"expected one statement, but got {}\", s.len()));\n\n }\n\n Some(s.into_element())\n\n }\n\n Err(_) => None,\n\n };\n\n\n\n Ok(FailSqlAction { cmd, stmt, context })\n\n}\n\n\n\n#[async_trait]\n", "file_path": "src/testdrive/src/action/sql.rs", "rank": 41, "score": 276538.6073399994 }, { "content": "/// Ensures that `s` has fewer than `length` characters, and returns a `String`\n\n/// version of it with blank padding so that its width is `length` characters.\n\n///\n\n/// The value returned is appropriate to return to clients, but _is not_\n\n/// appropriate to store in `Datum::String`.\n\npub fn format_str_pad(s: &str, length: Option<usize>) -> String {\n\n format_char_str(s, length, false, CharWhiteSpace::Pad).unwrap()\n\n}\n", "file_path": "src/repr/src/adt/char.rs", "rank": 42, "score": 273099.2731522641 }, { "content": "pub fn format_string<F>(buf: &mut F, s: &str) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n buf.write_str(s);\n\n Nestable::MayNeedEscaping\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 43, "score": 272477.78490164626 }, { "content": "/// Legacy logic included something like 
an offset into almost data streams\n\n///\n\n/// Eventually we will require `INCLUDE <metadata>` for everything.\n\npub fn provide_default_metadata(envelope: &SourceEnvelope, encoding: &DataEncoding) -> bool {\n\n let is_avro = matches!(encoding, DataEncoding::Avro(_));\n\n let is_stateless_dbz = matches!(envelope, SourceEnvelope::Debezium(_, DebeziumMode::Plain));\n\n\n\n !is_avro && !is_stateless_dbz\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]\n\npub enum DebeziumMode {\n\n Plain,\n\n /// Keep track of keys from upstream and discard retractions for new keys\n\n Upsert,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)]\n\npub enum Compression {\n\n Gzip,\n\n None,\n\n}\n\n\n", "file_path": "src/dataflow-types/src/types.rs", "rank": 44, "score": 270243.89900496625 }, { "content": "fn write_fn_name(out: &mut String, s: &str) {\n\n // Simplify associated type names so that e.g. `T::FooBar` becomes\n\n // `visit_foo_bar`.\n\n let s = s.splitn(2, \"::\").last().unwrap();\n\n for c in s.chars() {\n\n if c.is_ascii_uppercase() {\n\n out.push('_');\n\n out.push(c.to_ascii_lowercase());\n\n } else {\n\n out.push(c);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/walkabout/src/gen.rs", "rank": 45, "score": 268707.28110361914 }, { "content": "/// Changes `\"\\\"foo\\\"\"` to `\"foo\"`\n\npub fn unquote(s: &str) -> String {\n\n if s.starts_with('\"') && s.ends_with('\"') {\n\n s[1..(s.len() - 1)].replace(\"\\\\\\\"\", \"\\\"\")\n\n } else {\n\n s.to_string()\n\n }\n\n}\n\n\n\n/* #endregion */\n\n\n", "file_path": "src/lowertest/src/lib.rs", "rank": 46, "score": 267686.37042525364 }, { "content": "/// Like `format_bool`, but returns a string with a static lifetime.\n\n///\n\n/// This function should be preferred to `format_bool` when applicable, as it\n\n/// avoids an allocation.\n\npub fn format_bool_static(b: bool) -> &'static str {\n\n match b {\n\n true => \"t\",\n\n false => \"f\",\n\n }\n\n}\n\n\n", 
"file_path": "src/repr/src/strconv.rs", "rank": 47, "score": 264883.1258745506 }, { "content": "/// Normalizes a single identifier.\n\npub fn ident(ident: Ident) -> String {\n\n ident.as_str().into()\n\n}\n\n\n", "file_path": "src/sql/src/normalize.rs", "rank": 48, "score": 263519.65684609255 }, { "content": "pub fn bench_avro(c: &mut Criterion) {\n\n let schema_str = r#\"\n\n{\n\n \"type\": \"record\",\n\n \"name\": \"Envelope\",\n\n \"namespace\": \"tpch.tpch.lineitem\",\n\n \"fields\": [\n\n {\n\n \"name\": \"before\",\n\n \"type\": [\n\n \"null\",\n\n {\n\n \"type\": \"record\",\n\n \"name\": \"Value\",\n\n \"fields\": [\n\n {\n\n \"name\": \"l_orderkey\",\n\n \"type\": \"int\"\n\n },\n\n {\n", "file_path": "src/interchange/benches/avro.rs", "rank": 49, "score": 263300.5208263013 }, { "content": "pub fn bench_sort(c: &mut Criterion) {\n\n let num_rows = 10_000;\n\n\n\n let mut rng = seeded_rng();\n\n let int_rows = (0..num_rows)\n\n .map(|_| {\n\n vec![\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n ]\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let mut rng = seeded_rng();\n\n let byte_data = (0..num_rows)\n\n .map(|_| {\n", "file_path": "src/repr/benches/row.rs", "rank": 50, "score": 263300.5208263013 }, { "content": "pub fn bench_protobuf(c: &mut Criterion) {\n\n let mut value = Value::new();\n\n value.l_orderkey = 1;\n\n value.l_orderkey = 155_190;\n\n value.l_suppkey = 7706;\n\n value.l_linenumber = 1;\n\n value.l_quantity = 17.0;\n\n value.l_extendedprice = 21168.23;\n\n value.l_discount = 0.04;\n\n value.l_tax = 0.02;\n\n value.l_returnflag = \"N\".into();\n\n value.l_linestatus = \"O\".into();\n\n value.l_shipdate = 9567;\n\n value.l_commitdate = 9537;\n\n value.l_receiptdate = 9537;\n\n value.l_shipinstruct = \"DELIVER IN PERSON\".into();\n\n value.l_shipmode = \"TRUCK\".into();\n\n value.l_comment = \"egular courts above 
the\".into();\n\n\n\n let mut connector = Connector::new();\n", "file_path": "src/interchange/benches/protobuf.rs", "rank": 51, "score": 263300.5208263013 }, { "content": "pub fn bench_pack(c: &mut Criterion) {\n\n let num_rows = 10_000;\n\n\n\n let mut rng = seeded_rng();\n\n let int_rows = (0..num_rows)\n\n .map(|_| {\n\n vec![\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n ]\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let mut rng = seeded_rng();\n\n let byte_data = (0..num_rows)\n\n .map(|_| {\n", "file_path": "src/repr/benches/row.rs", "rank": 52, "score": 263300.52082630136 }, { "content": "/// helper for serde default\n\nfn btrue() -> bool {\n\n true\n\n}\n\n\n", "file_path": "test/correctness/args.rs", "rank": 53, "score": 262243.96381750616 }, { "content": "pub fn format_numeric<F>(buf: &mut F, n: &OrderedDecimal<Numeric>) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n write!(buf, \"{}\", n.0.to_standard_notation_string());\n\n Nestable::Yes\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 54, "score": 259830.30601522065 }, { "content": "/// Reports whether the given stream begins with a pgwire handshake.\n\n///\n\n/// To avoid false negatives, there must be at least eight bytes in `buf`.\n\npub fn match_handshake(buf: &[u8]) -> bool {\n\n // The pgwire StartupMessage looks like this:\n\n //\n\n // i32 - Length of entire message.\n\n // i32 - Protocol version number.\n\n // [String] - Arbitrary key-value parameters of any length.\n\n //\n\n // Since arbitrary parameters can be included in the StartupMessage, the\n\n // first Int32 is worthless, since the message could have any length.\n\n // Instead, we sniff the protocol version number.\n\n if buf.len() < 8 {\n\n return false;\n\n }\n\n let version = NetworkEndian::read_i32(&buf[4..8]);\n\n VERSIONS.contains(&version)\n\n}\n\n\n\n/// Parameters for the [`run`] 
function.\n\npub struct RunParams<'a, A> {\n\n /// The TLS mode of the pgwire server.\n", "file_path": "src/pgwire/src/protocol.rs", "rank": 55, "score": 259771.20304651544 }, { "content": "/// Generates a visitor for an immutable AST.\n\n///\n\n/// Returns a string of Rust code that should be compiled alongside the module\n\n/// from which it was generated.\n\npub fn gen_visit(ir: &Ir) -> String {\n\n gen_visit_root(&VisitConfig { mutable: false }, ir)\n\n}\n\n\n", "file_path": "src/walkabout/src/gen.rs", "rank": 56, "score": 259574.79245412923 }, { "content": "/// Generates a fold transformer for a mutable AST.\n\n///\n\n/// Returns a string of Rust code that should be compiled alongside the module\n\n/// from which it was generated.\n\npub fn gen_fold(ir: &Ir) -> String {\n\n gen_fold_root(ir)\n\n}\n\n\n", "file_path": "src/walkabout/src/gen.rs", "rank": 57, "score": 259574.74889850887 }, { "content": "pub fn bench_mem_snapshots(c: &mut Criterion) {\n\n bench_runtime_snapshots(c, \"mem\", |_path| MemRegistry::new().runtime_no_reentrance());\n\n}\n\n\n", "file_path": "src/persist/benches/snapshot.rs", "rank": 58, "score": 259350.14820282988 }, { "content": "pub fn bench_writes_log(c: &mut Criterion) {\n\n let data = \"entry0\".as_bytes().to_vec();\n\n\n\n let mut mem_log = MemRegistry::new()\n\n .log_no_reentrance()\n\n .expect(\"creating a MemLog cannot fail\");\n\n c.bench_function(\"mem_log_write_sync\", |b| {\n\n bench_write_sync(&mut mem_log, data.clone(), b)\n\n });\n\n\n\n // Create a directory that will automatically be dropped after the test finishes.\n\n let temp_dir = tempfile::tempdir().expect(\"failed to create temp directory\");\n\n let mut file_log = new_file_log(\"file_log_write_sync\", temp_dir.path());\n\n c.bench_function(\"file_log_write_sync\", |b| {\n\n bench_write_sync(&mut file_log, data.clone(), b)\n\n });\n\n}\n\n\n", "file_path": "src/persist/benches/writer.rs", "rank": 59, "score": 259350.14820282988 }, { "content": "pub fn 
bench_writes_blob(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"blob_set\");\n\n\n\n // Limit the amount of time this test gets to run in order to limit the total\n\n // number of iterations criterion takes, and consequently, limit the peak\n\n // memory usage.\n\n group.warm_up_time(Duration::from_secs(1));\n\n group.measurement_time(Duration::from_secs(1));\n\n\n\n let mut blob_val = vec![];\n\n let data = DataGenerator::default();\n\n for batch in data.batches() {\n\n for ((k, v), t, d) in batch.iter() {\n\n blob_val.extend_from_slice(k);\n\n blob_val.extend_from_slice(v);\n\n blob_val.extend_from_slice(&t.to_le_bytes());\n\n blob_val.extend_from_slice(&d.to_le_bytes());\n\n }\n\n }\n\n assert_eq!(data.goodput_bytes(), u64::cast_from(blob_val.len()));\n", "file_path": "src/persist/benches/writer.rs", "rank": 60, "score": 259350.14820282988 }, { "content": "pub fn bench_file_snapshots(c: &mut Criterion) {\n\n let temp_dir = tempfile::tempdir().expect(\"failed to create temp directory\");\n\n bench_runtime_snapshots(c, \"file\", move |path| {\n\n let blob_dir = temp_dir\n\n .path()\n\n .join(format!(\"snapshot_bench_blob_{}\", path));\n\n let lock_info = LockInfo::new_no_reentrance(\"snapshot_bench\".to_owned());\n\n runtime::start(\n\n RuntimeConfig::default(),\n\n ErrorLog,\n\n FileBlob::open_exclusive(blob_dir.into(), lock_info)?,\n\n build_info::DUMMY_BUILD_INFO,\n\n &MetricsRegistry::new(),\n\n None,\n\n )\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bench_mem_snapshots, bench_file_snapshots);\n\ncriterion_main!(benches);\n", "file_path": "src/persist/benches/snapshot.rs", "rank": 61, "score": 259350.14820282988 }, { "content": "pub fn bench_writes_indexed(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"indexed_write_drain\");\n\n\n\n // Limit the sample size of this benchmark group to constrain it to a more\n\n // reasonable runtime.\n\n group.sample_size(10);\n\n let mem_indexed = MemRegistry::new()\n\n 
.indexed_no_reentrance()\n\n .expect(\"failed to create mem indexed\");\n\n bench_writes_indexed_inner(mem_indexed, \"mem\", &mut group).expect(\"running benchmark failed\");\n\n\n\n // Create a directory that will automatically be dropped after the test finishes.\n\n let temp_dir = tempfile::tempdir().expect(\"failed to create temp directory\");\n\n let file_log = new_file_log(\"indexed_write_drain_log\", temp_dir.path());\n\n let file_blob = new_file_blob(\"indexed_write_drain_blob\", temp_dir.path());\n\n\n\n let metrics = Metrics::register_with(&MetricsRegistry::new());\n\n let blob_cache = BlobCache::new(build_info::DUMMY_BUILD_INFO, metrics.clone(), file_blob);\n\n let compacter = Maintainer::new(blob_cache.clone(), Arc::new(Runtime::new().unwrap()));\n\n let file_indexed = Indexed::new(file_log, blob_cache, compacter, metrics)\n\n .expect(\"failed to create file indexed\");\n\n bench_writes_indexed_inner(file_indexed, \"file\", &mut group).expect(\"running benchmark failed\");\n\n}\n\n\n", "file_path": "src/persist/benches/writer.rs", "rank": 62, "score": 259350.14820282988 }, { "content": "/// helper for serde default\n\nfn btrue() -> bool {\n\n true\n\n}\n\n\n", "file_path": "src/peeker/src/args.rs", "rank": 63, "score": 257304.66389120475 }, { "content": "/// Converts serialized JSON to the syntax that [to_json] handles.\n\n///\n\n/// `json` is assumed to have been produced by serializing an object of type\n\n/// `type_name`.\n\n/// `ctx` is responsible for converting serialized JSON to any syntax\n\n/// extensions or overrides.\n\npub fn from_json<C>(json: &Value, type_name: &str, rti: &ReflectedTypeInfo, ctx: &mut C) -> String\n\nwhere\n\n C: TestDeserializeContext,\n\n{\n\n let type_name = normalize_type_name(type_name);\n\n if let Some(result) = ctx.reverse_syntax_override(json, &type_name, rti) {\n\n return result;\n\n }\n\n if let Some((names, types)) = rti.struct_dict.get(&type_name[..]) {\n\n if types.is_empty() {\n\n \"\".to_string()\n\n } else 
{\n\n format!(\"({})\", from_json_fields(json, names, types, rti, ctx))\n\n }\n\n } else if let Some(enum_dict) = rti.enum_dict.get(&type_name[..]) {\n\n match json {\n\n // A unit enum in JSON is `\"variant\"`. In the spec it is `variant`.\n\n Value::String(s) => unquote(s),\n\n // An enum with fields is `{\"variant\": <fields>}` in JSON. In the\n\n // spec it is `(variant field1 .. fieldn).\n", "file_path": "src/lowertest/src/lib.rs", "rank": 64, "score": 256259.6412492871 }, { "content": "pub fn gen_fold_root(ir: &Ir) -> String {\n\n let mut generics = BTreeMap::new();\n\n for (name, bounds) in &ir.generics {\n\n generics.insert(name.clone(), bounds.clone());\n\n generics.insert(f!(\"{name}2\"), bounds.clone());\n\n }\n\n let trait_generics = trait_generics(&generics);\n\n let trait_generics_and_bounds = trait_generics_and_bounds(&generics);\n\n\n\n let mut buf = CodegenBuf::new();\n\n\n\n buf.start_block(f!(\"pub trait Fold<{trait_generics_and_bounds}>\"));\n\n for (name, item) in &ir.items {\n\n match item {\n\n Item::Abstract => {\n\n // The intent is to replace `T::FooBar` with `T2::FooBar`. 
This\n\n // is a bit gross, but it seems reliable enough, and is so far\n\n // simpler than trying to use a structured type for `name`.\n\n let name2 = name.replacen(\"::\", \"2::\", 1);\n\n let fn_name = fold_fn_name(name);\n", "file_path": "src/walkabout/src/gen.rs", "rank": 65, "score": 255816.35491392465 }, { "content": "/// Trims trailing whitespace from each line of `s`.\n\npub fn trim_trailing_space(s: &str) -> String {\n\n let mut lines: Vec<_> = s.lines().map(|line| line.trim_end()).collect();\n\n while lines.last().map_or(false, |l| l.is_empty()) {\n\n lines.pop();\n\n }\n\n lines.join(\"\\n\")\n\n}\n\n\n", "file_path": "src/testdrive/src/util/text.rs", "rank": 66, "score": 255816.35491392465 }, { "content": "pub fn bench_end_to_end(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"end_to_end\");\n\n\n\n let temp_dir = tempfile::tempdir().expect(\"failed to create temp directory\");\n\n let nonce = \"end_to_end\".to_string();\n\n let collection_name = \"end_to_end\".to_string();\n\n let mut runtime = create_runtime(temp_dir.path(), &nonce).expect(\"missing runtime\");\n\n let (write, _read) = runtime.create_or_load(&collection_name);\n\n let data = DataGenerator::default();\n\n let goodput_bytes = workload::load(&write, &data, true).expect(\"error writing data\");\n\n group.throughput(Throughput::Bytes(goodput_bytes));\n\n let expected_frontier = u64::cast_from(data.record_count);\n\n runtime.stop().expect(\"runtime shut down cleanly\");\n\n\n\n group.bench_function(\n\n BenchmarkId::new(\"end_to_end\", data.goodput_pretty()),\n\n move |b| {\n\n bench_read_persisted_source(\n\n 1,\n\n temp_dir.path().to_path_buf(),\n\n nonce.clone(),\n\n collection_name.clone(),\n\n expected_frontier,\n\n b,\n\n )\n\n },\n\n );\n\n}\n\n\n", "file_path": "src/persist/benches/end_to_end.rs", "rank": 67, "score": 255598.91169270588 }, { "content": "pub fn bench_writes_blob_cache(c: &mut Criterion) {\n\n let mut group = 
c.benchmark_group(\"blob_cache_set_unsealed_batch\");\n\n\n\n // Limit the sample size and measurement time of this benchmark group to both\n\n // limit the overall runtime to a reasonable length and bound the memory\n\n // utilization.\n\n //\n\n // Criterion tries to fit as many iterations as possible within `measurement_time`,\n\n // but chooses some minimum number of iterations based on `sample_size`. So,\n\n // because we want to have a tight limit on the number of iterations, as each\n\n // incurs substantial memory usage for both file and mem blobs (because of how\n\n // caching is currently implemented), we have to manually specify both.\n\n group.sample_size(10);\n\n group.warm_up_time(Duration::from_secs(1));\n\n group.measurement_time(Duration::from_secs(1));\n\n\n\n let mem_blob = MemRegistry::new()\n\n .blob_no_reentrance()\n\n .expect(\"creating a MemBlob cannot fail\");\n\n let metrics = Metrics::register_with(&MetricsRegistry::new());\n", "file_path": "src/persist/benches/writer.rs", "rank": 68, "score": 255598.91169270588 }, { "content": "/// Parses a [`bool`] from `s`.\n\n///\n\n/// The accepted values are \"true\", \"false\", \"yes\", \"no\", \"on\", \"off\", \"1\", and\n\n/// \"0\", or any unambiguous prefix of one of those values. 
Leading or trailing\n\n/// whitespace is permissible.\n\npub fn parse_bool(s: &str) -> Result<bool, ParseError> {\n\n match s.trim().to_lowercase().as_str() {\n\n \"t\" | \"tr\" | \"tru\" | \"true\" | \"y\" | \"ye\" | \"yes\" | \"on\" | \"1\" => Ok(true),\n\n \"f\" | \"fa\" | \"fal\" | \"fals\" | \"false\" | \"n\" | \"no\" | \"of\" | \"off\" | \"0\" => Ok(false),\n\n _ => Err(ParseError::invalid_input_syntax(\"boolean\", s)),\n\n }\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 69, "score": 255572.4565105918 }, { "content": "fn parse_entry(parsed_object: &mut json::JsonValue) -> (String, Option<String>) {\n\n assert!(parsed_object.is_object());\n\n parsed_object.remove(\"type\");\n\n let key = parsed_object[\"id\"].take_string().unwrap();\n\n parsed_object.remove(\"id\");\n\n let value = if parsed_object.len() == 0 {\n\n None\n\n } else {\n\n Some(parsed_object.to_string())\n\n };\n\n (key, value)\n\n}\n\n\n", "file_path": "play/mbta/src/main.rs", "rank": 70, "score": 254951.38029348842 }, { "content": "fn any_matches(haystack: &[String], needle: &str) -> bool {\n\n haystack.iter().any(|s| s.contains(needle))\n\n}\n", "file_path": "demo/billing/src/main.rs", "rank": 71, "score": 253979.86249167295 }, { "content": "/// Normalizes `WITH` option keys without normalizing their corresponding\n\n/// values.\n\npub fn option_objects(options: &[SqlOption<Raw>]) -> BTreeMap<String, SqlOption<Raw>> {\n\n options\n\n .iter()\n\n .map(|o| (ident(o.name().clone()), o.clone()))\n\n .collect()\n\n}\n\n\n", "file_path": "src/sql/src/normalize.rs", "rank": 72, "score": 252933.48939116084 }, { "content": "/// Reports whether the environment variable `key` is set to a truthy value in\n\n/// the current process's environment.\n\n///\n\n/// The empty string and the string \"0\" are considered false. 
All other values\n\n/// are considered true.\n\npub fn is_var_truthy<K>(key: K) -> bool\n\nwhere\n\n K: AsRef<OsStr>,\n\n{\n\n match env::var_os(key) {\n\n None => false,\n\n Some(val) => val != \"0\" && val != \"\",\n\n }\n\n}\n", "file_path": "src/ore/src/env.rs", "rank": 73, "score": 249394.1505993381 }, { "content": "/// Convert a Datum to a String such that [test_spec_to_row] can convert the\n\n/// String back into a row containing the same Datum.\n\n///\n\n/// Currently supports only Datums supported by [test_spec_to_row].\n\npub fn datum_to_test_spec(datum: Datum) -> String {\n\n let result = format!(\"{}\", datum);\n\n match datum {\n\n Datum::Timestamp(_) => result.quoted().to_string(),\n\n _ => result,\n\n }\n\n}\n\n\n", "file_path": "src/repr-test-util/src/lib.rs", "rank": 74, "score": 248859.6632041988 }, { "content": "/// Parse a token as a vec of strings that can be parsed as datums in a row.\n\n///\n\n/// The token is assumed to be of the form `[datum1 datum2 .. datumn]`.\n\npub fn parse_vec_of_literals(token: &TokenTree) -> Result<Vec<String>, String> {\n\n match token {\n\n TokenTree::Group(group) => {\n\n let mut inner_iter = group.stream().into_iter();\n\n let mut result = Vec::new();\n\n while let Some(symbol) = inner_iter.next() {\n\n match extract_literal_string(&symbol, &mut inner_iter)? 
{\n\n Some(dat) => result.push(dat),\n\n None => {\n\n return Err(format!(\"{:?} cannot be interpreted as a literal.\", symbol));\n\n }\n\n }\n\n }\n\n Ok(result)\n\n }\n\n invalid => Err(format!(\n\n \"{:?} cannot be parsed as a vec of literals\",\n\n invalid\n\n )),\n\n }\n\n}\n", "file_path": "src/repr-test-util/src/lib.rs", "rank": 75, "score": 248739.90125643217 }, { "content": "/// Normalizes a list of `WITH` options.\n\npub fn options<T: AstInfo>(options: &[SqlOption<T>]) -> BTreeMap<String, Value> {\n\n options\n\n .iter()\n\n .map(|o| match o {\n\n SqlOption::Value { name, value } => (ident(name.clone()), value.clone()),\n\n SqlOption::ObjectName { name, object_name } => (\n\n ident(name.clone()),\n\n Value::String(object_name.to_ast_string()),\n\n ),\n\n SqlOption::DataType { name, data_type } => (\n\n ident(name.clone()),\n\n Value::String(data_type.to_ast_string()),\n\n ),\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/sql/src/normalize.rs", "rank": 76, "score": 248673.93693049127 }, { "content": "fn decode(row: &Row) -> Result<HashMap<String, String>, String> {\n\n let mut out = HashMap::new();\n\n for (i, col) in row.columns().iter().enumerate() {\n\n let ty = col.type_();\n\n let conversion = match *ty {\n\n Type::BOOL => row.get::<_, Option<bool>>(i).map(|x| x.to_string()),\n\n Type::BPCHAR | Type::TEXT => row.get::<_, Option<String>>(i),\n\n Type::BYTEA => row.get::<_, Option<Vec<u8>>>(i).map(|x| {\n\n let s = x.into_iter().map(ascii::escape_default).flatten().collect();\n\n String::from_utf8(s).unwrap()\n\n }),\n\n Type::INT4 => row.get::<_, Option<i32>>(i).map(|x| x.to_string()),\n\n Type::INT8 => row.get::<_, Option<i64>>(i).map(|x| x.to_string()),\n\n Type::NUMERIC => row.get::<_, Option<Numeric>>(i).map(|x| x.to_string()),\n\n Type::TIMESTAMP => row\n\n .get::<_, Option<chrono::NaiveDateTime>>(i)\n\n .map(|x| x.to_string()),\n\n Type::TIMESTAMPTZ => row\n\n .get::<_, Option<chrono::DateTime<chrono::Utc>>>(i)\n\n .map(|x| 
x.to_string()),\n", "file_path": "test/correctness/checker.rs", "rank": 77, "score": 248097.2406891256 }, { "content": "fn any_matches(haystack: &[String], needle: &str) -> bool {\n\n haystack.iter().any(|s| s.contains(needle))\n\n}\n", "file_path": "test/performance/perf-upsert/src/main.rs", "rank": 78, "score": 246649.33251516995 }, { "content": "fn lex_string(buf: &mut LexBuf) -> Result<String, ParserError> {\n\n let mut s = String::new();\n\n loop {\n\n let pos = buf.pos() - 1;\n\n loop {\n\n match buf.next() {\n\n Some('\\'') if buf.consume('\\'') => s.push('\\''),\n\n Some('\\'') => break,\n\n Some(c) => s.push(c),\n\n None => bail!(pos, \"unterminated quoted string\"),\n\n }\n\n }\n\n if !lex_to_adjacent_string(buf) {\n\n return Ok(s);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/sql-parser/src/lexer.rs", "rank": 79, "score": 246541.70108473144 }, { "content": "fn slurp_all(line_reader: &mut LineReader) -> Vec<String> {\n\n let mut out = Vec::new();\n\n while let Some((_, line)) = slurp_one(line_reader) {\n\n out.push(line);\n\n }\n\n out\n\n}\n\n\n", "file_path": "src/testdrive/src/parser.rs", "rank": 80, "score": 246252.49065964908 }, { "content": "/// Try to build the views and sources that are needed for this script\n\n/// This ignores errors (just logging them), and can just be run multiple times.\n\n///\n\n/// # Returns Success: `true` if everything succeed\n\nfn try_initialize(client: &mut Client, check: &Check) -> bool {\n\n let mut success = true;\n\n if !check.enabled {\n\n info!(\"skipping disabled query group {}\", check.name);\n\n return success;\n\n }\n\n let mz_result = client.batch_execute(&format!(\n\n \"CREATE MATERIALIZED VIEW {} AS {}\",\n\n check.name, check.query\n\n ));\n\n match mz_result {\n\n Ok(_) => info!(\"installed view {}\", check.name),\n\n Err(err) => {\n\n let errmsg = err.to_string();\n\n if !errmsg.ends_with(\"already exists\") {\n\n success = false;\n\n warn!(\"error trying to create view {}: {}\", check.name, err);\n\n } 
else {\n\n // this only matters for timeline debugging, in general it is fine\n\n debug!(\"view previously installed: {} err={}\", check.name, err);\n\n }\n\n }\n\n }\n\n success\n\n}\n", "file_path": "test/correctness/checker.rs", "rank": 81, "score": 244111.31038842924 }, { "content": "/// Converts `s` into a [proc_macro2::TokenStream]\n\npub fn tokenize(s: &str) -> Result<TokenStream, String> {\n\n s.parse::<TokenStream>().map_err_to_string()\n\n}\n\n\n", "file_path": "src/lowertest/src/lib.rs", "rank": 82, "score": 243319.5589894561 }, { "content": "/// Rewrites predicates that contain subqueries so that the subqueries\n\n/// appear in their own later predicate when possible.\n\n///\n\n/// For example, this function rewrites this expression\n\n///\n\n/// ```text\n\n/// Filter {\n\n/// predicates: [a = b AND EXISTS (<subquery 1>) AND c = d AND (<subquery 2>) = e]\n\n/// }\n\n/// ```\n\n///\n\n/// like so:\n\n///\n\n/// ```text\n\n/// Filter {\n\n/// predicates: [\n\n/// a = b AND c = d,\n\n/// EXISTS (<subquery>),\n\n/// (<subquery 2>) = e,\n\n/// ]\n\n/// }\n\n/// ```\n\n///\n\n/// The rewrite causes decorrelation to incorporate prior predicates into\n\n/// the outer relation upon which the subquery is evaluated. In the above\n\n/// rewritten example, the `EXISTS (<subquery>)` will only be evaluated for\n\n/// outer rows where `a = b AND c = d`. The second subquery, `(<subquery 2>)\n\n/// = e`, will be further restricted to outer rows that match `A = b AND c =\n\n/// d AND EXISTS(<subquery>)`. This can vastly reduce the cost of the\n\n/// subquery, especially when the original conjunction contains join keys.\n\npub fn split_subquery_predicates(expr: &mut HirRelationExpr) {\n\n fn walk_relation(expr: &mut HirRelationExpr) {\n\n expr.visit_mut(&mut |expr| match expr {\n\n HirRelationExpr::Map { scalars, .. } => {\n\n for scalar in scalars {\n\n walk_scalar(scalar);\n\n }\n\n }\n\n HirRelationExpr::CallTable { exprs, .. 
} => {\n\n for expr in exprs {\n\n walk_scalar(expr);\n\n }\n\n }\n\n HirRelationExpr::Filter { predicates, .. } => {\n\n let mut subqueries = vec![];\n\n for predicate in &mut *predicates {\n\n walk_scalar(predicate);\n\n extract_conjuncted_subqueries(predicate, &mut subqueries);\n\n }\n\n // TODO(benesch): we could be smarter about the order in which\n", "file_path": "src/sql/src/plan/transform_expr.rs", "rank": 83, "score": 242311.757480725 }, { "content": "/// Turns the json version of a MirRelationExpr into the [lowertest::to_json]\n\n/// syntax.\n\n///\n\n/// The return value is a tuple of:\n\n/// 1. The translated MirRelationExpr.\n\n/// 2. The commands to register sources referenced by the MirRelationExpr with\n\n/// the test catalog.\n\npub fn json_to_spec(rel_json: &str, catalog: &TestCatalog) -> (String, Vec<String>) {\n\n let mut ctx = MirRelationExprDeserializeContext::new(&catalog);\n\n let spec = from_json(\n\n &serde_json::from_str(rel_json).unwrap(),\n\n \"MirRelationExpr\",\n\n &RTI,\n\n &mut ctx,\n\n );\n\n let mut source_defs = ctx\n\n .list_scope_references()\n\n .map(|(name, typ)| {\n\n format!(\n\n \"(defsource {} {})\",\n\n name,\n\n from_json(\n\n &serde_json::to_value(typ).unwrap(),\n\n \"RelationType\",\n\n &RTI,\n\n &mut GenericTestDeserializeContext::default()\n\n )\n", "file_path": "src/expr-test-util/src/lib.rs", "rank": 84, "score": 241302.8842831285 }, { "content": "pub fn hmac_string<'a>(\n\n datums: &[Datum<'a>],\n\n temp_storage: &'a RowArena,\n\n) -> Result<Datum<'a>, EvalError> {\n\n let to_digest = datums[0].unwrap_str().as_bytes();\n\n let key = datums[1].unwrap_str().as_bytes();\n\n let typ = datums[2].unwrap_str();\n\n hmac_inner(to_digest, key, typ, temp_storage)\n\n}\n\n\n", "file_path": "src/expr/src/scalar/func.rs", "rank": 85, "score": 240333.03782242525 }, { "content": "fn write_records<W>(writer: &mut Writer<W>, records: &[String]) -> Result<(), String>\n\nwhere\n\n W: Write,\n\n{\n\n let schema = 
writer.schema().clone();\n\n for record in records {\n\n let record = avro::from_json(\n\n &serde_json::from_str(record).map_err(|e| format!(\"parsing avro datum: {:#}\", e))?,\n\n schema.top_node(),\n\n )?;\n\n writer\n\n .append(record)\n\n .map_err(|e| format!(\"writing avro record: {:#}\", e))?;\n\n }\n\n writer\n\n .flush()\n\n .map_err(|e| format!(\"flushing avro writer: {:#}\", e))?;\n\n Ok(())\n\n}\n\n\n\npub struct VerifyAction {\n\n sink: String,\n\n expected: Vec<String>,\n\n context: Context,\n\n}\n\n\n", "file_path": "src/testdrive/src/action/avro_ocf.rs", "rank": 86, "score": 240120.01054039248 }, { "content": "fn trait_generics(generics: &BTreeMap<String, BTreeSet<String>>) -> String {\n\n generics.keys().map(|id| format!(\"{}, \", id)).join(\"\")\n\n}\n\n\n", "file_path": "src/walkabout/src/gen.rs", "rank": 87, "score": 239657.84983705662 }, { "content": "/// Rewrites quantified comparisons into simpler EXISTS operators.\n\n///\n\n/// Note that this transformation is only valid when the expression is\n\n/// used in a context where the distinction between `FALSE` and `NULL`\n\n/// is immaterial, e.g., in a `WHERE` clause or a `CASE` condition, or\n\n/// when the inputs to the comparison are non-nullable. This function is careful\n\n/// to only apply the transformation when it is valid to do so.\n\n///\n\n/// WHERE (SELECT any(<pred>) FROM <rel>)\n\n/// =>\n\n/// WHERE EXISTS(SELECT * FROM <rel> WHERE <pred>)\n\n///\n\n/// WHERE (SELECT all(<pred>) FROM <rel>)\n\n/// =>\n\n/// WHERE NOT EXISTS(SELECT * FROM <rel> WHERE (NOT <pred>) OR <pred> IS NULL)\n\n///\n\n/// See Section 3.5 of \"Execution Strategies for SQL Subqueries\" by\n\n/// M. 
Elhemali, et al.\n\npub fn try_simplify_quantified_comparisons(expr: &mut HirRelationExpr) {\n\n fn walk_relation(expr: &mut HirRelationExpr, outers: &[RelationType]) {\n\n match expr {\n\n HirRelationExpr::Map { scalars, input } => {\n\n walk_relation(input, outers);\n\n let mut outers = outers.to_vec();\n\n outers.insert(0, input.typ(&outers, &NO_PARAMS));\n\n for scalar in scalars {\n\n walk_scalar(scalar, &outers, false);\n\n let (inner, outers) = outers\n\n .split_first_mut()\n\n .expect(\"outers known to have at least one element\");\n\n let scalar_type = scalar.typ(&outers, inner, &NO_PARAMS);\n\n inner.column_types.push(scalar_type);\n\n }\n\n }\n\n HirRelationExpr::Filter { predicates, input } => {\n\n walk_relation(input, outers);\n\n let mut outers = outers.to_vec();\n\n outers.insert(0, input.typ(&outers, &NO_PARAMS));\n", "file_path": "src/sql/src/plan/transform_expr.rs", "rank": 88, "score": 239366.65755598157 }, { "content": "/// Rewrites `query`'s references of `from` to `to` or errors if too ambiguous.\n\nfn rewrite_query(from: FullName, to: String, query: &mut Query<Raw>) -> Result<(), String> {\n\n let from_ident = Ident::new(from.item.clone());\n\n let to_ident = Ident::new(to);\n\n let qual_depth =\n\n QueryIdentAgg::determine_qual_depth(&from_ident, Some(to_ident.clone()), query)?;\n\n CreateSqlRewriter::rewrite_query_with_qual_depth(from, to_ident.clone(), qual_depth, query);\n\n // Ensure that our rewrite didn't didn't introduce ambiguous\n\n // references to `to_name`.\n\n match QueryIdentAgg::determine_qual_depth(&to_ident, None, query) {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n", "file_path": "src/sql/src/ast/transform.rs", "rank": 89, "score": 238882.84193898976 }, { "content": "pub fn plan_create_source(\n\n scx: &StatementContext,\n\n stmt: CreateSourceStatement<Raw>,\n\n) -> Result<Plan, anyhow::Error> {\n\n let CreateSourceStatement {\n\n name,\n\n col_names,\n\n connector,\n\n with_options,\n\n envelope,\n\n 
if_not_exists,\n\n materialized,\n\n format,\n\n key_constraint,\n\n include_metadata,\n\n } = &stmt;\n\n\n\n let with_options_original = with_options;\n\n let mut with_options = normalize::options(with_options);\n\n\n", "file_path": "src/sql/src/plan/statement/ddl.rs", "rank": 90, "score": 238574.0035539952 }, { "content": "pub fn describe_create_source(\n\n _: &StatementContext,\n\n _: CreateSourceStatement<Raw>,\n\n) -> Result<StatementDesc, anyhow::Error> {\n\n Ok(StatementDesc::new(None))\n\n}\n\n\n", "file_path": "src/sql/src/plan/statement/ddl.rs", "rank": 91, "score": 238574.0035539952 }, { "content": "/// Creates a source dataflow operator from a connector implementing [SimpleSource](SimpleSource)\n\npub fn create_source_simple<G, C>(\n\n config: SourceConfig<G>,\n\n connector: C,\n\n) -> (\n\n (\n\n timely::dataflow::Stream<G, (Row, Timestamp, Diff)>,\n\n timely::dataflow::Stream<G, SourceError>,\n\n ),\n\n Option<SourceToken>,\n\n)\n\nwhere\n\n G: Scope<Timestamp = Timestamp>,\n\n C: SimpleSource + Send + 'static,\n\n{\n\n let SourceConfig {\n\n id,\n\n name,\n\n upstream_name,\n\n scope,\n\n active,\n", "file_path": "src/dataflow/src/source/mod.rs", "rank": 92, "score": 238020.80568620737 }, { "content": "fn trait_generics_and_bounds(generics: &BTreeMap<String, BTreeSet<String>>) -> String {\n\n generics\n\n .iter()\n\n .map(|(ident, bounds)| {\n\n if bounds.len() == 0 {\n\n format!(\"{}, \", ident.to_string())\n\n } else {\n\n format!(\"{}: {}, \", ident, bounds.iter().join(\"+\"))\n\n }\n\n })\n\n .join(\"\")\n\n}\n\n\n", "file_path": "src/walkabout/src/gen.rs", "rank": 93, "score": 236999.21515586032 }, { "content": "fn build_row_schema_field<F: FnMut() -> String>(\n\n namer: &mut F,\n\n names_seen: &mut HashSet<String>,\n\n typ: &ColumnType,\n\n) -> serde_json::value::Value {\n\n let mut field_type = match &typ.scalar_type {\n\n ScalarType::Bool => json!(\"boolean\"),\n\n ScalarType::Int16\n\n | ScalarType::Int32\n\n | ScalarType::Oid\n\n | 
ScalarType::RegClass\n\n | ScalarType::RegProc\n\n | ScalarType::RegType => {\n\n json!(\"int\")\n\n }\n\n ScalarType::Int64 => json!(\"long\"),\n\n ScalarType::Float32 => json!(\"float\"),\n\n ScalarType::Float64 => json!(\"double\"),\n\n ScalarType::Date => json!({\n\n \"type\": \"int\",\n", "file_path": "src/interchange/src/json.rs", "rank": 94, "score": 235571.7700978657 }, { "content": "pub fn plan_show_create_source(\n\n scx: &StatementContext,\n\n ShowCreateSourceStatement { source_name }: ShowCreateSourceStatement,\n\n) -> Result<Plan, anyhow::Error> {\n\n let source = scx.resolve_item(source_name)?;\n\n if let CatalogItemType::Source = source.item_type() {\n\n Ok(Plan::SendRows(SendRowsPlan {\n\n rows: vec![Row::pack_slice(&[\n\n Datum::String(&source.name().to_string()),\n\n Datum::String(source.create_sql()),\n\n ])],\n\n }))\n\n } else {\n\n bail!(\"{} is not a source\", source.name());\n\n }\n\n}\n\n\n", "file_path": "src/sql/src/plan/statement/show.rs", "rank": 95, "score": 234826.8638434366 }, { "content": "pub fn describe_show_create_source(\n\n _: &StatementContext,\n\n _: ShowCreateSourceStatement,\n\n) -> Result<StatementDesc, anyhow::Error> {\n\n Ok(StatementDesc::new(Some(\n\n RelationDesc::empty()\n\n .with_column(\"Source\", ScalarType::String.nullable(false))\n\n .with_column(\"Create Source\", ScalarType::String.nullable(false)),\n\n )))\n\n}\n\n\n", "file_path": "src/sql/src/plan/statement/show.rs", "rank": 96, "score": 234826.8638434366 }, { "content": "/// Unescapes a testdrive byte string.\n\n///\n\n/// The escape character is `\\` and the only interesting escape sequence is\n\n/// `\\xNN`, where each `N` is a valid hexadecimal digit. 
All other characters\n\n/// following a backslash are taken literally.\n\npub fn unescape(s: &[u8]) -> Result<Vec<u8>, String> {\n\n let mut out = vec![];\n\n let mut s = s.iter().copied().fuse();\n\n while let Some(b) = s.next() {\n\n match b {\n\n b'\\\\' if s.next() == Some(b'x') => match (next_hex(&mut s), next_hex(&mut s)) {\n\n (Some(c1), Some(c0)) => out.push((c1 << 4) + c0),\n\n _ => return Err(\"invalid hexadecimal escape\".into()),\n\n },\n\n b'\\\\' => continue,\n\n _ => out.push(b),\n\n }\n\n }\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/testdrive/src/format/bytes.rs", "rank": 97, "score": 234511.16085939299 }, { "content": "pub fn zig_i64(n: i64, buffer: &mut Vec<u8>) {\n\n encode_variable(((n << 1) ^ (n >> 63)) as u64, buffer)\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 98, "score": 234318.15858528664 }, { "content": "pub fn zig_i32(n: i32, buffer: &mut Vec<u8>) {\n\n zig_i64(n as i64, buffer)\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 99, "score": 234318.15858528664 } ]
Rust
gtk4/src/auto/icon_theme.rs
haecker-felix/gtk4-rs
f225c9f2d1b4f563aafb0c54581b8e8cafd5807c
use crate::IconLookupFlags; use crate::IconPaintable; use crate::TextDirection; use glib::object::Cast; use glib::object::IsA; use glib::object::ObjectType as ObjectType_; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use glib::StaticType; use glib::ToValue; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct IconTheme(Object<ffi::GtkIconTheme>); match fn { get_type => || ffi::gtk_icon_theme_get_type(), } } impl IconTheme { #[doc(alias = "gtk_icon_theme_new")] pub fn new() -> IconTheme { assert_initialized_main_thread!(); unsafe { from_glib_full(ffi::gtk_icon_theme_new()) } } #[doc(alias = "gtk_icon_theme_add_resource_path")] pub fn add_resource_path(&self, path: &str) { unsafe { ffi::gtk_icon_theme_add_resource_path(self.to_glib_none().0, path.to_glib_none().0); } } #[doc(alias = "gtk_icon_theme_add_search_path")] pub fn add_search_path<P: AsRef<std::path::Path>>(&self, path: P) { unsafe { ffi::gtk_icon_theme_add_search_path( self.to_glib_none().0, path.as_ref().to_glib_none().0, ); } } #[doc(alias = "gtk_icon_theme_get_display")] pub fn get_display(&self) -> Option<gdk::Display> { unsafe { from_glib_none(ffi::gtk_icon_theme_get_display(self.to_glib_none().0)) } } #[doc(alias = "gtk_icon_theme_get_icon_names")] pub fn get_icon_names(&self) -> Vec<glib::GString> { unsafe { FromGlibPtrContainer::from_glib_full(ffi::gtk_icon_theme_get_icon_names( self.to_glib_none().0, )) } } #[doc(alias = "gtk_icon_theme_get_resource_path")] pub fn get_resource_path(&self) -> Vec<glib::GString> { unsafe { FromGlibPtrContainer::from_glib_full(ffi::gtk_icon_theme_get_resource_path( self.to_glib_none().0, )) } } #[doc(alias = "gtk_icon_theme_get_search_path")] pub fn get_search_path(&self) -> Vec<std::path::PathBuf> { unsafe { FromGlibPtrContainer::from_glib_full(ffi::gtk_icon_theme_get_search_path( self.to_glib_none().0, )) } } #[doc(alias = "gtk_icon_theme_get_theme_name")] pub fn 
get_theme_name(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::gtk_icon_theme_get_theme_name(self.to_glib_none().0)) } } #[doc(alias = "gtk_icon_theme_has_icon")] pub fn has_icon(&self, icon_name: &str) -> bool { unsafe { from_glib(ffi::gtk_icon_theme_has_icon( self.to_glib_none().0, icon_name.to_glib_none().0, )) } } #[doc(alias = "gtk_icon_theme_lookup_by_gicon")] pub fn lookup_by_gicon<P: IsA<gio::Icon>>( &self, icon: &P, size: i32, scale: i32, direction: TextDirection, flags: IconLookupFlags, ) -> Option<IconPaintable> { unsafe { from_glib_full(ffi::gtk_icon_theme_lookup_by_gicon( self.to_glib_none().0, icon.as_ref().to_glib_none().0, size, scale, direction.to_glib(), flags.to_glib(), )) } } #[doc(alias = "gtk_icon_theme_lookup_icon")] pub fn lookup_icon( &self, icon_name: &str, fallbacks: &[&str], size: i32, scale: i32, direction: TextDirection, flags: IconLookupFlags, ) -> Option<IconPaintable> { unsafe { from_glib_full(ffi::gtk_icon_theme_lookup_icon( self.to_glib_none().0, icon_name.to_glib_none().0, fallbacks.to_glib_none().0, size, scale, direction.to_glib(), flags.to_glib(), )) } } #[doc(alias = "gtk_icon_theme_set_search_path")] pub fn set_search_path(&self, path: &[&std::path::Path]) { unsafe { ffi::gtk_icon_theme_set_search_path(self.to_glib_none().0, path.to_glib_none().0); } } #[doc(alias = "gtk_icon_theme_set_theme_name")] pub fn set_theme_name(&self, theme_name: Option<&str>) { unsafe { ffi::gtk_icon_theme_set_theme_name(self.to_glib_none().0, theme_name.to_glib_none().0); } } pub fn set_property_display(&self, display: Option<&gdk::Display>) { unsafe { glib::gobject_ffi::g_object_set_property( self.as_ptr() as *mut glib::gobject_ffi::GObject, b"display\0".as_ptr() as *const _, glib::Value::from(display).to_glib_none().0, ); } } pub fn set_property_resource_path(&self, resource_path: &[&str]) { unsafe { glib::gobject_ffi::g_object_set_property( self.as_ptr() as *mut glib::gobject_ffi::GObject, b"resource-path\0".as_ptr() as *const 
_, glib::Value::from(resource_path).to_glib_none().0, ); } } #[doc(alias = "gtk_icon_theme_get_for_display")] pub fn get_for_display(display: &gdk::Display) -> Option<IconTheme> { assert_initialized_main_thread!(); unsafe { from_glib_none(ffi::gtk_icon_theme_get_for_display( display.to_glib_none().0, )) } } pub fn connect_changed<F: Fn(&IconTheme) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn changed_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"changed\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( changed_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn connect_property_display_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_display_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::display\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_display_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn connect_property_icon_names_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_icon_names_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::icon-names\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_icon_names_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn 
connect_property_resource_path_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_resource_path_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::resource-path\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_resource_path_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn connect_property_search_path_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_search_path_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::search-path\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_search_path_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn connect_property_theme_name_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_theme_name_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::theme-name\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_theme_name_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } } impl Default for IconTheme { fn default() -> Self { Self::new() } } #[derive(Clone, Default)] pub struct IconThemeBuilder { display: Option<gdk::Display>, resource_path: Option<Vec<String>>, search_path: Option<Vec<String>>, 
theme_name: Option<String>, } impl IconThemeBuilder { pub fn new() -> Self { Self::default() } pub fn build(self) -> IconTheme { let mut properties: Vec<(&str, &dyn ToValue)> = vec![]; if let Some(ref display) = self.display { properties.push(("display", display)); } if let Some(ref resource_path) = self.resource_path { properties.push(("resource-path", resource_path)); } if let Some(ref search_path) = self.search_path { properties.push(("search-path", search_path)); } if let Some(ref theme_name) = self.theme_name { properties.push(("theme-name", theme_name)); } let ret = glib::Object::new::<IconTheme>(&properties).expect("object new"); ret } pub fn display(mut self, display: &gdk::Display) -> Self { self.display = Some(display.clone()); self } pub fn resource_path(mut self, resource_path: Vec<String>) -> Self { self.resource_path = Some(resource_path); self } pub fn search_path(mut self, search_path: Vec<String>) -> Self { self.search_path = Some(search_path); self } pub fn theme_name(mut self, theme_name: &str) -> Self { self.theme_name = Some(theme_name.to_string()); self } } impl fmt::Display for IconTheme { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("IconTheme") } }
use crate::IconLookupFlags; use crate::IconPaintable; use crate::TextDirection; use glib::object::Cast; use glib::object::IsA; use glib::object::ObjectType as ObjectType_; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use glib::StaticType; use glib::ToValue; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct IconTheme(Object<ffi::GtkIconTheme>); match fn { get_type => || ffi::gtk_icon_theme_get_type(), } } impl IconTheme { #[doc(alias = "gtk_icon_theme_new")] pub fn new() -> IconTheme { assert_initialized_main_thread!(); unsafe { from_glib_full(ffi::gtk_icon_theme_new()) } } #[doc(alias = "gtk_icon_theme_add_resource_path")] pub fn add_resource_path(&self, path: &str) { unsafe { ffi::gtk_icon_theme_add_resource_path(self.to_glib_none().0, path.to_glib_none().0); } } #[doc(alias = "gtk_icon_theme_add_search_path")] pub fn add_search_path<P: AsRef<std::path::Path>>(&self, path: P) { unsafe { ffi::gtk_icon_theme_add_search_path( self.to_glib_none().0, path.as_ref().to_glib_none().0, ); } } #[doc(alias = "gtk_icon_theme_get_display")] pub fn get_display(&self) -> Option<gdk::Display> { unsafe { from_glib_none(ffi::gtk_icon_theme_get_display(self.to_glib_none().0)) } } #[doc(alias = "gtk_icon_theme_get_icon_names")] pub fn get_icon_names(&self) -> Vec<glib::GString> { unsafe { FromGlibPtrContainer::from_glib_full(ffi::gtk_icon_theme_get_icon_names( self.to_glib_none().0, )) } } #[doc(alias = "gtk_icon_theme_get_resource_path")] pub fn get_resource_path(&self) -> Vec<glib::GString> { unsafe { FromGlibPtrContainer::from_glib_full(ffi::gtk_icon_theme_get_resource_path( self.to_glib_none().0, )) } } #[doc(alias = "gtk_icon_theme_get_search_path")] pub fn get_search_path(&self) -> Vec<std::path::PathBuf> { unsafe { FromGlibPtrContainer::from_glib_full(ffi::gtk_icon_theme_get_search_path( self.to_glib_none().0, )) } } #[doc(alias = "gtk_icon_theme_get_theme_name")] pub fn 
get_theme_name(&self) -> Option<glib::GString> { unsafe { from_glib_full(ffi::gtk_icon_theme_get
b_full(ffi::gtk_icon_theme_lookup_icon( self.to_glib_none().0, icon_name.to_glib_none().0, fallbacks.to_glib_none().0, size, scale, direction.to_glib(), flags.to_glib(), )) } } #[doc(alias = "gtk_icon_theme_set_search_path")] pub fn set_search_path(&self, path: &[&std::path::Path]) { unsafe { ffi::gtk_icon_theme_set_search_path(self.to_glib_none().0, path.to_glib_none().0); } } #[doc(alias = "gtk_icon_theme_set_theme_name")] pub fn set_theme_name(&self, theme_name: Option<&str>) { unsafe { ffi::gtk_icon_theme_set_theme_name(self.to_glib_none().0, theme_name.to_glib_none().0); } } pub fn set_property_display(&self, display: Option<&gdk::Display>) { unsafe { glib::gobject_ffi::g_object_set_property( self.as_ptr() as *mut glib::gobject_ffi::GObject, b"display\0".as_ptr() as *const _, glib::Value::from(display).to_glib_none().0, ); } } pub fn set_property_resource_path(&self, resource_path: &[&str]) { unsafe { glib::gobject_ffi::g_object_set_property( self.as_ptr() as *mut glib::gobject_ffi::GObject, b"resource-path\0".as_ptr() as *const _, glib::Value::from(resource_path).to_glib_none().0, ); } } #[doc(alias = "gtk_icon_theme_get_for_display")] pub fn get_for_display(display: &gdk::Display) -> Option<IconTheme> { assert_initialized_main_thread!(); unsafe { from_glib_none(ffi::gtk_icon_theme_get_for_display( display.to_glib_none().0, )) } } pub fn connect_changed<F: Fn(&IconTheme) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn changed_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"changed\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( changed_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn connect_property_display_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn 
notify_display_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::display\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_display_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn connect_property_icon_names_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_icon_names_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::icon-names\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_icon_names_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn connect_property_resource_path_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_resource_path_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::resource-path\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_resource_path_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn connect_property_search_path_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_search_path_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = 
Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::search-path\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_search_path_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } pub fn connect_property_theme_name_notify<F: Fn(&IconTheme) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_theme_name_trampoline<F: Fn(&IconTheme) + 'static>( this: *mut ffi::GtkIconTheme, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&from_glib_borrow(this)) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::theme-name\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_theme_name_trampoline::<F> as *const (), )), Box_::into_raw(f), ) } } } impl Default for IconTheme { fn default() -> Self { Self::new() } } #[derive(Clone, Default)] pub struct IconThemeBuilder { display: Option<gdk::Display>, resource_path: Option<Vec<String>>, search_path: Option<Vec<String>>, theme_name: Option<String>, } impl IconThemeBuilder { pub fn new() -> Self { Self::default() } pub fn build(self) -> IconTheme { let mut properties: Vec<(&str, &dyn ToValue)> = vec![]; if let Some(ref display) = self.display { properties.push(("display", display)); } if let Some(ref resource_path) = self.resource_path { properties.push(("resource-path", resource_path)); } if let Some(ref search_path) = self.search_path { properties.push(("search-path", search_path)); } if let Some(ref theme_name) = self.theme_name { properties.push(("theme-name", theme_name)); } let ret = glib::Object::new::<IconTheme>(&properties).expect("object new"); ret } pub fn display(mut self, display: &gdk::Display) -> Self { self.display = Some(display.clone()); self } pub fn resource_path(mut self, resource_path: Vec<String>) -> Self { self.resource_path = Some(resource_path); self } pub fn search_path(mut self, search_path: Vec<String>) -> Self { self.search_path = 
Some(search_path); self } pub fn theme_name(mut self, theme_name: &str) -> Self { self.theme_name = Some(theme_name.to_string()); self } } impl fmt::Display for IconTheme { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("IconTheme") } }
_theme_name(self.to_glib_none().0)) } } #[doc(alias = "gtk_icon_theme_has_icon")] pub fn has_icon(&self, icon_name: &str) -> bool { unsafe { from_glib(ffi::gtk_icon_theme_has_icon( self.to_glib_none().0, icon_name.to_glib_none().0, )) } } #[doc(alias = "gtk_icon_theme_lookup_by_gicon")] pub fn lookup_by_gicon<P: IsA<gio::Icon>>( &self, icon: &P, size: i32, scale: i32, direction: TextDirection, flags: IconLookupFlags, ) -> Option<IconPaintable> { unsafe { from_glib_full(ffi::gtk_icon_theme_lookup_by_gicon( self.to_glib_none().0, icon.as_ref().to_glib_none().0, size, scale, direction.to_glib(), flags.to_glib(), )) } } #[doc(alias = "gtk_icon_theme_lookup_icon")] pub fn lookup_icon( &self, icon_name: &str, fallbacks: &[&str], size: i32, scale: i32, direction: TextDirection, flags: IconLookupFlags, ) -> Option<IconPaintable> { unsafe { from_gli
random
[]
Rust
src/utils.rs
drahnr/pyroscope-rs
c0a64b7b3d3b9a166bf3648ff3c53534a24d68b1
use crate::error::Result; use crate::PyroscopeError; use std::collections::HashMap; pub fn merge_tags_with_app_name( application_name: String, tags: HashMap<String, String>, ) -> Result<String> { let mut tags_vec = tags .into_iter() .filter(|(k, _)| k != "__name__") .map(|(k, v)| format!("{}={}", k, v)) .collect::<Vec<String>>(); tags_vec.sort(); let tags_str = tags_vec.join(","); if !tags_str.is_empty() { Ok(format!("{}{{{}}}", application_name, tags_str,)) } else { Ok(application_name) } } #[cfg(test)] mod merge_tags_with_app_name_tests { use std::collections::HashMap; use crate::utils::merge_tags_with_app_name; #[test] fn merge_tags_with_app_name_with_tags() { let mut tags = HashMap::new(); tags.insert("env".to_string(), "staging".to_string()); tags.insert("region".to_string(), "us-west-1".to_string()); tags.insert("__name__".to_string(), "reserved".to_string()); assert_eq!( merge_tags_with_app_name("my.awesome.app.cpu".to_string(), tags).unwrap(), "my.awesome.app.cpu{env=staging,region=us-west-1}".to_string() ) } #[test] fn merge_tags_with_app_name_without_tags() { assert_eq!( merge_tags_with_app_name("my.awesome.app.cpu".to_string(), HashMap::default()).unwrap(), "my.awesome.app.cpu".to_string() ) } } pub fn check_err<T: Ord + Default>(num: T) -> Result<T> { if num < T::default() { return Err(PyroscopeError::from(std::io::Error::last_os_error())); } Ok(num) } #[cfg(test)] mod check_err_tests { use crate::utils::check_err; #[test] fn check_err_success() { assert_eq!(check_err(1).unwrap(), 1) } #[test] fn check_err_error() { assert!(check_err(-1).is_err()) } } pub fn get_current_time_secs() -> Result<u64> { Ok(std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH)? 
.as_secs()) } #[cfg(test)] mod get_current_time_secs_tests { use crate::utils::get_current_time_secs; #[test] fn get_current_time_secs_success() { assert!(get_current_time_secs().is_ok()) } } #[derive(Debug, PartialEq)] pub struct TimeRange { pub from: u64, pub until: u64, pub current: u64, pub rem: u64, } pub fn get_time_range(timestamp: u64) -> Result<TimeRange> { if timestamp == 0 { return get_time_range(get_current_time_secs()?); } Ok(TimeRange { from: timestamp / 10 * 10, until: timestamp / 10 * 10 + 10, current: timestamp, rem: 10 - (timestamp % 10), }) } #[cfg(test)] mod get_time_range_tests { use crate::utils::{get_time_range, TimeRange}; #[test] fn get_time_range_verify() { assert_eq!( get_time_range(1644194479).unwrap(), TimeRange { from: 1644194470, until: 1644194480, current: 1644194479, rem: 1, } ); assert_eq!( get_time_range(1644194470).unwrap(), TimeRange { from: 1644194470, until: 1644194480, current: 1644194470, rem: 10, } ); assert_eq!( get_time_range(1644194476).unwrap(), TimeRange { from: 1644194470, until: 1644194480, current: 1644194476, rem: 4, } ); } }
use crate::error::Result; use crate::PyroscopeError; use std::collections::HashMap; pub fn merge_tags_with_app_name( application_name: String, tags: HashMap<String, String>, ) -> Result<String> { let mut tags_vec = tags .into_iter() .filter(|(k, _)| k != "__name__") .map(|(k, v)| format!("{}={}", k, v)) .collect::<Vec<String>>(); tags_vec.sort(); let tags_str = tags_vec.join(","); if !tags_str.is_empty() { Ok(format!("{}{{{}}}", application_name, tags_str,)) } else { Ok(application_name) } } #[cfg(test)] mod merge_tags_with_app_name_tests { use std::collections::HashMap; use crate::utils::merge_tags_with_app_name; #[test] fn merge_tags_with_app_name_with_tags() { let mut tags = HashMap::new(); tags.insert("env".to_string(), "staging".to_string()); tags.insert("region".to_string(), "us-west-1".to_string()); tags.insert("__name__".to_string(), "reserved".to_string()); assert_eq!( merge_tags_with_app_name("my.awesome.app.cpu".to_string(), tags).unwrap(), "my.awesome.app.cpu{env=staging,region=us-west-1}".to_string() ) } #[test] fn merge_tags_with_app_name_without_tags() { assert_eq!( merge_tags_with_app_name("my.awesome.app.cpu".to_string(), HashMap::default()).unwrap(), "my.awesome.app.cpu".to_string() ) } } pub fn check_err<T: Ord + Default>(num: T) -> Result<T> { if num < T::default() { return Err(PyroscopeError::from(std::io::Error::last_os_error())); } Ok(num) } #[cfg(test)] mod check_err_tests { use crate::utils::check_err; #[test] fn check_err_success() { assert_eq!(check_err(1).unwrap(), 1) } #[test] fn check_err_error() { assert!(check_err(-1).is_err()) } } pub fn get_current_time_secs() -> Result<u64> { Ok(std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH)? 
.as_secs()) } #[cfg(test)] mod get_current_time_secs_tests { use crate::utils::get_current_time_secs; #[test] fn get_current_time_secs_success() { assert!(get_current_time_secs().is_ok()) } } #[derive(Debug, PartialEq)] pub struct TimeRange { pub from: u64, pub until: u64, pub current: u64, pub rem: u64, } pub fn get_time_range(timestamp: u64) -> Result<TimeRange> { if timestamp == 0 { return get_time_range(get_current_time_secs()?); } Ok(TimeRange { from: timestamp / 10 * 10, until: timestamp / 10 * 10 + 10, current: timestamp, rem: 10 - (timestamp % 10), }) } #[cfg(test)] mod get_time_range_tests { use crate::utils::{get_time_range, TimeRange}; #[test] fn get_time_range_verify() { assert_eq!( get_time_range(1644194479).unwrap(), TimeRange { from: 1644194470, until: 1644194480, current: 1644194479, rem: 1, } ); assert_eq!( get_time_range(1644194470).unwrap(), TimeRange { from: 1644194470, until: 1644194480,
until: 1644194480, current: 1644194476, rem: 4, } ); } }
current: 1644194470, rem: 10, } ); assert_eq!( get_time_range(1644194476).unwrap(), TimeRange { from: 1644194470,
random
[ { "content": "fn fibonacci(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci(n - 1) + fibonacci(n - 2),\n\n }\n\n}\n\n\n", "file_path": "examples/tags.rs", "rank": 3, "score": 102153.85947132888 }, { "content": "fn fibonacci(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci(n - 1) + fibonacci(n - 2),\n\n }\n\n}\n\n\n", "file_path": "examples/with-logger.rs", "rank": 5, "score": 79077.1395550161 }, { "content": "fn fibonacci2(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci2(n - 1) + fibonacci2(n - 2),\n\n }\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<()> {\n\n let mut agent = PyroscopeAgent::builder(\"http://localhost:4040\", \"example.async\")\n\n .tags(&[(\"TagA\", \"ValueA\"), (\"TagB\", \"ValueB\")])\n\n .build()?;\n\n\n\n // Start Agent\n\n agent.start();\n\n\n\n tokio::task::spawn(async {\n\n let n = fibonacci1(45);\n\n println!(\"Thread 1: {}\", n);\n\n })\n", "file_path": "examples/async.rs", "rank": 6, "score": 79077.1395550161 }, { "content": "fn fibonacci1(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci1(n - 1) + fibonacci1(n - 2),\n\n }\n\n}\n\n\n", "file_path": "examples/async.rs", "rank": 7, "score": 79077.1395550161 }, { "content": "fn fibonacci(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci(n - 1) + fibonacci(n - 2),\n\n }\n\n}\n\n\n", "file_path": "examples/basic.rs", "rank": 8, "score": 79077.1395550161 }, { "content": "fn fibonacci(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci(n - 1) + fibonacci(n - 2),\n\n }\n\n}\n\n\n", "file_path": "examples/backend.rs", "rank": 9, "score": 79077.1395550161 }, { "content": "fn fibonacci(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci(n - 1) + fibonacci(n - 2),\n\n }\n\n}\n\n\n", "file_path": "examples/error.rs", "rank": 10, "score": 79077.1395550161 }, { "content": "#[test]\n\nfn test_PyroscopeConfig_tags() {\n\n let config = PyroscopeConfig::new(\"http://localhost:8080\", 
\"myapp\").tags(&[(\"tag\", \"value\")]);\n\n assert_eq!(config.tags.len(), 1);\n\n assert_eq!(config.tags.get(\"tag\"), Some(&\"value\".to_owned()));\n\n}\n\n\n", "file_path": "tests/agent.rs", "rank": 11, "score": 78622.12736325353 }, { "content": "fn fibonacci(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci(n - 1) + fibonacci(n - 2),\n\n }\n\n}\n\n\n", "file_path": "examples/backend-pprof.rs", "rank": 12, "score": 76843.86540972274 }, { "content": "fn fibonacci1(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci1(n - 1) + fibonacci1(n - 2),\n\n }\n\n}\n\n\n", "file_path": "examples/multi-thread.rs", "rank": 13, "score": 76843.86540972274 }, { "content": "fn fibonacci2(n: u64) -> u64 {\n\n match n {\n\n 0 | 1 => 1,\n\n n => fibonacci2(n - 1) + fibonacci2(n - 2),\n\n }\n\n}\n\n\n", "file_path": "examples/multi-thread.rs", "rank": 14, "score": 76843.86540972274 }, { "content": "#[test]\n\nfn test_PyroscopeConfig_tags_empty() {\n\n let config = PyroscopeConfig::new(\"http://localhost:8080\", \"myapp\");\n\n assert_eq!(config.tags.len(), 0);\n\n}\n\n\n", "file_path": "tests/agent.rs", "rank": 15, "score": 75510.21684697428 }, { "content": "#[test]\n\nfn test_PyroscopeConfig_tags_multiple() {\n\n let config = PyroscopeConfig::new(\"http://localhost:8080\", \"myapp\")\n\n .tags(&[(\"tag1\", \"value1\"), (\"tag2\", \"value2\")]);\n\n assert_eq!(config.tags.len(), 2);\n\n assert_eq!(config.tags.get(\"tag1\"), Some(&\"value1\".to_owned()));\n\n assert_eq!(config.tags.get(\"tag2\"), Some(&\"value2\".to_owned()));\n\n}\n", "file_path": "tests/agent.rs", "rank": 16, "score": 75510.21684697428 }, { "content": "/// libc::timerfd_settime wrapper\n\npub fn timerfd_settime(\n\n timer_fd: i32, set_flags: libc::c_int, new_value: &mut libc::itimerspec,\n\n old_value: &mut libc::itimerspec,\n\n) -> Result<()> {\n\n check_err(unsafe { libc::timerfd_settime(timer_fd, set_flags, new_value, old_value) })?;\n\n Ok(())\n\n}\n\n\n", "file_path": 
"src/timer/epoll.rs", "rank": 17, "score": 67417.77598621773 }, { "content": "/// libc::epoll_ctl wrapper\n\npub fn epoll_ctl(\n\n epoll_fd: i32, epoll_flags: libc::c_int, timer_fd: i32, event: &mut libc::epoll_event,\n\n) -> Result<()> {\n\n check_err(unsafe { libc::epoll_ctl(epoll_fd, epoll_flags, timer_fd, event) })?;\n\n Ok(())\n\n}\n\n\n\n/// libc::epoll_wait wrapper\n\n///\n\n/// # Safety\n\n/// This function is a wrapper for libc::epoll_wait.\n\npub unsafe fn epoll_wait(\n\n epoll_fd: i32, events: *mut libc::epoll_event, maxevents: libc::c_int, timeout: libc::c_int,\n\n) -> Result<()> {\n\n check_err(libc::epoll_wait(epoll_fd, events, maxevents, timeout))?;\n\n Ok(())\n\n}\n\n\n\n/// libc::read wrapper\n\n///\n\n/// # Safety\n\n/// This function is a wrapper for libc::read.\n\npub unsafe fn read(timer_fd: i32, bufptr: *mut libc::c_void, count: libc::size_t) -> Result<()> {\n\n check_err(libc::read(timer_fd, bufptr, count))?;\n\n Ok(())\n\n}\n", "file_path": "src/timer/epoll.rs", "rank": 18, "score": 67417.77598621773 }, { "content": "#[test]\n\nfn test_timer() {\n\n // Initialize Timer\n\n let mut timer = Timer::default().initialize().unwrap();\n\n\n\n // Attach a listener\n\n let (tx, rx) = std::sync::mpsc::channel();\n\n timer.attach_listener(tx).unwrap();\n\n\n\n // Wait for event (should arrive in 10s)\n\n let recv: u64 = rx.recv().unwrap();\n\n\n\n // Get current time\n\n let now = std::time::SystemTime::now()\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .unwrap()\n\n .as_secs();\n\n\n\n // Check that recv and now are within 10s of each other\n\n assert!(recv - now < 10);\n\n\n\n // Check that recv is divisible by 10\n\n assert!(recv % 10 == 0);\n\n}\n", "file_path": "tests/timer.rs", "rank": 19, "score": 61395.988642959885 }, { "content": "#[test]\n\nfn test_session_new() {\n\n let config = PyroscopeConfig {\n\n url: \"http://localhost:8080\".to_string(),\n\n application_name: \"test\".to_string(),\n\n tags: HashMap::new(),\n\n sample_rate: 100,\n\n 
};\n\n\n\n let report = vec![1, 2, 3];\n\n\n\n let session = Session::new(1950, config, report).unwrap();\n\n\n\n assert_eq!(session.from, 1940);\n\n assert_eq!(session.until, 1950);\n\n}\n\n\n", "file_path": "tests/session.rs", "rank": 20, "score": 58961.70651023141 }, { "content": "#[test]\n\nfn test_state_default() {\n\n assert_eq!(State::default(), State::Uninitialized);\n\n}\n", "file_path": "tests/backends.rs", "rank": 21, "score": 58961.70651023141 }, { "content": "/// Backend Trait\n\npub trait Backend: Send + Debug {\n\n /// Get the backend state.\n\n fn get_state(&self) -> State;\n\n /// Initialize the backend.\n\n fn initialize(&mut self, sample_rate: i32) -> Result<()>;\n\n /// Start the backend.\n\n fn start(&mut self) -> Result<()>;\n\n /// Stop the backend.\n\n fn stop(&mut self) -> Result<()>;\n\n /// Generate profiling report\n\n fn report(&mut self) -> Result<Vec<u8>>;\n\n}\n\n\n\npub mod pprof;\n", "file_path": "src/backends/mod.rs", "rank": 22, "score": 57610.821103540555 }, { "content": "#[test]\n\nfn test_PyroscopeConfig_new() {\n\n let config = PyroscopeConfig::new(\"http://localhost:8080\", \"myapp\");\n\n assert_eq!(config.url, \"http://localhost:8080\");\n\n assert_eq!(config.application_name, \"myapp\");\n\n assert_eq!(config.sample_rate, 100i32);\n\n assert_eq!(config.tags.len(), 0);\n\n}\n\n\n", "file_path": "tests/agent.rs", "rank": 23, "score": 56741.8322260455 }, { "content": "#[test]\n\nfn test_session_send_error() {\n\n let config = PyroscopeConfig {\n\n url: \"http://invalid_url\".to_string(),\n\n application_name: \"test\".to_string(),\n\n tags: HashMap::new(),\n\n sample_rate: 100,\n\n };\n\n\n\n let report = vec![1, 2, 3];\n\n\n\n let session = Session::new(1950, config, report).unwrap();\n\n\n\n // TODO: to figure this out\n\n}\n", "file_path": "tests/session.rs", "rank": 24, "score": 56741.8322260455 }, { "content": "#[test]\n\nfn test_session_manager_new() {\n\n let session_manager = SessionManager::new().unwrap();\n\n 
assert!(session_manager.handle.is_some());\n\n}\n\n\n", "file_path": "tests/session.rs", "rank": 25, "score": 56741.8322260455 }, { "content": "#[test]\n\nfn test_session_manager_push_kill() {\n\n let session_manager = SessionManager::new().unwrap();\n\n session_manager.push(SessionSignal::Kill).unwrap();\n\n assert_eq!(session_manager.handle.unwrap().join().unwrap().unwrap(), ());\n\n}\n\n\n", "file_path": "tests/session.rs", "rank": 26, "score": 54708.40278853736 }, { "content": "#[test]\n\nfn test_PyroscopeConfig_sample_rate() {\n\n let config = PyroscopeConfig::new(\"http://localhost:8080\", \"myapp\").sample_rate(10);\n\n assert_eq!(config.sample_rate, 10i32);\n\n}\n\n\n", "file_path": "tests/agent.rs", "rank": 27, "score": 54708.40278853736 }, { "content": "fn main() -> Result<()> {\n\n let mut agent = PyroscopeAgent::builder(\"http://localhost:4040\", \"example.tags\")\n\n .sample_rate(100)\n\n .tags(&[(\"Hostname\", \"pyroscope\")])\n\n .build()?;\n\n\n\n // Start Agent\n\n agent.start();\n\n\n\n // Make some calculation\n\n let _result = fibonacci(47);\n\n\n\n // Add Tags\n\n agent.add_tags(&[(\"series\", \"Number 2\")])?;\n\n\n\n // Do more calculation\n\n let _result = fibonacci(47);\n\n\n\n // Stop Agent\n\n agent.stop();\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/tags.rs", "rank": 28, "score": 53304.814787919255 }, { "content": "/// libc::epoll_create1 wrapper\n\npub fn epoll_create1(epoll_flags: libc::c_int) -> Result<i32> {\n\n check_err(unsafe { libc::epoll_create1(epoll_flags) }).map(|epoll_fd| epoll_fd as i32)\n\n}\n\n\n", "file_path": "src/timer/epoll.rs", "rank": 29, "score": 49080.16693063815 }, { "content": "// Copyright: https://github.com/YangKeao\n\nfn fold<W>(report: &Report, with_thread_name: bool, mut writer: W) -> Result<()>\n\nwhere\n\n W: std::io::Write,\n\n{\n\n for (key, value) in report.data.iter() {\n\n if with_thread_name {\n\n if !key.thread_name.is_empty() {\n\n write!(writer, \"{};\", key.thread_name)?;\n\n } else {\n\n 
write!(writer, \"{:?};\", key.thread_id)?;\n\n }\n\n }\n\n\n\n for (index, frame) in key.frames.iter().rev().enumerate() {\n\n for (index, symbol) in frame.iter().rev().enumerate() {\n\n if index + 1 == frame.len() {\n\n write!(writer, \"{}\", symbol)?;\n\n } else {\n\n write!(writer, \"{};\", symbol)?;\n\n }\n", "file_path": "src/backends/pprof.rs", "rank": 30, "score": 44794.01005286888 }, { "content": "/// libc::timerfd wrapper\n\npub fn timerfd_create(clockid: libc::clockid_t, clock_flags: libc::c_int) -> Result<i32> {\n\n check_err(unsafe { libc::timerfd_create(clockid, clock_flags) }).map(|timer_fd| timer_fd as i32)\n\n}\n\n\n", "file_path": "src/timer/epoll.rs", "rank": 31, "score": 42876.84018299695 }, { "content": "/// libc::kevent wrapper\n\nfn kevent(\n\n kqueue: i32, change: *const libc::kevent, c_count: libc::c_int, events: *mut libc::kevent,\n\n e_count: libc::c_int, timeout: *const libc::timespec,\n\n) -> Result<()> {\n\n check_err(unsafe { libc::kevent(kqueue, change, c_count, events, e_count, timeout) })?;\n\n Ok(())\n\n}\n", "file_path": "src/timer/kqueue.rs", "rank": 32, "score": 29547.676209618163 }, { "content": "fn main() {\n\n // Initialize the Timer\n\n let mut timer = Timer::default().initialize().unwrap();\n\n\n\n // Create a streaming channel\n\n let (tx, rx): (Sender<u64>, Receiver<u64>) = channel();\n\n\n\n let (tx2, rx2): (Sender<u64>, Receiver<u64>) = channel();\n\n\n\n // Attach tx to Timer\n\n timer.attach_listener(tx).unwrap();\n\n timer.attach_listener(tx2).unwrap();\n\n\n\n // Show current time\n\n let now = std::time::SystemTime::now()\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .unwrap()\n\n .as_secs();\n\n println!(\"Current Time: {}\", now);\n\n\n", "file_path": "examples/internals-timer.rs", "rank": 33, "score": 29547.676209618163 }, { "content": "fn main() -> Result<()> {\n\n // Force rustc to display the log messages in the console.\n\n std::env::set_var(\"RUST_LOG\", \"trace\");\n\n\n\n // Initialize the logger.\n\n 
pretty_env_logger::init_timed();\n\n\n\n info!(\"With Logger example\");\n\n\n\n // Create a new agent.\n\n let mut agent = PyroscopeAgent::builder(\"http://localhost:4040\", \"example.logger\").build()?;\n\n\n\n // Start Agent\n\n agent.start();\n\n\n\n let _result = fibonacci(47);\n\n\n\n // Stop Agent\n\n agent.stop();\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/with-logger.rs", "rank": 34, "score": 28893.25960865385 }, { "content": "fn main() -> Result<()> {\n\n let mut agent = PyroscopeAgent::builder(\"http://localhost:4040\", \"example.basic\")\n\n .tags(&[(\"TagA\", \"ValueA\"), (\"TagB\", \"ValueB\")])\n\n .build()?;\n\n\n\n // Show start time\n\n let start = std::time::SystemTime::now()\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .unwrap()\n\n .as_secs();\n\n println!(\"Start Time: {}\", start);\n\n\n\n // Start Agent\n\n agent.start();\n\n\n\n let _result = fibonacci(47);\n\n\n\n // Show stop time\n\n let stop = std::time::SystemTime::now()\n\n .duration_since(std::time::UNIX_EPOCH)\n", "file_path": "examples/basic.rs", "rank": 35, "score": 28893.25960865385 }, { "content": "fn main() -> Result<()> {\n\n // Force rustc to display the log messages in the console.\n\n std::env::set_var(\"RUST_LOG\", \"trace\");\n\n\n\n // Initialize the logger.\n\n pretty_env_logger::init_timed();\n\n\n\n let mut agent = PyroscopeAgent::builder(\"http://invalid_url\", \"example.error\")\n\n .build()\n\n .unwrap();\n\n // Start Agent\n\n agent.start();\n\n\n\n let _result = fibonacci(47);\n\n\n\n // Stop Agent\n\n agent.stop();\n\n\n\n drop(agent);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/error.rs", "rank": 36, "score": 28893.25960865385 }, { "content": "fn main() -> Result<()> {\n\n let mut agent = PyroscopeAgent::builder(\"http://localhost:4040\", \"example.backend\")\n\n .backend(Pprof::default())\n\n .sample_rate(100)\n\n .tags(&[(\"TagA\", \"ValueA\"), (\"TagB\", \"ValueB\")])\n\n .build()?;\n\n\n\n agent.start();\n\n let _result = fibonacci(45);\n\n 
agent.stop();\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/backend.rs", "rank": 37, "score": 28893.25960865385 }, { "content": "fn main() -> Result<()> {\n\n let mut agent = PyroscopeAgent::builder(\"http://localhost:4040\", \"example.multithread\")\n\n .sample_rate(100)\n\n .build()?;\n\n\n\n // Start Agent\n\n agent.start();\n\n\n\n let handle_1 = thread::spawn(|| {\n\n fibonacci1(45);\n\n });\n\n\n\n let handle_2 = thread::spawn(|| {\n\n fibonacci2(45);\n\n });\n\n\n\n // Wait for the threads to complete\n\n handle_1.join().unwrap();\n\n handle_2.join().unwrap();\n\n\n\n // Stop Agent\n\n agent.stop();\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/multi-thread.rs", "rank": 38, "score": 27639.6768811091 }, { "content": "fn main() -> Result<()> {\n\n let mut backend = Pprof::default();\n\n backend.initialize(100)?;\n\n backend.start()?;\n\n\n\n fibonacci(45);\n\n let report = backend.report()?;\n\n println!(\"{}\", std::str::from_utf8(&report).unwrap());\n\n\n\n backend.stop()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/backend-pprof.rs", "rank": 39, "score": 27639.6768811091 }, { "content": "extern crate pyroscope;\n\n\n\nuse pyroscope::{PyroscopeAgent, Result};\n\n\n", "file_path": "examples/tags.rs", "rank": 40, "score": 27607.912554694452 }, { "content": "use pyroscope::timer::Timer;\n\n\n\n#[test]\n", "file_path": "tests/timer.rs", "rank": 41, "score": 27054.98903918724 }, { "content": "use pyroscope::backends::State;\n\n\n\n#[test]\n", "file_path": "tests/backends.rs", "rank": 42, "score": 27054.98903918724 }, { "content": "use pyroscope::pyroscope::PyroscopeConfig;\n\n\n\n#[test]\n", "file_path": "tests/agent.rs", "rank": 43, "score": 27054.854774097348 }, { "content": "use pyroscope::{\n\n pyroscope::PyroscopeConfig,\n\n session::{Session, SessionManager, SessionSignal},\n\n PyroscopeError,\n\n};\n\nuse std::{\n\n collections::HashMap,\n\n sync::mpsc::{sync_channel, Receiver, SyncSender},\n\n thread,\n\n thread::JoinHandle,\n\n};\n\n\n\n#[test]\n", 
"file_path": "tests/session.rs", "rank": 44, "score": 27053.54060521106 }, { "content": "// Copyright 2021 Developers of Pyroscope.\n\n\n\n// Licensed under the Apache License, Version 2.0 <LICENSE or\n\n// https://www.apache.org/licenses/LICENSE-2.0>. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n// Possibly: ios, netbsd, openbsd, freebsd\n\n#[cfg(target_os = \"macos\")] pub mod kqueue;\n\n#[cfg(target_os = \"macos\")] pub use kqueue::Timer;\n\n\n\n// Possibly: android\n\n#[cfg(target_os = \"linux\")] pub mod epoll;\n\n#[cfg(target_os = \"linux\")] pub use epoll::Timer;\n\n\n\n#[cfg(not(any(target_os = \"linux\", target_os = \"macos\")))] pub mod sleep;\n\n#[cfg(not(any(target_os = \"linux\", target_os = \"macos\")))] pub use sleep::Timer;\n", "file_path": "src/timer/mod.rs", "rank": 45, "score": 26083.68912779638 }, { "content": "use crate::Result;\n\n\n\nuse std::fmt::Debug;\n\n\n\n/// Backend State\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum State {\n\n /// Backend is uninitialized.\n\n Uninitialized,\n\n /// Backend is ready to be used.\n\n Ready,\n\n /// Backend is running.\n\n Running,\n\n}\n\n\n\nimpl Default for State {\n\n fn default() -> Self {\n\n State::Uninitialized\n\n }\n\n}\n\n\n\n/// Backend Trait\n", "file_path": "src/backends/mod.rs", "rank": 46, "score": 26080.798031497678 }, { "content": "/// libc::kqueue wrapper\n\nfn kqueue() -> Result<i32> {\n\n check_err(unsafe { libc::kqueue() }).map(|kq| kq as i32)\n\n}\n\n\n", "file_path": "src/timer/kqueue.rs", "rank": 47, "score": 25979.373783845807 }, { "content": " &mut new_value,\n\n &mut old_value,\n\n )\n\n .unwrap();\n\n assert!(void == ());\n\n }\n\n\n\n #[test]\n\n fn test_epoll_create1() {\n\n let epoll_fd = epoll_create1(0).unwrap();\n\n assert!(epoll_fd > 0);\n\n }\n\n\n\n #[test]\n\n fn test_epoll_ctl() {\n\n let mut event = libc::epoll_event {\n\n events: libc::EPOLLIN as u32,\n\n u64: 1,\n\n };\n\n\n", "file_path": 
"tests/timer-epoll.rs", "rank": 48, "score": 25396.451181114473 }, { "content": "#[cfg(target_os = \"linux\")]\n\nmod tests {\n\n use pyroscope::timer::epoll::{\n\n epoll_create1, epoll_ctl, epoll_wait, timerfd_create, timerfd_settime,\n\n };\n\n\n\n #[test]\n\n fn test_timerfd_create() {\n\n let timer_fd = timerfd_create(libc::CLOCK_REALTIME, libc::TFD_NONBLOCK).unwrap();\n\n assert!(timer_fd > 0);\n\n }\n\n\n\n #[test]\n\n fn test_timerfd_settime() {\n\n let mut new_value = libc::itimerspec {\n\n it_interval: libc::timespec {\n\n tv_sec: 10,\n\n tv_nsec: 0,\n\n },\n\n it_value: libc::timespec {\n", "file_path": "tests/timer-epoll.rs", "rank": 49, "score": 25395.680112589285 }, { "content": " let epoll_fd = epoll_create1(0).unwrap();\n\n let timer_fd = timerfd_create(libc::CLOCK_REALTIME, libc::TFD_NONBLOCK).unwrap();\n\n let void = epoll_ctl(epoll_fd, libc::EPOLL_CTL_ADD, timer_fd, &mut event).unwrap();\n\n assert!(void == ());\n\n }\n\n\n\n #[test]\n\n fn test_epoll_wait() {\n\n let mut event = libc::epoll_event {\n\n events: libc::EPOLLIN as u32,\n\n u64: 1,\n\n };\n\n\n\n let epoll_fd = epoll_create1(0).unwrap();\n\n let timer_fd = timerfd_create(libc::CLOCK_REALTIME, libc::TFD_NONBLOCK).unwrap();\n\n epoll_ctl(epoll_fd, libc::EPOLL_CTL_ADD, timer_fd, &mut event).unwrap();\n\n\n\n let mut events = vec![libc::epoll_event { events: 0, u64: 0 }];\n\n\n\n // Expire in 1ms\n\n let void = unsafe { epoll_wait(epoll_fd, events.as_mut_ptr(), 1, 1).unwrap() };\n\n\n\n assert!(void == ());\n\n }\n\n}\n", "file_path": "tests/timer-epoll.rs", "rank": 50, "score": 25394.179907713766 }, { "content": " tv_sec: 0,\n\n tv_nsec: 0,\n\n },\n\n };\n\n\n\n let mut old_value = libc::itimerspec {\n\n it_interval: libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n },\n\n it_value: libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n },\n\n };\n\n\n\n let timer_fd = timerfd_create(libc::CLOCK_REALTIME, libc::TFD_NONBLOCK).unwrap();\n\n let void = timerfd_settime(\n\n timer_fd,\n\n 
libc::TFD_TIMER_ABSTIME,\n", "file_path": "tests/timer-epoll.rs", "rank": 51, "score": 25389.831126238903 }, { "content": "/// ```\n\n/// use pyroscope::pyroscope::PyroscopeConfig;\n\n/// let config = PyroscopeConfig::new(\"http://localhost:8080\", \"my-app\");\n\n/// ```\n\n#[derive(Clone, Debug)]\n\npub struct PyroscopeConfig {\n\n /// Pyroscope Server Address\n\n pub url: String,\n\n /// Application Name\n\n pub application_name: String,\n\n /// Tags\n\n pub tags: HashMap<String, String>,\n\n /// Sample rate used in Hz\n\n pub sample_rate: i32,\n\n // TODO\n\n // log_level\n\n // auth_token\n\n // upstream_request_timeout = 10s\n\n // no_logging\n\n}\n", "file_path": "src/pyroscope.rs", "rank": 53, "score": 10.60756112593422 }, { "content": " /// let mut config = PyroscopeConfig::new(\"http://localhost:8080\", \"my-app\");\n\n /// config.set_sample_rate(10)\n\n /// ?;\n\n /// ```\n\n pub fn sample_rate(self, sample_rate: i32) -> Self {\n\n Self {\n\n sample_rate,\n\n ..self\n\n }\n\n }\n\n\n\n /// Set the tags\n\n /// # Example\n\n /// ```ignore\n\n /// use pyroscope::pyroscope::PyroscopeConfig;\n\n /// let config = PyroscopeConfig::new(\"http://localhost:8080\", \"my-app\")\n\n /// .tags(vec![(\"env\", \"dev\")])?;\n\n /// ```\n\n pub fn tags(self, tags: &[(&str, &str)]) -> Self {\n\n // Convert &[(&str, &str)] to HashMap(String, String)\n", "file_path": "src/pyroscope.rs", "rank": 54, "score": 10.232384801474867 }, { "content": "//! To stop profiling code. You can restart the profiling at a later point.\n\n//!\n\n//! ```ignore\n\n//! agent.stop();\n\n//! 
```\n\n\n\n// Re-exports structs\n\npub use crate::pyroscope::PyroscopeAgent;\n\npub use error::{PyroscopeError, Result};\n\n\n\n// Public modules\n\npub mod backends;\n\npub mod error;\n\npub mod pyroscope;\n\npub mod session;\n\npub mod timer;\n\n\n\n// Private modules\n\nmod utils;\n", "file_path": "src/lib.rs", "rank": 55, "score": 9.654363616550942 }, { "content": " ///\n\n /// Timer will dispatch an event with the timestamp of the current instant,\n\n /// every 10th second to all attached senders\n\n pub fn attach_listener(&mut self, tx: Sender<u64>) -> Result<()> {\n\n // Push Sender to a Vector of Sender(s)\n\n let txs = Arc::clone(&self.txs);\n\n txs.lock()?.push(tx);\n\n\n\n Ok(())\n\n }\n\n\n\n /// Clear the listeners (txs) from Timer. This will shutdown the Timer thread\n\n pub fn drop_listeners(&mut self) -> Result<()> {\n\n let txs = Arc::clone(&self.txs);\n\n txs.lock()?.clear();\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Wrapper for libc functions.\n\n///\n\n/// Error wrapper for some libc functions used by the library. This only does\n\n/// Error (-1 return) wrapping. Alternatively, the nix crate could be used\n\n/// instead of expanding this wrappers (if more functions and types are used\n\n/// from libc)\n\n\n\n/// libc::timerfd wrapper\n", "file_path": "src/timer/epoll.rs", "rank": 56, "score": 9.49793048370932 }, { "content": "\n\n /// Attach an mpsc::Sender to Timer\n\n ///\n\n /// Timer will dispatch an event with the timestamp of the current instant,\n\n /// every 10th second to all attached senders\n\n pub fn attach_listener(&mut self, tx: Sender<u64>) -> Result<()> {\n\n // Push Sender to a Vector of Sender(s)\n\n let txs = Arc::clone(&self.txs);\n\n txs.lock()?.push(tx);\n\n\n\n Ok(())\n\n }\n\n\n\n /// Clear the listeners (txs) from Timer. 
This will shutdown the Timer thread\n\n pub fn drop_listeners(&mut self) -> Result<()> {\n\n let txs = Arc::clone(&self.txs);\n\n txs.lock()?.clear();\n\n\n\n Ok(())\n\n }\n", "file_path": "src/timer/kqueue.rs", "rank": 57, "score": 8.632852047260474 }, { "content": " // Convert &[(&str, &str)] to HashMap(String, String)\n\n let tags_hashmap: HashMap<String, String> = tags\n\n .to_owned()\n\n .iter()\n\n .cloned()\n\n .map(|(a, b)| (a.to_owned(), b.to_owned()))\n\n .collect();\n\n\n\n self.config.tags.extend(tags_hashmap);\n\n\n\n // Restart Agent\n\n self.start();\n\n\n\n Ok(())\n\n }\n\n\n\n /// Remove tags. This will restart the agent.\n\n /// # Example\n\n /// ```ignore\n\n /// # use pyroscope::*;\n", "file_path": "src/pyroscope.rs", "rank": 58, "score": 8.462845524816867 }, { "content": " txs.lock()?.iter().for_each(|tx| {\n\n // Send event to attached Sender\n\n let _res = tx.send(current);\n\n });\n\n\n\n // Sleep for 10s\n\n thread::sleep(Duration::from_millis(10000));\n\n }\n\n }));\n\n\n\n Ok(Self { handle, ..self })\n\n }\n\n\n\n /// Attach an mpsc::Sender to Timer\n\n ///\n\n /// Timer will dispatch an event with the timestamp of the current instant,\n\n /// every 10th second to all attached senders\n\n pub fn attach_listener(&mut self, tx: Sender<u64>) -> Result<()> {\n\n // Push Sender to a Vector of Sender(s)\n\n let txs = Arc::clone(&self.txs);\n", "file_path": "src/timer/sleep.rs", "rank": 59, "score": 8.429896898156114 }, { "content": " /// # use std::result;\n\n /// # fn main() -> result::Result<(), error::PyroscopeError> {\n\n /// let agent = PyroscopeAgent::builder(\"http://localhost:8080\", \"my-app\")\n\n /// .tags(vec![(\"tag\", \"value\")])\n\n /// .build()?;\n\n /// agent.start()?;\n\n /// // Expensive operation\n\n /// agent.remove_tags(vec![\"tag\"])?;\n\n /// // Un-Tagged operation\n\n /// agent.stop()?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn remove_tags(&mut self, tags: &[&str]) -> Result<()> {\n\n log::debug!(target: 
LOG_TAG, \"Removing tags\");\n\n\n\n // Check that tags are not empty\n\n if tags.is_empty() {\n\n return Ok(());\n\n }\n", "file_path": "src/pyroscope.rs", "rank": 60, "score": 7.967814818038094 }, { "content": " let tags_hashmap: HashMap<String, String> = tags\n\n .to_owned()\n\n .iter()\n\n .cloned()\n\n .map(|(a, b)| (a.to_owned(), b.to_owned()))\n\n .collect();\n\n\n\n Self {\n\n tags: tags_hashmap,\n\n ..self\n\n }\n\n }\n\n}\n\n\n\n/// PyroscopeAgent Builder\n\n///\n\n/// Alternatively, you can use PyroscopeAgent::build() which is a short-hand\n\n/// for calling PyroscopeAgentBuilder::new()\n\n///\n\n/// # Example\n", "file_path": "src/pyroscope.rs", "rank": 61, "score": 7.796086645947269 }, { "content": "///\n\n/// Used to contain the session data, and send it to the server.\n\n#[derive(Clone, Debug)]\n\npub struct Session {\n\n pub config: PyroscopeConfig,\n\n pub report: Vec<u8>,\n\n pub from: u64,\n\n pub until: u64,\n\n}\n\n\n\nimpl Session {\n\n /// Create a new Session\n\n /// # Example\n\n /// ```ignore\n\n /// let config = PyroscopeConfig::new(\"https://localhost:8080\", \"my-app\");\n\n /// let report = vec![1, 2, 3];\n\n /// let until = 154065120;\n\n /// let session = Session::new(until, config, report)?;\n\n /// ```\n\n pub fn new(until: u64, config: PyroscopeConfig, report: Vec<u8>) -> Result<Self> {\n", "file_path": "src/session.rs", "rank": 62, "score": 7.461701214523933 }, { "content": " /// Add tags. 
This will restart the agent.\n\n /// # Example\n\n /// ```ignore\n\n /// let agent = PyroscopeAgent::builder(\"http://localhost:8080\", \"my-app\").build()?;\n\n /// agent.start()?;\n\n /// // Expensive operation\n\n /// agent.add_tags(vec![\"tag\", \"value\"])?;\n\n /// // Tagged operation\n\n /// agent.stop()?;\n\n /// ```\n\n pub fn add_tags(&mut self, tags: &[(&str, &str)]) -> Result<()> {\n\n log::debug!(target: LOG_TAG, \"Adding tags\");\n\n // Check that tags are not empty\n\n if tags.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n // Stop Agent\n\n self.stop();\n\n\n", "file_path": "src/pyroscope.rs", "rank": 64, "score": 6.627717905000132 }, { "content": "use std::{\n\n sync::mpsc::{sync_channel, Receiver, SyncSender},\n\n thread,\n\n thread::JoinHandle,\n\n};\n\n\n\nuse crate::pyroscope::PyroscopeConfig;\n\nuse crate::utils::get_time_range;\n\nuse crate::utils::merge_tags_with_app_name;\n\nuse crate::Result;\n\n\n\nconst LOG_TAG: &str = \"Pyroscope::Session\";\n\n\n\n/// Session Signal\n\n///\n\n/// This enum is used to send data to the session thread. It can also kill the session thread.\n\n#[derive(Debug)]\n\npub enum SessionSignal {\n\n /// Send session data to the session thread.\n\n Session(Session),\n", "file_path": "src/session.rs", "rank": 65, "score": 6.522338566389541 }, { "content": "\n\n/// PyroscopeAgent is the main object of the library. 
It is used to start and stop the profiler, schedule the timer, and send the profiler data to the server.\n\n#[derive(Debug)]\n\npub struct PyroscopeAgent {\n\n timer: Timer,\n\n session_manager: SessionManager,\n\n tx: Option<Sender<u64>>,\n\n handle: Option<JoinHandle<Result<()>>>,\n\n running: Arc<(Mutex<bool>, Condvar)>,\n\n\n\n /// Profiler backend\n\n pub backend: Arc<Mutex<dyn Backend>>,\n\n /// Configuration Object\n\n pub config: PyroscopeConfig,\n\n}\n\n\n\n/// Gracefully stop the profiler.\n\nimpl Drop for PyroscopeAgent {\n\n /// Properly shutdown the agent.\n\n fn drop(&mut self) {\n", "file_path": "src/pyroscope.rs", "rank": 66, "score": 6.3956821434243185 }, { "content": "use pprof::{ProfilerGuard, ProfilerGuardBuilder, Report};\n\n\n\nuse crate::backends::Backend;\n\nuse crate::backends::State;\n\nuse crate::PyroscopeError;\n\nuse crate::Result;\n\n\n\n#[derive(Default)]\n\npub struct Pprof<'a> {\n\n inner_builder: Option<ProfilerGuardBuilder>,\n\n guard: Option<ProfilerGuard<'a>>,\n\n state: State,\n\n}\n\n\n\nimpl std::fmt::Debug for Pprof<'_> {\n\n fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n\n write!(fmt, \"Pprof Backend\")\n\n }\n\n}\n\n\n", "file_path": "src/backends/pprof.rs", "rank": 69, "score": 5.745623671046103 }, { "content": " txs.lock()?.push(tx);\n\n\n\n // Spawn a Thread\n\n let handle = Some(thread::spawn(move || {\n\n // Get remaining time for 10th second fire event\n\n let rem = get_time_range(0)?.rem;\n\n\n\n // Sleep for rem seconds\n\n thread::sleep(Duration::from_secs(rem));\n\n\n\n loop {\n\n // Exit thread if there are no listeners\n\n if txs.lock()?.len() == 0 {\n\n return Ok(());\n\n }\n\n\n\n // Get current time\n\n let current = get_time_range(0)?.from;\n\n\n\n // Iterate through Senders\n", "file_path": "src/timer/sleep.rs", "rank": 70, "score": 5.695207870972074 }, { "content": " /// ```ignore\n\n /// let builder = PyroscopeAgentBuilder::new(\"http://localhost:8080\", 
\"my-app\")\n\n /// .tags(vec![(\"env\", \"dev\")])\n\n /// .build()\n\n /// ?;\n\n /// ```\n\n pub fn tags(self, tags: &[(&str, &str)]) -> Self {\n\n Self {\n\n config: self.config.tags(tags),\n\n ..self\n\n }\n\n }\n\n\n\n /// Initialize the backend, timer and return a PyroscopeAgent object.\n\n pub fn build(self) -> Result<PyroscopeAgent> {\n\n // Initiliaze the backend\n\n let backend = Arc::clone(&self.backend);\n\n backend.lock()?.initialize(self.config.sample_rate)?;\n\n log::trace!(target: LOG_TAG, \"Backend initialized\");\n\n\n", "file_path": "src/pyroscope.rs", "rank": 71, "score": 5.507839665059274 }, { "content": " backend.lock()?.stop()?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Stop the agent. The agent will stop profiling and send a last report to the server.\n\n /// # Example\n\n /// ```ignore\n\n /// let agent = PyroscopeAgent::builder(\"http://localhost:8080\", \"my-app\").build()?;\n\n /// agent.start()?;\n\n /// // Expensive operation\n\n /// agent.stop();\n\n /// ```\n\n pub fn stop(&mut self) {\n\n match self._stop() {\n\n Ok(_) => log::trace!(target: LOG_TAG, \"Agent stopped\"),\n\n Err(_) => log::error!(target: LOG_TAG, \"Error stopping agent\"),\n\n }\n\n }\n\n\n", "file_path": "src/pyroscope.rs", "rank": 72, "score": 5.184117313778091 }, { "content": "\n\n return Ok(());\n\n }\n\n }\n\n Ok(())\n\n }));\n\n\n\n Ok(())\n\n }\n\n\n\n /// Start profiling and sending data. The agent will keep running until stopped. 
The agent will send data to the server every 10s secondy.\n\n /// # Example\n\n /// ```ignore\n\n /// let agent = PyroscopeAgent::builder(\"http://localhost:8080\", \"my-app\").build()?;\n\n /// agent.start();\n\n /// ```\n\n pub fn start(&mut self) {\n\n match self._start() {\n\n Ok(_) => log::trace!(target: LOG_TAG, \"Agent started\"),\n\n Err(_) => log::error!(target: LOG_TAG, \"Error starting agent\"),\n", "file_path": "src/pyroscope.rs", "rank": 73, "score": 5.134137607945836 }, { "content": " backend.lock()?.start()?;\n\n\n\n // set running to true\n\n let pair = Arc::clone(&self.running);\n\n let (lock, _cvar) = &*pair;\n\n let mut running = lock.lock()?;\n\n *running = true;\n\n drop(running);\n\n\n\n let (tx, rx): (Sender<u64>, Receiver<u64>) = channel();\n\n self.timer.attach_listener(tx.clone())?;\n\n self.tx = Some(tx);\n\n\n\n let config = self.config.clone();\n\n\n\n let stx = self.session_manager.tx.clone();\n\n\n\n self.handle = Some(std::thread::spawn(move || {\n\n log::trace!(target: LOG_TAG, \"Main Thread started\");\n\n\n", "file_path": "src/pyroscope.rs", "rank": 74, "score": 5.082880364965115 }, { "content": "///\n\n/// The Timer thread will run continously until all Senders are dropped.\n\n/// The Timer thread will be joined when all Senders are dropped.\n\n\n\n#[derive(Debug, Default)]\n\npub struct Timer {\n\n /// A vector to store listeners (mpsc::Sender)\n\n txs: Arc<Mutex<Vec<Sender<u64>>>>,\n\n\n\n /// Thread handle\n\n pub handle: Option<JoinHandle<Result<()>>>,\n\n}\n\n\n\nimpl Timer {\n\n /// Initialize Timer and run a thread to send events to attached listeners\n\n pub fn initialize(self) -> Result<Self> {\n\n let txs = Arc::clone(&self.txs);\n\n\n\n // Add Default tx\n\n let (tx, _rx): (Sender<u64>, Receiver<u64>) = channel();\n", "file_path": "src/timer/sleep.rs", "rank": 75, "score": 5.0398930017810635 }, { "content": "///\n\n/// The Timer thread will run continously until all Senders are dropped.\n\n/// The Timer thread will be 
joined when all Senders are dropped.\n\n\n\n#[derive(Debug, Default)]\n\npub struct Timer {\n\n /// A vector to store listeners (mpsc::Sender)\n\n txs: Arc<Mutex<Vec<Sender<u64>>>>,\n\n\n\n /// Thread handle\n\n pub handle: Option<JoinHandle<Result<()>>>,\n\n}\n\n\n\nimpl Timer {\n\n /// Initialize Timer and run a thread to send events to attached listeners\n\n pub fn initialize(self) -> Result<Self> {\n\n let txs = Arc::clone(&self.txs);\n\n\n\n // Add Default tx\n\n let (tx, _rx): (Sender<u64>, Receiver<u64>) = channel();\n", "file_path": "src/timer/epoll.rs", "rank": 76, "score": 5.0398930017810635 }, { "content": "///\n\n/// The Timer thread will run continously until all Senders are dropped.\n\n/// The Timer thread will be joined when all Senders are dropped.\n\n\n\n#[derive(Debug, Default)]\n\npub struct Timer {\n\n /// A vector to store listeners (mpsc::Sender)\n\n txs: Arc<Mutex<Vec<Sender<u64>>>>,\n\n\n\n /// Thread handle\n\n pub handle: Option<JoinHandle<Result<()>>>,\n\n}\n\n\n\nimpl Timer {\n\n /// Initialize Timer and run a thread to send events to attached listeners\n\n pub fn initialize(self) -> Result<Self> {\n\n let txs = Arc::clone(&self.txs);\n\n\n\n // Add Default tx\n\n let (tx, _rx): (Sender<u64>, Receiver<u64>) = channel();\n", "file_path": "src/timer/kqueue.rs", "rank": 77, "score": 5.0398930017810635 }, { "content": "## Pyroscope Profiler\n\n\n\n**Pyroscope Profiler for Rust. 
Profile your Rust applications.**\n\n\n\n[![license](https://img.shields.io/badge/license-Apache2.0-blue.svg)](LICENSE) \n\n![tests](https://github.com/pyroscope-io/pyroscope-rs/workflows/Tests/badge.svg)\n\n![build](https://github.com/pyroscope-io/pyroscope-rs/workflows/Build/badge.svg)\n\n[![Crate](https://img.shields.io/crates/v/pyroscope.svg)](https://crates.io/crates/pyroscope)\n\n\n\n---\n\n\n\nYou may be looking for:\n\n\n\n- [An overview of Pyroscope](https://pyroscope.io/docs/)\n\n- [Crate Documentation](https://docs.rs/pyroscope/)\n\n- [Examples](examples)\n\n- [Release notes](https://github.com/omarabid/pyroscope/releases)\n\n\n\n### Quick Start\n\n\n\nAdd this to your `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\npyroscope = \"0.3.1\"\n\n```\n\n\n\nConfigure your profiler:\n\n\n\n```rust\n\n let mut agent =\n\n PyroscopeAgent::builder(\"http://localhost:4040\", \"myapp-profile\")\n\n .sample_rate(100)\n\n .build()?;\n\n```\n\n\n\nProfile your code:\n\n\n\n```rust\n\n\n\n agent.start();\n\n // Profiled computation\n\n agent.stop();\n\n \n\n // Non-profiled computation\n\n```\n\n\n\n### Limitations\n\n\n\n- **Backend**: The Pyroscope Agent uses [pprof-rs](https://github.com/tikv/pprof-rs) as a backend. As a result, the [limitations](https://github.com/tikv/pprof-rs#why-not-) for pprof-rs also applies.\n\n- **Tagging**: Adding or removing tags is not possible within threads. In general, the [Pyroscope Agent](https://docs.rs/pyroscope/latest/pyroscope/pyroscope/struct.PyroscopeAgent.html) is not Sync; and as a result a reference cannot be shared between threads. 
A multi-threaded program could be profiled but the agent is not thread-aware and a particular thread cannot be tagged.\n\n- **Timer**: epoll (for Linux) and kqueue (for macOS) are required for a more precise timer.\n\n- **Shutdown**: The Pyroscope Agent might take some time (usually less than 10 seconds) to shutdown properly and drop its threads.\n\n\n", "file_path": "README.md", "rank": 78, "score": 4.72815069089517 }, { "content": " fn epoll_wait(timer_fd: libc::c_int, epoll_fd: libc::c_int) -> Result<()> {\n\n // vector to store events\n\n let mut events = Vec::with_capacity(1);\n\n\n\n // wait for the timer to fire an event. This is function will block.\n\n unsafe {\n\n epoll_wait(epoll_fd, events.as_mut_ptr(), 1, -1)?;\n\n }\n\n\n\n // read the value from the timerfd. This is required to re-arm the timer.\n\n let mut buffer: u64 = 0;\n\n let bufptr: *mut _ = &mut buffer;\n\n unsafe {\n\n read(timer_fd, bufptr as *mut libc::c_void, 8)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Attach an mpsc::Sender to Timer\n", "file_path": "src/timer/epoll.rs", "rank": 79, "score": 4.633375279378602 }, { "content": " /// let session = Session::new(until, config, report)?;\n\n /// session.send()?;\n\n /// ```\n\n pub fn send(self) -> Result<()> {\n\n log::info!(target: LOG_TAG, \"Sending Session: {} - {}\", self.from, self.until);\n\n\n\n // Check if the report is empty\n\n if self.report.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n // Create a new client\n\n let client = reqwest::blocking::Client::new();\n\n\n\n // Clone URL\n\n let url = self.config.url.clone();\n\n\n\n // Merge application name with Tags\n\n let application_name = merge_tags_with_app_name(\n\n self.config.application_name.clone(),\n", "file_path": "src/session.rs", "rank": 80, "score": 4.192175827703862 }, { "content": " txs.lock()?.push(tx);\n\n\n\n Ok(())\n\n }\n\n\n\n /// Clear the listeners (txs) from Timer. 
This will shutdown the Timer thread\n\n pub fn drop_listeners(&mut self) -> Result<()> {\n\n let txs = Arc::clone(&self.txs);\n\n txs.lock()?.clear();\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/timer/sleep.rs", "rank": 81, "score": 4.1295880170122725 }, { "content": "// Copyright 2021 Developers of Pyroscope.\n\n\n\n// Licensed under the Apache License, Version 2.0 <LICENSE or\n\n// https://www.apache.org/licenses/LICENSE-2.0>. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n/// Result Alias with PyroscopeError\n\npub type Result<T> = std::result::Result<T, PyroscopeError>;\n\n\n\n/// Error type of Pyroscope\n\n#[non_exhaustive]\n\n#[derive(thiserror::Error, Debug)]\n\npub enum PyroscopeError {\n\n #[error(\"Other: {}\", &.0)]\n\n AdHoc(String),\n\n\n\n #[error(\"{msg}: {source:?}\")]\n\n Compat{ msg: String, #[source] source: Box<dyn std::error::Error + Send + Sync + 'static> },\n\n\n\n #[error(transparent)]\n", "file_path": "src/error.rs", "rank": 82, "score": 4.0498037482353535 }, { "content": "/// ```ignore\n\n/// use pyroscope::pyroscope::PyroscopeAgentBuilder;\n\n/// let builder = PyroscopeAgentBuilder::new(\"http://localhost:8080\", \"my-app\");\n\n/// let agent = builder.build()?;\n\n/// ```\n\npub struct PyroscopeAgentBuilder {\n\n /// Profiler backend\n\n backend: Arc<Mutex<dyn Backend>>,\n\n /// Configuration Object\n\n config: PyroscopeConfig,\n\n}\n\n\n\nimpl PyroscopeAgentBuilder {\n\n /// Create a new PyroscopeAgentBuilder object. 
url and application_name are required.\n\n /// tags and sample_rate are optional.\n\n ///\n\n /// # Example\n\n /// ```ignore\n\n /// let builder = PyroscopeAgentBuilder::new(\"http://localhost:8080\", \"my-app\");\n\n /// ```\n", "file_path": "src/pyroscope.rs", "rank": 83, "score": 4.037468024180189 }, { "content": " it_interval: libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n },\n\n it_value: libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n },\n\n };\n\n\n\n let set_flags = libc::TFD_TIMER_ABSTIME;\n\n\n\n // Set the timer\n\n timerfd_settime(tfd, set_flags, &mut new_value, &mut old_value)?;\n\n\n\n // Return file descriptor\n\n Ok(tfd)\n\n }\n\n\n\n /// Create a new epoll file descriptor and add the timer to its interests\n", "file_path": "src/timer/epoll.rs", "rank": 84, "score": 4.006400857974169 }, { "content": "}\n\n\n\nimpl PyroscopeAgent {\n\n /// Short-hand for PyroscopeAgentBuilder::build(). This is a convenience method.\n\n ///\n\n /// # Example\n\n /// ```ignore\n\n /// let agent = PyroscopeAgent::builder(\"http://localhost:8080\", \"my-app\").build()?;\n\n /// ```\n\n pub fn builder<S: AsRef<str>>(url: S, application_name: S) -> PyroscopeAgentBuilder {\n\n // Build PyroscopeAgent\n\n PyroscopeAgentBuilder::new(url, application_name)\n\n }\n\n\n\n fn _start(&mut self) -> Result<()> {\n\n log::debug!(target: LOG_TAG, \"Starting\");\n\n\n\n // Create a clone of Backend\n\n let backend = Arc::clone(&self.backend);\n\n // Call start()\n", "file_path": "src/pyroscope.rs", "rank": 85, "score": 4.005089682601506 }, { "content": "use std::{\n\n collections::HashMap,\n\n sync::{\n\n mpsc::{channel, Receiver, Sender},\n\n Arc, Condvar, Mutex,\n\n },\n\n thread::JoinHandle,\n\n};\n\n\n\nuse crate::{\n\n backends::{pprof::Pprof, Backend},\n\n error::Result,\n\n session::{Session, SessionManager, SessionSignal},\n\n timer::Timer,\n\n};\n\n\n\nconst LOG_TAG: &str = \"Pyroscope::Agent\";\n\n\n\n/// Pyroscope Agent Configuration. 
This is the configuration that is passed to the agent.\n\n/// # Example\n", "file_path": "src/pyroscope.rs", "rank": 86, "score": 3.974432796108425 }, { "content": " /// Kill the session thread.\n\n Kill,\n\n}\n\n\n\n/// Manage sessions and send data to the server.\n\n#[derive(Debug)]\n\npub struct SessionManager {\n\n /// The SessionManager thread.\n\n pub handle: Option<JoinHandle<Result<()>>>,\n\n /// Channel to send data to the SessionManager thread.\n\n pub tx: SyncSender<SessionSignal>,\n\n}\n\n\n\nimpl SessionManager {\n\n /// Create a new SessionManager\n\n pub fn new() -> Result<Self> {\n\n log::info!(target: LOG_TAG, \"Creating SessionManager\");\n\n\n\n // Create a channel for sending and receiving sessions\n\n let (tx, rx): (SyncSender<SessionSignal>, Receiver<SessionSignal>) = sync_channel(10);\n", "file_path": "src/session.rs", "rank": 87, "score": 3.949608419280043 }, { "content": "\n\n // Get current time\n\n let from = get_time_range(0)?.from;\n\n\n\n // Iterate through Senders\n\n txs.lock()?.iter().for_each(|tx| {\n\n // Send event to attached Sender\n\n match tx.send(from) {\n\n Ok(_) => {}\n\n Err(_) => {}\n\n }\n\n });\n\n\n\n // Wait 10s\n\n Timer::wait_event(kqueue, [loop_event].as_mut_ptr())?;\n\n }\n\n }));\n\n\n\n Ok(Self { handle, ..self })\n\n }\n", "file_path": "src/timer/kqueue.rs", "rank": 88, "score": 3.8267341041961047 }, { "content": " while let Ok(until) = rx.recv() {\n\n log::trace!(target: LOG_TAG, \"Sending session {}\", until);\n\n\n\n // Generate report from backend\n\n let report = backend.lock()?.report()?;\n\n\n\n // Send new Session to SessionManager\n\n stx.send(SessionSignal::Session(Session::new(\n\n until,\n\n config.clone(),\n\n report,\n\n )?))?;\n\n\n\n if until == 0 {\n\n log::trace!(target: LOG_TAG, \"Session Killed\");\n\n\n\n let (lock, cvar) = &*pair;\n\n let mut running = lock.lock()?;\n\n *running = false;\n\n cvar.notify_one();\n", "file_path": "src/pyroscope.rs", "rank": 89, "score": 3.731561533075389 }, 
{ "content": " // Start Timer\n\n let timer = Timer::default().initialize()?;\n\n log::trace!(target: LOG_TAG, \"Timer initialized\");\n\n\n\n // Start the SessionManager\n\n let session_manager = SessionManager::new()?;\n\n log::trace!(target: LOG_TAG, \"SessionManager initialized\");\n\n\n\n // Return PyroscopeAgent\n\n Ok(PyroscopeAgent {\n\n backend: self.backend,\n\n config: self.config,\n\n timer,\n\n session_manager,\n\n tx: None,\n\n handle: None,\n\n running: Arc::new((Mutex::new(false), Condvar::new())),\n\n })\n\n }\n\n}\n", "file_path": "src/pyroscope.rs", "rank": 90, "score": 3.7304910520834875 }, { "content": " }\n\n }\n\n Ok(())\n\n }));\n\n\n\n Ok(SessionManager { handle, tx })\n\n }\n\n\n\n /// Push a new session into the SessionManager\n\n pub fn push(&self, session: SessionSignal) -> Result<()> {\n\n // Push the session into the SessionManager\n\n self.tx.send(session)?;\n\n\n\n log::trace!(target: LOG_TAG, \"SessionSignal pushed\");\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Pyroscope Session\n", "file_path": "src/session.rs", "rank": 91, "score": 3.6465076617865346 }, { "content": "\n\n // Stop Agent\n\n self.stop();\n\n\n\n // Iterate through every tag\n\n tags.iter().for_each(|key| {\n\n // Remove tag\n\n self.config.tags.remove(key.to_owned());\n\n });\n\n\n\n // Restart Agent\n\n self.start();\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/pyroscope.rs", "rank": 92, "score": 3.5959452974322694 }, { "content": " fn create_epollfd(timer_fd: libc::c_int) -> Result<libc::c_int> {\n\n // create a new epoll fd\n\n let epoll_fd = epoll_create1(0)?;\n\n\n\n // event to pull\n\n let mut event = libc::epoll_event {\n\n events: libc::EPOLLIN as u32,\n\n u64: 1,\n\n };\n\n\n\n let epoll_flags = libc::EPOLL_CTL_ADD;\n\n\n\n // add event to the epoll\n\n epoll_ctl(epoll_fd, epoll_flags, timer_fd, &mut event)?;\n\n\n\n // return epoll fd\n\n Ok(epoll_fd)\n\n }\n\n\n\n /// Wait for an event on the epoll file descriptor\n", "file_path": "src/timer/epoll.rs", 
"rank": 93, "score": 3.559090398214855 }, { "content": " }\n\n }\n\n\n\n /// Set the Sample rate. Default value is 100.\n\n /// # Example\n\n /// ```ignore\n\n /// let builder = PyroscopeAgentBuilder::new(\"http://localhost:8080\", \"my-app\")\n\n /// .sample_rate(99)\n\n /// .build()\n\n /// ?;\n\n /// ```\n\n pub fn sample_rate(self, sample_rate: i32) -> Self {\n\n Self {\n\n config: self.config.sample_rate(sample_rate),\n\n ..self\n\n }\n\n }\n\n\n\n /// Set tags. Default is empty.\n\n /// # Example\n", "file_path": "src/pyroscope.rs", "rank": 94, "score": 3.551412001578966 }, { "content": "extern crate pyroscope;\n\n\n\nuse pyroscope::backends::pprof::Pprof;\n\nuse pyroscope::backends::Backend;\n\nuse pyroscope::Result;\n\n\n", "file_path": "examples/backend-pprof.rs", "rank": 95, "score": 3.47740172454507 }, { "content": " Reqwest(#[from] reqwest::Error),\n\n\n\n #[error(transparent)]\n\n Pprof(#[from] pprof::Error),\n\n\n\n #[error(transparent)]\n\n TimeSource(#[from] std::time::SystemTimeError),\n\n\n\n #[error(transparent)]\n\n Io(#[from] std::io::Error),\n\n}\n\n\n\nimpl PyroscopeError {\n\n /// Create a new instance of PyroscopeError\n\n pub fn new(msg: &str) -> Self {\n\n PyroscopeError::AdHoc(msg.to_string())\n\n }\n\n\n\n /// Create a new instance of PyroscopeError with source\n\n pub fn new_with_source<E>(msg: &str, source: E) -> Self where E: std::error::Error + Send + Sync + 'static {\n", "file_path": "src/error.rs", "rank": 96, "score": 3.4160168567483145 }, { "content": "extern crate pyroscope;\n\n\n\nuse std::sync::mpsc::channel;\n\nuse std::sync::mpsc::{Receiver, Sender};\n\n\n\nuse pyroscope::timer::Timer;\n\n\n", "file_path": "examples/internals-timer.rs", "rank": 97, "score": 3.389555263974911 }, { "content": "\n\nimpl PyroscopeConfig {\n\n /// Create a new PyroscopeConfig object. url and application_name are required.\n\n /// tags and sample_rate are optional. 
If sample_rate is not specified, it will default to 100.\n\n /// # Example\n\n /// ```ignore\n\n /// let config = PyroscopeConfig::new(\"http://localhost:8080\", \"my-app\");\n\n /// ```\n\n pub fn new(url: impl AsRef<str>, application_name: impl AsRef<str>) -> Self {\n\n Self {\n\n url: url.as_ref().to_owned(),\n\n application_name: application_name.as_ref().to_owned(),\n\n tags: HashMap::new(),\n\n sample_rate: 100i32,\n\n }\n\n }\n\n\n\n /// Set the Sample rate\n\n /// # Example\n\n /// ```ignore\n", "file_path": "src/pyroscope.rs", "rank": 98, "score": 3.341582488500948 }, { "content": "extern crate pyroscope;\n\n\n\nuse pyroscope::{PyroscopeAgent, Result};\n\n\n\nuse std::thread;\n\n\n", "file_path": "examples/multi-thread.rs", "rank": 99, "score": 3.303873165940188 } ]
Rust
src/writer/file_writer.rs
cspinetta/jon-listen
89134524b67443d7620b11bb840dfb9dfe04d0f8
use std::fs::File; use std::io::prelude::*; use std::fs::OpenOptions; use std::thread::{self, JoinHandle}; use std::path::PathBuf; use std::fs; use std::time::Duration; use chrono::prelude::*; use std::borrow::BorrowMut; use std::sync::mpsc::{sync_channel, SyncSender, Receiver}; use ::settings::FileWriterConfig; use ::settings::RotationPolicyType; use writer::file_rotation::FileRotation; use writer::rotation_policy::{RotationPolicy, RotationByDuration, RotationByDay}; pub struct FileWriter { file_dir_path: PathBuf, file_path: PathBuf, file_name: String, file: File, pub tx: SyncSender<FileWriterCommand>, rx: Receiver<FileWriterCommand>, file_config: FileWriterConfig, } impl FileWriter { pub fn new(buffer_bound: usize, file_config: FileWriterConfig) -> Self { let file_dir_path = file_config.filedir.clone(); let mut file_path = file_dir_path.clone(); file_path.push(file_config.filename.clone()); let file = Self::open_file(&file_path, file_config.formatting.startingmsg, true).unwrap(); let (tx, rx) = sync_channel(buffer_bound); FileWriter { file_dir_path, file_path, file_name: file_config.filename.clone(), file, tx, rx, file_config } } pub fn start(&mut self) -> Result<(), String> { info!("File writer starting"); let rotation_policy: Box<RotationPolicy> = match self.file_config.rotation.policy { RotationPolicyType::ByDuration => Box::new(RotationByDuration::new(Duration::from_secs(self.file_config.rotation.duration.unwrap()))), RotationPolicyType::ByDay => Box::new(RotationByDay::new()) }; let file_rotation = FileRotation::new( self.file_dir_path.clone(),self.file_path.clone(), self.file_name.clone(), self.file_config.rotation.count, rotation_policy, self.tx.clone()); let rotation_handle: JoinHandle<Result<(), String>> = file_rotation.start_async(); self.listen_commands()?; rotation_handle.join().unwrap_or_else(|e| Err(format!("Failed trying to join. 
Reason: {:?}", e)))?; Ok(()) } pub fn start_async(mut self) -> JoinHandle<Result<(), String>> { thread::spawn(move || { self.start() }) } fn listen_commands(&mut self) -> Result<(), String> { let mut count = 0; loop { let mut command = self.rx.recv() .map_err(|e| format!("Error getting file-write-command from channel: {}", e))?; debug!("Command received: {:?}", command); match command { FileWriterCommand::WriteDebug(id, value, i) => { count += 1; info!("WriteDebug - {} - Count in FileWriter: {} - In Server: {}", id, count, i); self.write(value.as_slice())? }, FileWriterCommand::Write(ref value) if value.last().map(|x| x.eq(&('\n' as u8))).unwrap_or(false) => { self.write(value)? }, FileWriterCommand::Write(ref mut value) => { let value: &mut Vec<u8> = value.as_mut(); value.push('\n' as u8); self.write((value).as_slice())? }, FileWriterCommand::Rename(new_path) => self.rotate(new_path)?, } } Ok(()) } pub fn write(&mut self, buf: &[u8]) -> Result<(), String> { Self::write_with(self.file.borrow_mut(), buf) } fn write_with(file: &mut File, buf: &[u8]) -> Result<(), String> { debug!("Writing to file {:?}", file); file.write(buf) .map_err(|e| format!("Failed trying to write to the log file. 
Reason: {}", e))?; Ok(()) } fn open_file(filepath: &PathBuf, with_starting_msg: bool, keep_content: bool) -> Result<File, String> { let starting_msg = format!("Starting {} at {}\n", filepath.to_string_lossy(), Local::now().to_rfc2822()); info!("Opening file {:?}", filepath); info!("{}", starting_msg); let mut options = OpenOptions::new(); let mut options = if keep_content { options.append(true) } else { options.write(true) }; let mut file = options.create(true).open(filepath) .expect(format!("Open the log file {:?}", filepath).as_ref()); if with_starting_msg { Self::write_with(file.borrow_mut(), starting_msg.as_bytes()); } Ok(file) } fn rotate(&mut self, new_path: PathBuf) -> Result<(), String> { fs::rename(self.file_path.clone(), new_path.clone()) .map_err(|e| format!("Failed trying to rename the file {:?} to {:?}. Reason: {}", self.file_path.clone(), new_path, e)) .and_then(|_| { let ending_msg = format!("Ending log as {} at {}\n", new_path.as_path().to_string_lossy(), Local::now().to_rfc2822()); info!("File rename successfully. {}", ending_msg); if self.file_config.formatting.endingmsg { self.write(ending_msg.as_bytes())?; } self.file = Self::open_file(&self.file_path.clone(), self.file_config.formatting.startingmsg, false)?; Ok(()) }) } } #[derive(Debug, Clone, PartialEq)] pub enum FileWriterCommand { Write(Vec<u8>), Rename(PathBuf), WriteDebug(String, Vec<u8>, i32), }
use std::fs::File; use std::io::prelude::*; use std::fs::OpenOptions; use std::thread::{self, JoinHandle}; use std::path::PathBuf; use std::fs; use std::time::Duration; use chrono::prelude::*; use std::borrow::BorrowMut; use std::sync::mpsc::{sync_channel, SyncSender, Receiver}; use ::settings::FileWriterConfig; use ::settings::RotationPolicyType; use writer::file_rotation::FileRotation; use writer::rotation_policy::{RotationPolicy, RotationByDuration, RotationByDay}; pub struct FileWriter { file_dir_path: PathBuf, file_path: PathBuf, file_name: String, file: File, pub tx: SyncSender<FileWriterCommand>, rx: Receiver<FileWriterCommand>, file_config: FileWriterConfig, } impl FileWriter { pub fn new(buffer_bound: usize, file_config: FileWriterConfig) -> Self { let file_dir_path = file_config.filedir.clone(); let mut file_path = file_dir_path.clone(); file_path.push(file_config.filename.clone()); let file = Self::open_file(&file_path, file_config.formatting.startingmsg, true).unwrap(); let (tx, rx) = sync_channel(buffer_bound); FileWriter { file_dir_path, file_path, file_name: file_config.filename.clone(), file, tx, rx, file_config } } pub fn start(&mut self) -> Result<(), String> { info!("File writer starting"); let rotation_policy: Box<RotationPolicy> = match self.file_config.rotation.policy { RotationPolicyType::ByDuration => Box::new(RotationByDuration::new(Duration::from_secs(self.file_config.rotation.duration.unwrap()))), RotationPolicyType::ByDay => Box::new(RotationByDay::new()) }; let file_rotation = FileRotation::new( self.file_dir_path.clone(),self.file_path.clone(), self.file_name.clone(), self.file_config.rotation.count, rotation_policy, self.tx.clone()); let rotation_handle: JoinHandle<Result<(), String>> = file_rotation.start_async(); self.listen_commands()?; rotation_handle.join().unwrap_or_else(|e| Err(format!("Failed trying to join. 
Reason: {:?}", e)))?; Ok(()) } pub fn start_async(mut self) -> JoinHandle<Result<(), String>> { thread::spawn(move || { self.start() }) } fn listen_commands(&mut self) -> Result<(), String> { let mut count = 0; loop { let mut command = self.rx.recv() .map_err(|e| format!("Error getting file-write-command from channel: {}", e))?; debug!("Command received: {:?}", command); match command { FileWriterCommand::WriteDebug(id, value, i) => { count += 1; info!("WriteDebug - {} - Count in FileWriter: {} - In Server: {}", id, count, i); self.write(value.as_slice())? }, FileWriterCommand::Write(ref value) if value.last().map(|x| x.eq(&('\n' as u8))).unwrap_or(false) => { self.write(value)? }, FileWriterCommand::Write(ref mut value) => { let value: &mut Vec<u8> = value.as_mut(); value.push('\n' as u8); self.write((value).as_slice())? }, FileWriterCommand::Rename(new_path) => self.rotate(new_path)?, } } Ok(()) } pub fn write(&mut self, buf: &[u8]) -> Result<(), String> { Self::write_with(self.file.borrow_mut(), buf) } fn write_with(file: &mut File, buf: &[u8]) -> Result<(), String> { debug!("Writing to file {:?}", file); file.write(buf) .map_err(|e| format!("Failed trying to write to the log file. Reason: {}", e))?; Ok(()) }
fn rotate(&mut self, new_path: PathBuf) -> Result<(), String> { fs::rename(self.file_path.clone(), new_path.clone()) .map_err(|e| format!("Failed trying to rename the file {:?} to {:?}. Reason: {}", self.file_path.clone(), new_path, e)) .and_then(|_| { let ending_msg = format!("Ending log as {} at {}\n", new_path.as_path().to_string_lossy(), Local::now().to_rfc2822()); info!("File rename successfully. {}", ending_msg); if self.file_config.formatting.endingmsg { self.write(ending_msg.as_bytes())?; } self.file = Self::open_file(&self.file_path.clone(), self.file_config.formatting.startingmsg, false)?; Ok(()) }) } } #[derive(Debug, Clone, PartialEq)] pub enum FileWriterCommand { Write(Vec<u8>), Rename(PathBuf), WriteDebug(String, Vec<u8>, i32), }
fn open_file(filepath: &PathBuf, with_starting_msg: bool, keep_content: bool) -> Result<File, String> { let starting_msg = format!("Starting {} at {}\n", filepath.to_string_lossy(), Local::now().to_rfc2822()); info!("Opening file {:?}", filepath); info!("{}", starting_msg); let mut options = OpenOptions::new(); let mut options = if keep_content { options.append(true) } else { options.write(true) }; let mut file = options.create(true).open(filepath) .expect(format!("Open the log file {:?}", filepath).as_ref()); if with_starting_msg { Self::write_with(file.borrow_mut(), starting_msg.as_bytes()); } Ok(file) }
function_block-full_function
[ { "content": "fn extract_command_arg(args: &mut Vec<String>, names: Vec<String>) -> bool {\n\n match args.iter().position(|a| names.contains(a)) {\n\n Some(i) => {\n\n args.remove(i);\n\n true\n\n }\n\n None => false,\n\n }\n\n}\n\n\n\nmod tcp {\n\n use std::io;\n\n use std::net::SocketAddr;\n\n\n\n use bytes::BytesMut;\n\n use futures::{future, Future, Stream};\n\n use tokio_core::net::TcpStream;\n\n use tokio_core::reactor::Handle;\n\n use tokio_io::AsyncRead;\n\n use tokio_io::codec::{Encoder, Decoder};\n", "file_path": "examples/logging_client.rs", "rank": 0, "score": 105776.316425534 }, { "content": "fn extract_arg<T>(args: &mut Vec<String>, names: Vec<String>, default: Option<T>, parser: fn(&String) -> T) -> T {\n\n match args.iter().position(|a| names.contains(a)) {\n\n Some(i) => {\n\n let value: T = parser(args.get(i + 1).unwrap());\n\n args.remove(i + 1);\n\n args.remove(i);\n\n value\n\n }\n\n None => default.expect(format!(\"This parameter is required: {:?}\", names).as_ref()),\n\n }\n\n}\n\n\n", "file_path": "examples/logging_client.rs", "rank": 1, "score": 86876.29844656333 }, { "content": "#[test]\n\nfn it_writes_multiple_messages() {\n\n pretty_env_logger::init().unwrap();\n\n\n\n let settings = settings_template();\n\n let msgs: Vec<String> = (0..100).map(|i| format!(\"Message # {}\", i)).collect();\n\n\n\n info!(\"Settings: {:?}\", settings);\n\n\n\n let mut file_writer = FileWriter::new(settings.buffer_bound, settings.filewriter.clone());\n\n\n\n let file_writer_tx = file_writer.tx.clone();\n\n\n\n // Start Writer\n\n let join_handle = file_writer.start_async();\n\n\n\n // Send messages\n\n info!(\"Sending {} messages\", msgs.len());\n\n for msg in &msgs {\n\n file_writer_tx.send(FileWriterCommand::Write(msg.as_bytes().to_vec()));\n\n }\n", "file_path": "tests/writer_spec.rs", "rank": 2, "score": 69602.01291654892 }, { "content": "#[test]\n\nfn it_receives_multiple_messages() {\n\n pretty_env_logger::init().unwrap();\n\n\n\n let settings = 
Arc::new(settings_template());\n\n let msgs: Vec<String> = (0..100).map(|i| format!(\"Message # {}\\n\", i)).collect();\n\n\n\n info!(\"Settings: {:?}\", settings);\n\n\n\n let server_addr = format!(\"{}:{}\", settings.server.host, settings.server.port).parse::<SocketAddr>().unwrap();\n\n let (file_writer_tx, file_writer_rx) = sync_channel(settings.buffer_bound);\n\n let threads = TcpServer::start(settings.clone(), file_writer_tx.clone());\n\n\n\n // To force server to get ready\n\n thread::sleep_ms(1);\n\n\n\n {\n\n let mut conn = std::net::TcpStream::connect(server_addr).unwrap();\n\n\n\n for msg in &msgs {\n\n conn.write(msg.as_ref());\n", "file_path": "tests/tcp_server_spec.rs", "rank": 3, "score": 66802.44842219613 }, { "content": "#[test]\n\nfn it_receives_multiple_messages() {\n\n pretty_env_logger::init().unwrap();\n\n\n\n let settings = Arc::new(settings_template());\n\n let msgs: Vec<String> = (0..100).map(|i| format!(\"Message # {}\", i)).collect();\n\n\n\n info!(\"Settings: {:?}\", settings);\n\n\n\n let (file_writer_tx, file_writer_rx) = sync_channel(settings.buffer_bound);\n\n let (server_addr_tx, server_addr_rx) = oneshot::channel();\n\n let (stop_c, stop_p) = oneshot::channel::<()>();\n\n\n\n let server_addr = format!(\"{}:{}\", settings.server.host, settings.server.port).parse::<SocketAddr>().unwrap();\n\n let settings_ref = settings.clone();\n\n\n\n let server_join = thread::spawn(move || {\n\n\n\n let mut l = Core::new().unwrap();\n\n let handle = l.handle();\n\n\n", "file_path": "tests/udp_server_spec.rs", "rank": 4, "score": 66802.44842219613 }, { "content": "struct TcpListenerService {\n\n pub id: i32,\n\n pub name: String,\n\n pub tx_file_writer: SyncSender<FileWriterCommand>,\n\n settings: Arc<Settings>,\n\n}\n\n\n\nimpl TcpListenerService {\n\n\n\n pub fn new(id: i32, tx_file_writer: SyncSender<FileWriterCommand>, settings: Arc<Settings>) -> Self {\n\n\n\n TcpListenerService {\n\n id,\n\n name: format!(\"server-tcp-{}\", id),\n\n 
tx_file_writer,\n\n settings\n\n }\n\n }\n\n\n\n}\n", "file_path": "src/listener/tcp_server.rs", "rank": 5, "score": 54908.55784398988 }, { "content": "fn main() {\n\n pretty_env_logger::init().unwrap();\n\n\n\n let mut args = env::args().skip(1).collect::<Vec<_>>();\n\n let tcp = extract_command_arg(args.as_mut(), vec![\"--tcp\".to_string()]);\n\n let threads = extract_arg(args.as_mut(), vec![\"--threads\".to_string(), \"-t\".to_string()], Option::Some(10), |x| x.parse::<usize>().unwrap());\n\n let addr = extract_arg(args.as_mut(), vec![\"--address\".to_string(), \"-a\".to_string()], Option::None, |x| x.parse::<SocketAddr>().unwrap());\n\n let exec_duration = extract_arg(args.as_mut(), vec![\"--duration\".to_string(), \"-d\".to_string()], Option::Some(Duration::from_secs(10)),\n\n |x| { Duration::from_secs(x.parse::<u64>().unwrap()) });\n\n\n\n let mut core = Core::new().expect(\"Creating event loop\");\n\n let handle = core.handle();\n\n\n\n let (msg_sender, msg_receiver) = mpsc::channel(0);\n\n let msg_receiver = msg_receiver.map_err(|_| panic!(\"Error in rx\")); // errors not possible on rx\n\n\n\n let stream = stream::repeat(\"hello world!!\\n\".as_bytes().to_vec());\n\n let generator = msg_sender.send_all(stream);\n\n let generator = generator\n\n .then(move |res| {\n", "file_path": "examples/logging_client.rs", "rank": 6, "score": 54266.06226191961 }, { "content": "fn system_time_to_date_time(t: SystemTime) -> DateTime<Utc> {\n\n let (sec, nsec) = match t.duration_since(UNIX_EPOCH) {\n\n Ok(dur) => (dur.as_secs() as i64, dur.subsec_nanos()),\n\n Err(e) => { // unlikely but should be handled\n\n let dur = e.duration();\n\n let (sec, nsec) = (dur.as_secs() as i64, dur.subsec_nanos());\n\n if nsec == 0 {\n\n (-sec, 0)\n\n } else {\n\n (-sec - 1, 1_000_000_000 - nsec)\n\n }\n\n },\n\n };\n\n Utc.timestamp(sec, nsec)\n\n}\n", "file_path": "src/writer/file_rotation.rs", "rank": 7, "score": 49920.2875613569 }, { "content": "#[test]\n\nfn it_rotate_by_duration() 
{\n\n pretty_env_logger::init().unwrap();\n\n\n\n let settings = settings_template();\n\n\n\n info!(\"Settings: {:?}\", settings);\n\n\n\n let (file_writer_tx, file_writer_rx) = sync_channel(settings.buffer_bound);\n\n\n\n let mut file_path = settings.filewriter.filedir.clone();\n\n file_path.push(settings.filewriter.filename.clone());\n\n\n\n let rotation_policy: Box<RotationPolicy> = match settings.filewriter.rotation.policy {\n\n RotationPolicyType::ByDuration => Box::new(RotationByDuration::new(Duration::from_secs(settings.filewriter.rotation.duration.unwrap()))),\n\n RotationPolicyType::ByDay => Box::new(RotationByDay::new())\n\n };\n\n\n\n let file_rotation = FileRotation::new(\n\n settings.filewriter.filedir.clone(), file_path.clone(),\n\n settings.filewriter.filename.clone(), settings.filewriter.rotation.count, rotation_policy, file_writer_tx.clone());\n\n\n\n let rotation_handle: JoinHandle<Result<(), String>> = file_rotation.start_async();\n\n\n\n let received_msg = file_writer_rx.recv_timeout(Duration::from_secs(settings.filewriter.rotation.duration.unwrap() + 5));\n\n assert!(received_msg.is_ok());\n\n assert!(matches!(received_msg, Ok(FileWriterCommand::Rename(new_filename))));\n\n\n\n// settings.file_writer.join().unwrap();\n\n}\n", "file_path": "tests/file_rotation_spec.rs", "rank": 15, "score": 49424.00223951832 }, { "content": "fn settings_template() -> Settings {\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH).expect(\"Time went backwards\");\n\n let filename = format!(\"writer_test_{}.log\", now.subsec_nanos());\n\n let server = ServerConfig { protocol: ProtocolType::UDP, host: \"0.0.0.0\".to_string(), port: 0 };\n\n let rotation_policy_config = RotationPolicyConfig { count: 10, policy: RotationPolicyType::ByDuration, duration: Option::Some(9999999) };\n\n let formatting_config = FormattingConfig { startingmsg: false, endingmsg: false };\n\n let file_config = FileWriterConfig { filedir: PathBuf::from(r\"/tmp/\"), filename, rotation: 
rotation_policy_config, formatting: formatting_config };\n\n Settings { debug: false, threads: 1, buffer_bound: 20, server, filewriter: file_config }\n\n}\n\n\n", "file_path": "tests/writer_spec.rs", "rank": 16, "score": 47801.17044745227 }, { "content": "pub trait RotationPolicy: Sync + Send {\n\n fn next_rotation(&self, last_rotation: DateTime<Local>) -> DateTime<Local>;\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct RotationByDuration {\n\n duration: Duration\n\n}\n\n\n\nimpl RotationByDuration {\n\n\n\n pub fn new(duration: Duration) -> Self {\n\n RotationByDuration { duration }\n\n }\n\n}\n\n\n\nimpl RotationPolicy for RotationByDuration {\n\n\n\n fn next_rotation(&self, last_rotation: DateTime<Local>) -> DateTime<Local> {\n\n last_rotation.clone() + time::Duration::from_std(self.duration).unwrap()\n", "file_path": "src/writer/rotation_policy.rs", "rank": 17, "score": 46441.907236993036 }, { "content": "fn settings_template() -> Settings {\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH).expect(\"Time went backwards\");\n\n let filename = format!(\"writer_test_{}.log\", now.subsec_nanos());\n\n let server = ServerConfig { protocol: ProtocolType::UDP, host: \"0.0.0.0\".to_string(), port: 0 };\n\n let rotation_policy_config = RotationPolicyConfig { count: 10, policy: RotationPolicyType::ByDuration, duration: Option::Some(1) };\n\n let formatting_config = FormattingConfig { startingmsg: false, endingmsg: false };\n\n let file_config = FileWriterConfig { filedir: PathBuf::from(r\"/tmp/\"), filename, rotation: rotation_policy_config, formatting: formatting_config };\n\n Settings { debug: false, threads: 1, buffer_bound: 20, server, filewriter: file_config }\n\n}\n\n\n", "file_path": "tests/file_rotation_spec.rs", "rank": 18, "score": 46378.80791602543 }, { "content": "fn settings_template() -> Settings {\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH).expect(\"Time went backwards\");\n\n let filename = format!(\"writer_test_{}.log\", 
now.subsec_nanos());\n\n let server = ServerConfig { protocol: ProtocolType::UDP, host: \"0.0.0.0\".to_string(), port: 0 };\n\n let rotation_policy_config = RotationPolicyConfig { count: 10, policy: RotationPolicyType::ByDuration, duration: Option::default() };\n\n let formatting_config = FormattingConfig { startingmsg: false, endingmsg: false };\n\n let file_config = FileWriterConfig { filedir: PathBuf::from(r\"/tmp/\"), filename, rotation: rotation_policy_config, formatting: formatting_config };\n\n Settings { debug: false, threads: 1, buffer_bound: 20, server, filewriter: file_config }\n\n}\n\n\n", "file_path": "tests/udp_server_spec.rs", "rank": 19, "score": 46036.04307518096 }, { "content": "fn settings_template() -> Settings {\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH).expect(\"Time went backwards\");\n\n let filename = format!(\"writer_test_{}.log\", now.subsec_nanos());\n\n let server = ServerConfig { protocol: ProtocolType::TCP, host: \"0.0.0.0\".to_string(), port: 9999 };\n\n let rotation_policy_config = RotationPolicyConfig { count: 10, policy: RotationPolicyType::ByDuration, duration: Option::default() };\n\n let formatting_config = FormattingConfig { startingmsg: false, endingmsg: false };\n\n let file_config = FileWriterConfig { filedir: PathBuf::from(r\"/tmp/\"), filename, rotation: rotation_policy_config, formatting: formatting_config };\n\n Settings { debug: false, threads: 1, buffer_bound: 20, server, filewriter: file_config }\n\n}\n\n\n", "file_path": "tests/tcp_server_spec.rs", "rank": 20, "score": 46036.04307518096 }, { "content": "pub struct FileRotation {\n\n file_dir_path: PathBuf,\n\n file_path: PathBuf,\n\n file_name: String,\n\n max_files: i32,\n\n rotation_policy: Box<RotationPolicy>,\n\n tx_file_writer: SyncSender<FileWriterCommand>\n\n}\n\n\n\nimpl FileRotation {\n\n\n\n pub fn new(file_dir_path: PathBuf, file_path: PathBuf, file_name: String, max_files: i32,\n\n rotation_policy: Box<RotationPolicy>, tx_file_writer: 
SyncSender<FileWriterCommand>) -> Self {\n\n FileRotation { file_dir_path, file_path, file_name, max_files, rotation_policy, tx_file_writer}\n\n }\n\n\n\n pub fn start(&self) -> Result<(), String> {\n\n let mut last_rotation = Local::now(); // FIXME: get modified of the current file\n\n loop {\n\n info!(\"loop rotate...\");\n", "file_path": "src/writer/file_rotation.rs", "rank": 21, "score": 42828.75717511377 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn start_async(self) -> JoinHandle<Result<(), String>> {\n\n thread::spawn(move || {\n\n self.start()\n\n })\n\n }\n\n\n\n fn request_rotate(&self) -> Result<PathBuf, RotateError> {\n\n\n\n let files: Vec<PathBuf> = Self::search_files(self.file_path.clone())?;\n\n\n\n let new_path = if files.len() >= self.max_files as usize {\n\n self.oldest_file(&files)?\n\n } else {\n\n self.next_path(&files)?\n\n };\n\n\n\n self.tx_file_writer.send(FileWriterCommand::Rename(new_path.clone()))\n", "file_path": "src/writer/file_rotation.rs", "rank": 22, "score": 42828.6780125251 }, { "content": " .map_err(|e| RotateError::OtherError(format!(\"Error sending RenameCommand: {:?}\", e)))?;\n\n\n\n Ok(new_path)\n\n }\n\n\n\n fn search_files(path: PathBuf) -> Result<Vec<PathBuf>, RotateError> {\n\n\n\n let files_query = path.to_str().ok_or(RotateError::OtherError(format!(\"Impossible get file path from {:?}\", &path)))?;\n\n let files_query = format!(\"{}.*\", files_query);\n\n\n\n let mut files: Vec<PathBuf> = vec![];\n\n for result in glob(&files_query)? 
{\n\n files.push(result?);\n\n }\n\n Ok(files)\n\n }\n\n\n\n fn oldest_file(&self, files: &Vec<PathBuf>) -> Result<PathBuf, io::Error> {\n\n info!(\"Getting oldest log file from {:?}\", files);\n\n let mut default_file = self.file_dir_path.clone();\n", "file_path": "src/writer/file_rotation.rs", "rank": 23, "score": 42820.21543137382 }, { "content": " })\n\n .and_then(|digit| {\n\n digit.as_str().parse::<i32>()\n\n .map_err(|e| RotateError::RegexError(format!(\"Impossible parse {:?} as integer. Reason: {}\", digit, e)))\n\n })?;\n\n if file_id >= next_id { next_id = file_id + 1 }\n\n }\n\n Ok(Path::new(&format!(\"{}.{}\", self.file_path.to_str().unwrap(), next_id)).to_path_buf())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum RotateError {\n\n RegexError(String),\n\n InvalidFile(String),\n\n IOError(String),\n\n OtherError(String),\n\n SearchFilesError(String),\n\n}\n\n\n", "file_path": "src/writer/file_rotation.rs", "rank": 24, "score": 42818.253713120546 }, { "content": " default_file.push(format!(\"{}.0\", self.file_name));\n\n let oldest = files.iter().min_by(|x, y| {\n\n let modified_x = x.metadata().unwrap().modified().unwrap();\n\n let modified_y = y.metadata().unwrap().modified().unwrap();\n\n modified_x.cmp(&modified_y)\n\n }).unwrap_or(&default_file).canonicalize()?;\n\n Ok(oldest.clone())\n\n }\n\n\n\n fn next_path(&self, files: &Vec<PathBuf>) -> Result<PathBuf, RotateError> {\n\n info!(\"Getting next name of log file to use. 
Current files: {:?}\", files);\n\n let re = Regex::new(r\".*(\\d+)$\").map_err(|e| RotateError::RegexError(format!(\"{}\", e)))?;\n\n let mut next_id = 0;\n\n for file in files.iter() {\n\n let filename_x = file.file_name().and_then(|fname| fname.to_str()).ok_or(RotateError::InvalidFile(format!(\"invalid file: {:?}\", file)))?;\n\n let file_id = re.captures(filename_x)\n\n .ok_or(RotateError::RegexError(format!(\"digit not found in {}\", filename_x)))\n\n .and_then(|captures| {\n\n captures.get(1)\n\n .ok_or(RotateError::RegexError(format!(\"It was impossible to capture first group of regex to get the number of file {}\", filename_x)))\n", "file_path": "src/writer/file_rotation.rs", "rank": 25, "score": 42817.72729411238 }, { "content": "\n\nuse std::thread::{self, JoinHandle};\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n\nuse std::io;\n\n\n\nuse std::time::{Duration, SystemTime, UNIX_EPOCH};\n\nuse chrono::prelude::*;\n\n\n\nuse regex::Regex;\n\n\n\nuse glob::glob;\n\nuse glob::PatternError;\n\nuse glob::GlobError;\n\n\n\nuse std::sync::mpsc::SyncSender;\n\nuse writer::file_writer::FileWriterCommand;\n\nuse writer::rotation_policy::RotationPolicy;\n\n\n\n\n", "file_path": "src/writer/file_rotation.rs", "rank": 26, "score": 42815.99496063297 }, { "content": "impl From<RotateError> for String {\n\n fn from(error: RotateError) -> Self {\n\n format!(\"{:?}\", error)\n\n }\n\n}\n\n\n\nimpl From<io::Error> for RotateError {\n\n fn from(error: io::Error) -> Self {\n\n RotateError::IOError(error.to_string())\n\n }\n\n}\n\n\n\nimpl From<PatternError> for RotateError {\n\n fn from(error: PatternError) -> Self {\n\n RotateError::SearchFilesError(error.to_string())\n\n }\n\n}\n\n\n\nimpl From<GlobError> for RotateError {\n\n fn from(error: GlobError) -> Self {\n\n RotateError::SearchFilesError(error.to_string())\n\n }\n\n}\n\n\n", "file_path": "src/writer/file_rotation.rs", "rank": 27, "score": 42814.87912757366 }, { "content": " let time_for_rotate = 
self.rotation_policy.next_rotation(last_rotation);\n\n let now = Local::now();\n\n if time_for_rotate.gt(&now) {\n\n let dur_to_rotate = time_for_rotate.signed_duration_since(now.clone()).to_std().unwrap();\n\n info!(\"Sleep and wait {:?} for the time to rotate\", dur_to_rotate);\n\n thread::sleep(dur_to_rotate);\n\n } else {\n\n info!(\"it's the time to rotate: {}\", &now);\n\n match self.request_rotate() {\n\n Err(err) => {\n\n error!(\"Failed trying to rename the file. Reason: {}\", String::from(err));\n\n thread::sleep(Duration::from_secs(1));\n\n },\n\n Ok(new_path) => {\n\n info!(\"File rename requested. It will be saved as {:?}\", new_path);\n\n last_rotation = now;\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/writer/file_rotation.rs", "rank": 28, "score": 42810.96270393978 }, { "content": "pub trait DeserializeWith: Sized {\n\n fn deserialize_with<'de, D>(de: D) -> Result<Self, D::Error>\n\n where D: Deserializer<'de>;\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct ServerConfig {\n\n #[serde(deserialize_with=\"ProtocolType::deserialize_with\")]\n\n pub protocol: ProtocolType,\n\n pub host: String,\n\n pub port: i32,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Eq, PartialEq, Clone)]\n\npub enum ProtocolType {\n\n TCP,\n\n UDP\n\n}\n\n\n\nimpl DeserializeWith for ProtocolType {\n", "file_path": "src/settings.rs", "rank": 29, "score": 34846.032042978535 }, { "content": "fn main() {\n\n pretty_env_logger::init().unwrap();\n\n\n\n info!(\"Starting jon-listen app...\");\n\n\n\n let settings = Settings::load();\n\n\n\n App::start_up(Arc::new(settings));\n\n}\n", "file_path": "src/main.rs", "rank": 30, "score": 33390.70122238513 }, { "content": "fn main() {\n\n pretty_env_logger::init().unwrap();\n\n\n\n let settings = Arc::new(settings_template());\n\n let settings_ref = settings.clone();\n\n\n\n info!(\"Settings: {:?}\", settings);\n\n\n\n let server_join = thread::spawn(move || {\n\n App::start_up(settings_ref);\n\n });\n\n\n\n let server_addr = 
format!(\"{}:{}\", settings.server.host, settings.server.port).parse::<SocketAddr>().unwrap();\n\n let any_addr = \"127.0.0.1:0\".to_string().parse::<SocketAddr>().unwrap();\n\n let client = std::net::UdpSocket::bind(&any_addr).unwrap();\n\n\n\n thread::sleep(Duration::from_millis(1));\n\n\n\n for i in 1..10000 {\n\n trace!(\"Sending message {}\", i);\n\n let msg = format!(\"Message # {}\", i);\n\n client.send_to(msg.as_ref(), &server_addr).unwrap();\n\n };\n\n\n\n}\n", "file_path": "examples/send_via_udp.rs", "rank": 31, "score": 30963.17871343714 }, { "content": "fn settings_template() -> Settings {\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH).expect(\"Time went backwards\");\n\n let filename = format!(\"writer_test_{}.log\", now.subsec_nanos());\n\n let server = ServerConfig { protocol: ProtocolType::UDP, host: \"0.0.0.0\".to_string(), port: 9999 };\n\n let rotation_policy_config = RotationPolicyConfig { count: 10, policy: RotationPolicyType::ByDuration, duration: Option::Some(9999) };\n\n let formatting_config = FormattingConfig { startingmsg: false, endingmsg: false };\n\n let file_config = FileWriterConfig { filedir: PathBuf::from(r\"/tmp/\"), filename, rotation: rotation_policy_config, formatting: formatting_config };\n\n Settings { debug: false, threads: 5, buffer_bound: 20, server, filewriter: file_config }\n\n}\n\n\n", "file_path": "examples/send_via_udp.rs", "rank": 32, "score": 27807.497672251513 }, { "content": " type Error = io::Error;\n\n\n\n fn decode(&mut self, buf: &mut BytesMut) -> io::Result<Option<BytesMut>> {\n\n if buf.len() > 0 {\n\n let len = buf.len();\n\n Ok(Some(buf.split_to(len)))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n\n\n fn decode_eof(&mut self, buf: &mut BytesMut) -> io::Result<Option<BytesMut>> {\n\n self.decode(buf)\n\n }\n\n }\n\n\n\n impl Encoder for Bytes {\n\n type Item = Vec<u8>;\n\n type Error = io::Error;\n\n\n", "file_path": "examples/logging_client.rs", "rank": 33, "score": 23463.609607296647 }, { 
"content": " fn encode(&mut self, data: Vec<u8>, buf: &mut BytesMut) -> io::Result<()> {\n\n buf.extend(data);\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\nmod udp {\n\n use std::io;\n\n use std::net::SocketAddr;\n\n\n\n use futures::{Future, Stream};\n\n use tokio_core::net::{UdpCodec, UdpSocket};\n\n use tokio_core::reactor::Handle;\n\n\n\n\n\n pub fn connect(addr: &SocketAddr, handle: Handle,\n\n input_stream: Box<Stream<Item = Vec<u8>, Error = io::Error> + Send>) -> Box<Future<Item=(), Error=io::Error>> {\n\n let client_addr = \"127.0.0.1:0\".parse::<SocketAddr>().unwrap();\n\n let udp = UdpSocket::bind(&client_addr, &handle).expect(\"Failed to bind client UDP socket\");\n", "file_path": "examples/logging_client.rs", "rank": 34, "score": 23461.455402440624 }, { "content": " Ok((_, _)) => Ok(())\n\n }\n\n });\n\n Box::new(log_emitter\n\n .select(receive_stream))\n\n .then(|res| -> Box<Future<Item=_, Error=_>> {\n\n match res {\n\n Ok((a, b)) => Box::new(b.map(move |b| ())),\n\n Err((a, _)) => Box::new(future::err(a)),\n\n }\n\n })\n\n });\n\n\n\n Box::new(client)\n\n }\n\n\n\n struct Bytes;\n\n\n\n impl Decoder for Bytes {\n\n type Item = BytesMut;\n", "file_path": "examples/logging_client.rs", "rank": 35, "score": 23461.424301492792 }, { "content": " use futures::IntoFuture;\n\n\n\n\n\n pub fn connect(addr: &SocketAddr, handle: Handle,\n\n input_stream: Box<Stream<Item = Vec<u8>, Error = io::Error> + Send>) -> Box<Future<Item=(), Error=io::Error>> {\n\n\n\n let tcp = TcpStream::connect(&addr, &handle);\n\n\n\n let client = tcp.and_then(|stream| {\n\n let (sink, stream) = stream.framed(Bytes).split();\n\n let receive_stream = stream.for_each(move |buf| {\n\n info!(\"Received via TCP connection: {:?}\", buf.as_ref());\n\n Ok(())\n\n });\n\n let log_emitter = input_stream\n\n .forward(sink)\n\n .into_future()\n\n .then(|res| {\n\n match res {\n\n Err(e) => Err(io::Error::new(io::ErrorKind::Other, format!(\"Error sending log messages: {:?}\", e))),\n", "file_path": 
"examples/logging_client.rs", "rank": 36, "score": 23461.42417978657 }, { "content": "\n\n let (sink, stream) = udp.framed(Bytes).split();\n\n\n\n let addr = addr.clone();\n\n Box::new(input_stream\n\n .map(move |chunk| (addr, chunk))\n\n .forward(sink)\n\n .map_err(|e| io::Error::new(io::ErrorKind::Other, format!(\"Occur an error sending datagrams: {:?}\", e)))\n\n .map(|_| ()))\n\n }\n\n\n\n\n\n struct Bytes;\n\n\n\n impl UdpCodec for Bytes {\n\n type In = (SocketAddr, Vec<u8>);\n\n type Out = (SocketAddr, Vec<u8>);\n\n\n\n fn decode(&mut self, addr: &SocketAddr, buf: &[u8]) -> io::Result<Self::In> {\n\n Ok((*addr, buf.to_vec()))\n", "file_path": "examples/logging_client.rs", "rank": 37, "score": 23461.245737931516 }, { "content": " }\n\n\n\n fn encode(&mut self, (addr, buf): Self::Out, into: &mut Vec<u8>) -> SocketAddr {\n\n into.extend(buf);\n\n addr\n\n }\n\n }\n\n}\n\n\n\n\n\n// const TICK_DURATION: u64 = 100;\n\n// const TIMER_INTERVAL: u64 = 200;\n\n\n\n// let timer = tokio_timer::wheel().tick_duration(Duration::from_micros(TICK_DURATION)).build();\n\n// let timer = timer.interval(Duration::from_micros(TIMER_INTERVAL)).for_each(move |_| {\n\n// let msg = \"hello world!!\\n\";\n\n// println!(\"Sending: {}\", msg);\n\n// msg_sender.clone().send(msg.as_bytes().to_vec()).wait().unwrap();\n\n// Ok(())\n\n// });\n", "file_path": "examples/logging_client.rs", "rank": 38, "score": 23458.29486840171 }, { "content": " Ok(())\n\n }))\n\n .map_err(|e| {\n\n io::Error::new(io::ErrorKind::Other, format!(\"Error performing timeout: {:?}\", e))\n\n });\n\n\n\n let f = timeout_emitter\n\n .select(sender)\n\n .then(|res| -> Box<Future<Item=_, Error=_>> {\n\n match res {\n\n Ok((_, _)) => Box::new(future::ok(())),\n\n Err((error, _)) => Box::new(future::err(error)),\n\n }\n\n });\n\n\n\n core.run(f).expect(\"Event loop running\");\n\n}\n\n\n", "file_path": "examples/logging_client.rs", "rank": 39, "score": 23455.547206173695 }, { "content": " if let Err(e) = res {\n\n 
panic!(\"Occur an error generating messages: {:?}\", e);\n\n ()\n\n }\n\n Ok(())\n\n });\n\n\n\n core.handle().spawn(generator);\n\n\n\n let sender: Box<Future<Item=(), Error=io::Error>> = if tcp {\n\n info!(\"Starting TCP client\");\n\n tcp::connect(&addr, core.handle(), Box::new(msg_receiver))\n\n } else {\n\n info!(\"Starting UDP client\");\n\n udp::connect(&addr, core.handle(), Box::new(msg_receiver))\n\n };\n\n\n\n let timeout_emitter = reactor::Timeout::new(exec_duration, &handle)\n\n .into_future()\n\n .and_then(|timeout| timeout.and_then(move |_| {\n", "file_path": "examples/logging_client.rs", "rank": 40, "score": 23455.212402470275 }, { "content": "\n\n#[macro_use]\n\nextern crate log;\n\nextern crate pretty_env_logger;\n\n\n\nextern crate futures;\n\nextern crate tokio_core;\n\nextern crate tokio_io;\n\nextern crate tokio_timer;\n\nextern crate bytes;\n\n\n\nuse std::env;\n\nuse std::vec::Vec;\n\nuse std::io;\n\nuse std::net::SocketAddr;\n\nuse std::time::Duration;\n\n\n\nuse tokio_core::reactor::{self, Core};\n\n\n\nuse futures::sync::mpsc;\n\nuse futures::stream;\n\nuse futures::{Sink, Future, Stream, future};\n\nuse futures::IntoFuture;\n\n\n", "file_path": "examples/logging_client.rs", "rank": 41, "score": 23453.919700959545 }, { "content": "\n\n#[macro_use]\n\nextern crate log;\n\nextern crate pretty_env_logger;\n\nextern crate matches;\n\n\n\nextern crate tokio_core;\n\nextern crate tokio_io;\n\n\n\nextern crate futures;\n\n\n\nextern crate jon_listen;\n\nextern crate net2;\n\n\n\n\n\nuse jon_listen::writer::file_writer::*;\n\nuse jon_listen::writer::file_writer::FileWriterCommand;\n\nuse jon_listen::settings::*;\n\n\n\n\n\nuse std::fs::{self, File};\n\n\n\nuse std::io::BufReader;\n\nuse std::io::prelude::*;\n\n\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\nuse std::path::PathBuf;\n\n\n\n\n", "file_path": "tests/writer_spec.rs", "rank": 42, "score": 22408.95897344485 }, { "content": "\n\n let mut file_path = 
settings.filewriter.filedir.clone();\n\n file_path.push(settings.filewriter.filename.clone());\n\n\n\n info!(\"Log file {:?}\", file_path);\n\n\n\n {\n\n let file = File::open(file_path.clone()).expect(format!(\"Open the log file {:?}\", file_path).as_ref());\n\n let file_reader = BufReader::new(file);\n\n\n\n let mut msg_iter = msgs.iter();\n\n for line in file_reader.lines() {\n\n let next = msg_iter.next();\n\n assert!(next.is_some());\n\n assert!(line.is_ok());\n\n\n\n let line_writer = line.unwrap();\n\n let line_writer = line_writer.as_bytes();\n\n\n\n let line_file = next.unwrap();\n", "file_path": "tests/writer_spec.rs", "rank": 43, "score": 22406.320203490275 }, { "content": " let line_file = line_file.as_bytes();\n\n\n\n assert_eq!(line_writer, line_file);\n\n\n\n// println!(\"{:?} - {:?}\", String::from_utf8_lossy(line_writer), String::from_utf8_lossy(line_file));\n\n }\n\n }\n\n\n\n fs::remove_file(file_path);\n\n}\n", "file_path": "tests/writer_spec.rs", "rank": 44, "score": 22403.802983397734 }, { "content": "\n\npub mod file_writer;\n\npub mod file_rotation;\n\npub mod rotation_policy;\n", "file_path": "src/writer/mod.rs", "rank": 45, "score": 22401.178080613743 }, { "content": "\n\n#[macro_use]\n\nextern crate log;\n\nextern crate pretty_env_logger;\n\n#[macro_use]\n\nextern crate matches;\n\n\n\nextern crate tokio_core;\n\nextern crate tokio_io;\n\n\n\nextern crate futures;\n\n\n\nextern crate jon_listen;\n\nextern crate net2;\n\n\n\n\n\nuse jon_listen::writer::file_writer::FileWriterCommand;\n\nuse jon_listen::writer::file_rotation::*;\n\nuse jon_listen::writer::rotation_policy::*;\n\nuse jon_listen::settings::*;\n\n\n\nuse std::time::{SystemTime, UNIX_EPOCH, Duration};\n\nuse std::path::PathBuf;\n\n\n\nuse std::thread::JoinHandle;\n\n\n\nuse std::sync::mpsc::sync_channel;\n\n\n\n\n", "file_path": "tests/file_rotation_spec.rs", "rank": 46, "score": 21658.136840407642 }, { "content": "\n\nimpl Service for TcpListenerService {\n\n type Request = 
String;\n\n type Response = ();\n\n type Error = io::Error;\n\n type Future = FutureResult<Self::Response, Self::Error>;\n\n\n\n fn call(&self, req: Self::Request) -> Self::Future {\n\n debug!(\"Received a log line in {}\", self.name);\n\n let sent_data = self.tx_file_writer\n\n .send(FileWriterCommand::Write(req.clone().into_bytes()));\n\n match sent_data {\n\n Ok(_) => future::ok(()),\n\n Err(e) => future::err(io::Error::new(io::ErrorKind::Other,\n\n format!(\"Error trying to send a log line to write: {}\", e)))\n\n }\n\n }\n\n}\n", "file_path": "src/listener/tcp_server.rs", "rank": 47, "score": 21267.454178253596 }, { "content": "}\n\n\n\nimpl Future for UdpService {\n\n type Item = ();\n\n type Error = io::Error;\n\n\n\n fn poll(&mut self) -> Poll<(), io::Error> {\n\n loop {\n\n let (size, _): (usize, SocketAddr) = try_nb!(self.socket.recv_from(&mut self.buf));\n\n if self.settings.debug {\n\n self.count += 1;\n\n info!(\"Poll datagram from server {}. Count: {}\", self.name, self.count);\n\n self.writer_sender.send(FileWriterCommand::WriteDebug(self.name.clone(), self.buf[..size].to_vec(), self.count));\n\n } else {\n\n debug!(\"Poll datagram from server {}.\", self.name);\n\n self.writer_sender.send(FileWriterCommand::Write(self.buf[..size].to_vec()));\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/listener/udp_server.rs", "rank": 48, "score": 21267.23942460826 }, { "content": " pub writer_sender: SyncSender<FileWriterCommand>,\n\n settings: Arc<Settings>,\n\n count: i32\n\n}\n\n\n\nimpl UdpService {\n\n\n\n pub fn new(s: UdpSocket, writer_sender: SyncSender<FileWriterCommand>, id: i32, settings: Arc<Settings>) -> Self {\n\n\n\n UdpService {\n\n id,\n\n name: format!(\"server-udp-{}\", id),\n\n socket: s,\n\n buf: vec![0u8; 15000],\n\n writer_sender,\n\n settings,\n\n count: 0 // For debug only\n\n }\n\n }\n\n\n", "file_path": "src/listener/udp_server.rs", "rank": 49, "score": 21267.150543297637 }, { "content": " let msg: &[u8] = msg.as_ref();\n\n let 
received_msg = file_writer_rx.recv_timeout(Duration::from_secs(4));\n\n assert!(received_msg.is_ok());\n\n assert!(matches!(received_msg, Ok(FileWriterCommand::Write(ref v)) if v.as_slice() == msg));\n\n }\n\n\n\n info!(\"Received {} messages successfully\", msgs.len());\n\n\n\n stop_c.complete(());\n\n server_join.join().unwrap();\n\n}\n", "file_path": "tests/udp_server_spec.rs", "rank": 50, "score": 21267.026276657143 }, { "content": " }\n\n }\n\n\n\n for msg in &msgs {\n\n let msg: &[u8] = msg.as_ref();\n\n let received_msg = file_writer_rx.recv_timeout(Duration::from_secs(4));\n\n debug!(\"Received: {:?} . It should be {:?}\", received_msg, msg.to_ascii_lowercase());\n\n assert!(received_msg.is_ok());\n\n assert!(matches!(received_msg, Ok(FileWriterCommand::Write(ref v)) if v.as_slice() == msg));\n\n }\n\n\n\n info!(\"Received {} messages successfully\", msgs.len());\n\n}\n", "file_path": "tests/tcp_server_spec.rs", "rank": 51, "score": 21262.537176254074 }, { "content": "use tokio_proto::pipeline::ServerProto;\n\nuse tokio_service::Service;\n\nuse tokio_service::NewService;\n\n\n\nuse ::writer::file_writer::FileWriterCommand;\n\nuse std::sync::mpsc::{SyncSender, SendError};\n\n\n\nuse std::io;\n\nuse std::str;\n\nuse std::borrow::Borrow;\n\n\n\npub struct TcpServer;\n\n\n\nimpl TcpServer {\n\n\n\n pub fn start(settings: Arc<Settings>, sender: SyncSender<FileWriterCommand>) -> Vec<JoinHandle<()>> {\n\n\n\n let addr = format!(\"{}:{}\", settings.server.host, settings.server.port).parse::<SocketAddr>().unwrap();\n\n let addr = Arc::new(addr);\n\n\n", "file_path": "src/listener/tcp_server.rs", "rank": 52, "score": 21262.260432653537 }, { "content": "\n\nimpl UdpServer {\n\n\n\n pub fn start(settings: Arc<Settings>, sender: SyncSender<FileWriterCommand>) -> Vec<JoinHandle<()>> {\n\n\n\n let addr = format!(\"{}:{}\", settings.server.host, settings.server.port).parse::<SocketAddr>().unwrap();\n\n let addr = Arc::new(addr);\n\n\n\n let mut threads: Vec<JoinHandle<()>> 
= Vec::new();\n\n\n\n for i in 0..settings.threads {\n\n let settings_ref = settings.clone();\n\n let tx_file_writer = sender.clone();\n\n let addr_ref = addr.clone();\n\n threads.push(thread::spawn(move || {\n\n info!(\"Spawning thread {}\", i);\n\n\n\n let mut l = Core::new().unwrap();\n\n let handle = l.handle();\n\n\n", "file_path": "src/listener/udp_server.rs", "rank": 53, "score": 21261.54284582712 }, { "content": "}\n\n\n\npub struct LineCodec;\n\n\n\nimpl Encoder for LineCodec {\n\n type Item = ();\n\n type Error = io::Error;\n\n\n\n fn encode(&mut self, msg: (), buf: &mut BytesMut) -> io::Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Decoder for LineCodec {\n\n type Item = String;\n\n type Error = io::Error;\n\n\n\n fn decode(&mut self, buf: &mut BytesMut) -> io::Result<Option<String>> {\n\n if let Some(i) = buf.iter().position(|&b| b == b'\\n') {\n\n let line = buf.split_to(i + 1);\n", "file_path": "src/listener/tcp_server.rs", "rank": 54, "score": 21260.741059660024 }, { "content": "\n\n#[macro_use]\n\nextern crate log;\n\nextern crate pretty_env_logger;\n\n#[macro_use]\n\nextern crate matches;\n\n\n\nextern crate tokio_core;\n\nextern crate tokio_io;\n\n\n\nextern crate futures;\n\n\n\nextern crate jon_listen;\n\nextern crate net2;\n\n\n\n\n\nuse jon_listen::listener::tcp_server::TcpServer;\n\nuse jon_listen::writer::file_writer::FileWriterCommand;\n\nuse jon_listen::settings::*;\n\n\n", "file_path": "tests/tcp_server_spec.rs", "rank": 55, "score": 21258.014682878395 }, { "content": "\n\n#[macro_use]\n\nextern crate log;\n\nextern crate pretty_env_logger;\n\n#[macro_use]\n\nextern crate matches;\n\n\n\nextern crate tokio_core;\n\nextern crate tokio_io;\n\n\n\nextern crate futures;\n\n\n\nextern crate jon_listen;\n\nextern crate net2;\n\n\n\n\n\nuse jon_listen::listener::udp_server;\n\nuse jon_listen::writer::file_writer::FileWriterCommand;\n\nuse jon_listen::settings::*;\n\n\n", "file_path": "tests/udp_server_spec.rs", "rank": 56, "score": 
21257.513947670268 }, { "content": "\n\n\n\nuse tokio_core::net::UdpSocket;\n\nuse tokio_core::reactor::Core;\n\nuse net2;\n\nuse net2::unix::UnixUdpBuilderExt;\n\n\n\nuse std::io;\n\nuse std::net::SocketAddr;\n\n\n\nuse futures::{Future, Poll};\n\n\n\nuse ::writer::file_writer::FileWriterCommand;\n\nuse ::settings::Settings;\n\n\n\nuse std::thread::{self, JoinHandle};\n\nuse std::sync::Arc;\n\nuse std::sync::mpsc::SyncSender;\n\n\n\npub struct UdpServer;\n", "file_path": "src/listener/udp_server.rs", "rank": 57, "score": 21257.29610853236 }, { "content": " let udp_socket = net2::UdpBuilder::new_v4().unwrap()\n\n .reuse_port(true).unwrap()\n\n .bind(addr_ref.as_ref()).unwrap();\n\n\n\n let socket = UdpSocket::from_socket(udp_socket, &handle).unwrap(); // UdpSocket::bind(&addr_ref, &handle).unwrap();\n\n l.run(UdpService::new(socket, tx_file_writer, i, settings_ref)).unwrap();\n\n }));\n\n }\n\n\n\n info!(\"Listening at {} via UDP with {} threads...\", addr, settings.threads);\n\n\n\n threads\n\n }\n\n}\n\n\n\npub struct UdpService {\n\n pub id: i32,\n\n pub name: String,\n\n pub socket: UdpSocket,\n\n pub buf: Vec<u8>,\n", "file_path": "src/listener/udp_server.rs", "rank": 58, "score": 21256.630170418903 }, { "content": " let socket = tokio_core::net::UdpSocket::bind(&server_addr, &handle).unwrap();\n\n server_addr_tx.complete(socket.local_addr().unwrap());\n\n\n\n let server = udp_server::UdpService::new(socket, file_writer_tx, 1, settings_ref);\n\n let server = server.select(stop_p.map_err(|_| panic!()));\n\n let server = server.map_err(|_| ());\n\n\n\n l.run(server).unwrap();\n\n });\n\n\n\n let server_addr = server_addr_rx.wait().unwrap();\n\n\n\n let any_addr = \"127.0.0.1:0\".to_string().parse::<SocketAddr>().unwrap();\n\n let client = std::net::UdpSocket::bind(&any_addr).unwrap();\n\n\n\n for msg in &msgs {\n\n client.send_to(msg.as_ref(), &server_addr).unwrap();\n\n }\n\n\n\n for msg in &msgs {\n", "file_path": "tests/udp_server_spec.rs", "rank": 59, 
"score": 21253.617277213085 }, { "content": "\n\nuse std::net::SocketAddr;\n\nuse std::thread::{self, JoinHandle};\n\nuse std::sync::Arc;\n\n\n\nuse tokio_core::reactor::Core;\n\n\n\nuse tokio_core::net::TcpListener;\n\n\n\nuse net2;\n\nuse net2::unix::UnixTcpBuilderExt;\n\n\n\nuse settings::Settings;\n\n\n\nuse futures::future::{self, FutureResult};\n\nuse futures::{Stream, Sink, Future};\n\nuse tokio_io::{AsyncRead, AsyncWrite};\n\nuse tokio_io::codec::{Framed, Encoder, Decoder};\n\n\n\nuse bytes::BytesMut;\n", "file_path": "src/listener/tcp_server.rs", "rank": 60, "score": 21253.485606595554 }, { "content": "use std::time::{SystemTime, UNIX_EPOCH, Duration};\n\nuse std::path::PathBuf;\n\n\n\nuse std::net::SocketAddr;\n\nuse std::thread;\n\nuse std::sync::Arc;\n\n\n\nuse std::sync::mpsc::sync_channel;\n\nuse std::io::Write;\n\n\n\n\n", "file_path": "tests/tcp_server_spec.rs", "rank": 61, "score": 21253.389462950516 }, { "content": "\n\n // Turn this data into a UTF string and return it in a Frame.\n\n match str::from_utf8(&line) {\n\n Ok(s) => Ok(Some(s.to_string())),\n\n Err(_) => Err(io::Error::new(io::ErrorKind::Other,\n\n \"invalid UTF-8\")),\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/listener/tcp_server.rs", "rank": 62, "score": 21252.218493294473 }, { "content": " let server = listener.incoming().for_each(|(tcp, _)| {\n\n\n\n let (writer, reader) = tcp.framed(LineCodec).split();\n\n let service = (|| Ok(TcpListenerService::new(i, sender_ref.clone(), settings_ref.clone()))).new_service()?;\n\n\n\n let responses = reader.and_then(move |req| service.call(req));\n\n let server = writer.send_all(responses)\n\n .then(|_| Ok(()));\n\n handle.spawn(server);\n\n\n\n Ok(())\n\n });\n\n l.run(server).unwrap();\n\n }));\n\n }\n\n\n\n info!(\"Listening at {} via TCP with {} threads...\", addr, settings.threads);\n\n\n\n threads\n\n }\n", "file_path": "src/listener/tcp_server.rs", "rank": 63, "score": 21251.829103963988 }, { "content": 
"use tokio_core::reactor::Core;\n\nuse std::time::{SystemTime, UNIX_EPOCH, Duration};\n\nuse std::path::PathBuf;\n\n\n\nuse std::net::SocketAddr;\n\nuse std::thread;\n\nuse std::sync::Arc;\n\n\n\nuse futures::sync::oneshot;\n\nuse futures::Future;\n\nuse std::sync::mpsc::sync_channel;\n\n\n\n\n", "file_path": "tests/udp_server_spec.rs", "rank": 64, "score": 21251.17702865549 }, { "content": " let mut threads = Vec::new();\n\n\n\n for i in 0..settings.threads {\n\n let settings_ref = settings.clone();\n\n let sender_ref = sender.clone();\n\n let addr_ref = addr.clone();\n\n\n\n threads.push(thread::spawn(move || {\n\n info!(\"Spawning thread {}\", i);\n\n\n\n let mut l = Core::new().unwrap();\n\n let handle = l.handle();\n\n\n\n let tcp_listener = net2::TcpBuilder::new_v4().unwrap()\n\n .reuse_port(true).unwrap()\n\n .bind(addr_ref.clone().as_ref()).unwrap()\n\n .listen(128).unwrap(); // limit for pending connections. https://stackoverflow.com/a/36597268/3392786\n\n\n\n let listener = TcpListener::from_listener(tcp_listener, addr_ref.as_ref(), &handle).unwrap();\n\n\n", "file_path": "src/listener/tcp_server.rs", "rank": 65, "score": 21246.307054857098 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct RotationByDay;\n\n\n\nimpl RotationByDay {\n\n\n\n pub fn new() -> Self {\n\n RotationByDay { }\n\n }\n\n}\n\n\n\nimpl RotationPolicy for RotationByDay {\n\n\n\n fn next_rotation(&self, last_rotation: DateTime<Local>) -> DateTime<Local> {\n\n (last_rotation + time::Duration::days(1)).date().and_hms(0, 0, 0)\n\n }\n\n}\n", "file_path": "src/writer/rotation_policy.rs", "rank": 66, "score": 21167.585825970713 }, { "content": "\n\nuse std::time::Duration;\n\nuse chrono::prelude::*;\n\nuse time;\n\n\n", "file_path": "src/writer/rotation_policy.rs", "rank": 67, "score": 21163.87736985507 }, { "content": "\n\nuse ::settings::{Settings, ProtocolType};\n\nuse listener::tcp_server::TcpServer;\n\nuse listener::udp_server::UdpServer;\n\nuse std::sync::Arc;\n\nuse 
std::thread::{self, JoinHandle};\n\nuse std::sync::mpsc::SyncSender;\n\nuse ::writer::file_writer::FileWriterCommand;\n\n\n\npub mod udp_server;\n\npub mod tcp_server;\n\n\n\n\n\npub struct Listener;\n\n\n\nimpl Listener {\n\n pub fn start(settings: Arc<Settings>, sender: SyncSender<FileWriterCommand>) -> Vec<JoinHandle<()>> {\n\n match settings.server.protocol {\n\n ProtocolType::TCP => TcpServer::start(settings.clone(), sender),\n\n ProtocolType::UDP => UdpServer::start(settings.clone(), sender)\n\n }\n\n }\n\n}", "file_path": "src/listener/mod.rs", "rank": 68, "score": 21.023409085674203 }, { "content": "\n\npub struct App;\n\n\n\nimpl App {\n\n\n\n pub fn start_up(settings: Arc<Settings>) {\n\n\n\n let mut file_writer = FileWriter::new(settings.buffer_bound, settings.filewriter.clone());\n\n\n\n let conn_threads = Listener::start(settings.clone(), file_writer.tx.clone());\n\n\n\n file_writer.start();\n\n for t in conn_threads {\n\n t.join().unwrap();\n\n }\n\n\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 69, "score": 17.279341630931807 }, { "content": " pub endingmsg: bool,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct FileWriterConfig {\n\n pub filedir: PathBuf,\n\n pub filename: String,\n\n pub rotation: RotationPolicyConfig,\n\n pub formatting: FormattingConfig,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct Settings {\n\n pub debug: bool,\n\n pub threads: i32,\n\n pub buffer_bound: usize,\n\n pub server: ServerConfig,\n\n pub filewriter: FileWriterConfig,\n\n}\n\n\n", "file_path": "src/settings.rs", "rank": 70, "score": 14.665426206721957 }, { "content": "use jon_listen::App;\n\n\n\nuse jon_listen::listener::udp_server;\n\nuse jon_listen::writer::file_writer::FileWriterCommand;\n\nuse jon_listen::settings::*;\n\n\n\nuse tokio_core::net::UdpSocket;\n\nuse tokio_core::reactor::Core;\n\nuse net2::unix::UnixUdpBuilderExt;\n\nuse std::time::{SystemTime, UNIX_EPOCH, Duration};\n\nuse std::path::PathBuf;\n\n\n\nuse 
test::Bencher;\n\n\n\nuse std::net::SocketAddr;\n\nuse std::thread;\n\nuse std::sync::Arc;\n\n\n\nuse futures::sync::oneshot;\n\nuse futures::{Future, Poll};\n\nuse std::sync::mpsc::{sync_channel, SyncSender, Receiver};\n\n\n\n\n", "file_path": "examples/send_via_udp.rs", "rank": 71, "score": 12.676976536907524 }, { "content": "use settings::{Settings, ProtocolType};\n\nuse listener::Listener;\n\nuse writer::file_writer::FileWriter;\n\n\n\nuse futures::future::{self, FutureResult};\n\nuse futures::{Stream, Sink, Future};\n\nuse tokio_io::{AsyncRead, AsyncWrite};\n\nuse tokio_io::codec::{Framed, Encoder, Decoder};\n\n\n\nuse bytes::BytesMut;\n\nuse tokio_proto::TcpServer;\n\nuse tokio_proto::pipeline::ServerProto;\n\nuse tokio_service::Service;\n\nuse tokio_service::NewService;\n\n\n\nuse std::sync::mpsc::SyncSender;\n\n\n\nuse std::io;\n\nuse std::str;\n\nuse std::borrow::Borrow;\n", "file_path": "src/lib.rs", "rank": 72, "score": 12.587956821973533 }, { "content": " fn deserialize_with<'de, D>(de: D) -> Result<Self, D::Error> where D: Deserializer<'de> {\n\n let s = String::deserialize(de)?;\n\n\n\n match s.as_ref() {\n\n \"TCP\" => Ok(ProtocolType::TCP),\n\n \"UDP\" => Ok(ProtocolType::UDP),\n\n _ => Err(serde::de::Error::custom(\"error trying to deserialize protocol config\"))\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Eq, PartialEq, Clone)]\n\npub enum RotationPolicyType {\n\n ByDuration,\n\n ByDay\n\n}\n\n\n\nimpl DeserializeWith for RotationPolicyType {\n\n fn deserialize_with<'de, D>(de: D) -> Result<Self, D::Error> where D: Deserializer<'de> {\n\n let s = String::deserialize(de)?;\n", "file_path": "src/settings.rs", "rank": 73, "score": 12.242771286198307 }, { "content": "Jon Listen\n\n=================================\n\n[![Build Status](https://travis-ci.org/cspinetta/jon-listen.svg?branch=master)](https://travis-ci.org/cspinetta/jon-listen)\n\n\n\nSimple and multithreading TCP/UDP logger. 
It writes data to a plain text file and it also provides additional functionalities such as log file rotation, control the number of rotated files, etc.\n\n\n\nWritten in [Rust] language.\n\n\n\n![alt text](https://upload.wikimedia.org/wikipedia/commons/4/44/Jon_Postel.jpg)\n\n\n\n> *[Jon Postel] in 1994, with map of Internet top-level domains.*\n\n\n\n## Dependencies\n\n\n\n- Rust stable:\n\n\n\n```bash\n\nrustup install stable\n\nrustup default stable\n\n```\n\n\n\n## Start server\n\n\n\nExecute from the terminal:\n\n\n\n```bash\n\nRUST_LOG=info cargo run\n\n```\n\n\n\n## Config file\n\nThe config is written in [TOML].\n\nDefault config is set in [./config/default.toml](https://github.com/cspinetta/jon-listen/blob/master/config/default.toml).\n\nOptionally you can add a config file by environment setting `RUN_MODE={development|production|anything}` in your environment and providing the appropriate file: `./config/{development|production|anything}.toml`\n\n\n\n### Config from the environment\n\n\n\nYou can provide environment variable to define log level and override configuration:\n\n\n\n* Log level: `RUST_LOG={debug|info|warn|error}`. Also it's possible to define the level for a specific module: `RUST_LOG=writer=debug` turns on debug logging for `writer` module.\n\n* Override config: define variable with a prefix of APP. 
Eg:\n\n\n\n`APP_filewriter_rotation_policy=ByDay` would set:\n\n\n\n```toml\n\n[filewriter.rotation]\n\npolicy = \"ByDay\"\n\n```\n\n\n\n*Running with inline environment variable from the terminal:*\n\n\n\n```bash\n\nRUST_LOG=info APP_filewriter_rotation_policy=ByDuration cargo run\n\n```\n\n\n\n\n\n## Run tests\n\n\n\nExecute from the terminal:\n\n\n\n```bash\n\ncargo test\n\n```\n\n\n", "file_path": "README.md", "rank": 74, "score": 9.13911461780862 }, { "content": "impl Settings {\n\n\n\n pub fn load() -> Self {\n\n let mut s = Config::new();\n\n\n\n // Start off by merging in the \"default\" configuration file\n\n s.merge(File::with_name(\"config/default\")).unwrap();\n\n\n\n // Add in the current environment file\n\n // Default to 'development' env\n\n // Note that this file is _optional_\n\n let env = env::var(\"RUN_MODE\").unwrap_or(\"development\".into());\n\n s.merge(File::with_name(&format!(\"config/{}\", env)).required(false)).unwrap();\n\n\n\n // Add in a local configuration file\n\n // This file shouldn't be checked in to git\n\n s.merge(File::with_name(\"config/local\").required(false)).unwrap();\n\n\n\n // Add in settings from the environment (with a prefix of APP)\n\n // Eg.. 
`APP_DEBUG=1 ./target/app` would set the `debug` key\n", "file_path": "src/settings.rs", "rank": 75, "score": 8.852573929395602 }, { "content": "\n\nextern crate futures;\n\nextern crate tokio_core;\n\n#[macro_use]\n\nextern crate tokio_io;\n\nextern crate tokio_proto;\n\nextern crate tokio_service;\n\n\n\nextern crate bytes;\n\n\n\npub mod listener;\n\npub mod writer;\n\npub mod settings;\n\n\n\nuse std::net::SocketAddr;\n\nuse std::thread::{self, JoinHandle};\n\nuse std::sync::Arc;\n\n\n\nuse tokio_core::reactor::Core;\n\n\n", "file_path": "src/lib.rs", "rank": 76, "score": 8.677879397051148 }, { "content": "\n\n match s.as_ref() {\n\n \"ByDuration\" => Ok(RotationPolicyType::ByDuration),\n\n \"ByDay\" => Ok(RotationPolicyType::ByDay),\n\n _ => Err(serde::de::Error::custom(\"error trying to deserialize rotation policy config\"))\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct RotationPolicyConfig {\n\n pub count: i32,\n\n #[serde(deserialize_with=\"RotationPolicyType::deserialize_with\")]\n\n pub policy: RotationPolicyType,\n\n pub duration: Option<u64>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct FormattingConfig {\n\n pub startingmsg: bool,\n", "file_path": "src/settings.rs", "rank": 77, "score": 8.140483760577737 }, { "content": "use std::env;\n\nuse config::{Config, File, Environment};\n\nuse std::path::PathBuf;\n\nuse serde;\n\nuse serde::de::Deserializer;\n\nuse serde::Deserialize;\n\nuse config::Source;\n\n\n", "file_path": "src/settings.rs", "rank": 78, "score": 7.353109536336191 }, { "content": "#![feature(test)]\n\n\n\n#[macro_use]\n\nextern crate log;\n\nextern crate pretty_env_logger;\n\n#[macro_use]\n\nextern crate matches;\n\n\n\nextern crate test;\n\n\n\nextern crate tokio_core;\n\n#[macro_use]\n\nextern crate tokio_io;\n\n\n\nextern crate futures;\n\n\n\nextern crate jon_listen;\n\nextern crate net2;\n\n\n\n\n", "file_path": "examples/send_via_udp.rs", "rank": 79, "score": 6.355770351096462 }, { 
"content": "\n\n#[macro_use]\n\nextern crate log;\n\nextern crate pretty_env_logger;\n\n\n\nextern crate jon_listen;\n\n\n\nuse std::sync::Arc;\n\n\n\nuse jon_listen::App;\n\nuse jon_listen::settings::Settings;\n\n\n", "file_path": "src/main.rs", "rank": 80, "score": 5.223104519778045 }, { "content": "### Testing through logging_client\n\n\n\nHaving a log server listening at `0.0.0.0:8080` via UDP. To send it log for 10 seconds, just execute from the terminal:\n\n\n\n```bash\n\nRUST_LOG=info cargo run --example logging_client -- --address '127.0.0.1:8080' --duration 10\n\n```\n\n\n\nIf the server is listening via TCP:\n\n\n\n```bash\n\nRUST_LOG=info cargo run --example logging_client -- --address '127.0.0.1:8080' --duration 10 --tcp\n\n```\n\n\n\n## License\n\n\n\nApache-2.0\n\n\n\n[Rust]:https://www.rust-lang.org/en-US/index.html\n\n[TOML]:https://github.com/toml-lang/toml\n\n[Jon Postel]:https://en.wikipedia.org/wiki/Jon_Postel\n", "file_path": "README.md", "rank": 81, "score": 5.201730407610697 }, { "content": " s.merge(Environment::with_prefix(\"app\")).unwrap();\n\n\n\n // Now that we're done, let's access our configuration\n\n info!(\"Debug: {:?}\", s.get_bool(\"debug\"));\n\n debug!(\"Provided settings: {:?}\", s.collect());\n\n// info!(\"database: {:?}\", s.get::<String>(\"database.url\"));\n\n\n\n // You can deserialize (and thus freeze) the entire configuration as\n\n let settings = s.deserialize().unwrap();\n\n info!(\"Settings: {:?}\", settings);\n\n settings\n\n }\n\n}\n", "file_path": "src/settings.rs", "rank": 82, "score": 4.480439059767949 }, { "content": "\n\n#![feature(try_trait)]\n\n#![feature(custom_attribute)]\n\n\n\n#[macro_use]\n\nextern crate log;\n\nextern crate pretty_env_logger;\n\n\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate serde;\n\nextern crate toml;\n\nextern crate config;\n\n\n\nextern crate net2;\n\n\n\nextern crate chrono;\n\nextern crate time;\n\nextern crate glob;\n\nextern crate regex;\n", "file_path": 
"src/lib.rs", "rank": 83, "score": 4.034892108819717 } ]
Rust
src/connection/info.rs
bayne/libpq.rs
124f73ac438fd542f933e94499550b1158b8d38e
#[derive(Clone, Debug, PartialEq)] pub struct Info { pub keyword: String, pub envvar: Option<String>, pub compiled: Option<String>, pub val: Option<String>, pub label: Option<String>, pub dispchar: String, pub dispsize: i32, } impl Info { /** * Returns the default connection options. * * See [PQconndefaults](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNDEFAULTS) */ pub fn new() -> Self { Self::default() } /** * Returns parsed connection options from the provided connection string. * * See * [PQconninfoParse](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNINFOPARSE). */ pub fn from(dsn: &str) -> std::result::Result<Vec<Self>, String> { let c_dsn = crate::ffi::to_cstr(dsn); unsafe { let mut errmsg: *mut i8 = std::ptr::null_mut(); let raw = pq_sys::PQconninfoParse(c_dsn.as_ptr(), &mut errmsg); if raw.is_null() { if errmsg.is_null() { return Err("Unknow error".to_string()); } else { let err = crate::ffi::to_string(errmsg); pq_sys::PQfreemem(errmsg as *mut std::ffi::c_void); return Err(err); } } let info = Self::vec_from_nta(raw); pq_sys::PQconninfoFree(raw); Ok(info) } } fn from_raw(info: *mut pq_sys::_PQconninfoOption) -> Self { unsafe { Self { keyword: crate::ffi::to_string((*info).keyword), envvar: if (*info).envvar.is_null() { None } else { Some(crate::ffi::to_string((*info).envvar)) }, compiled: if (*info).compiled.is_null() { None } else { Some(crate::ffi::to_string((*info).compiled)) }, val: if (*info).val.is_null() { None } else { Some(crate::ffi::to_string((*info).val)) }, label: if (*info).label.is_null() { None } else { Some(crate::ffi::to_string((*info).label)) }, dispchar: crate::ffi::to_string((*info).dispchar), dispsize: (*info).dispsize, } } } fn vec_from_nta(raw: *mut pq_sys::_PQconninfoOption) -> Vec<Self> { let mut vec = Vec::new(); for x in 0.. 
{ unsafe { if (*raw.offset(x)).keyword.is_null() { break; } else { let info = raw.offset(x).into(); vec.push(info); } } } vec } } impl Default for Info { fn default() -> Self { unsafe { let raw = pq_sys::PQconndefaults(); let info = raw.into(); pq_sys::PQconninfoFree(raw); info } } } #[doc(hidden)] impl From<*mut pq_sys::_PQconninfoOption> for Info { fn from(info: *mut pq_sys::_PQconninfoOption) -> Self { Self::from_raw(info) } } #[cfg(test)] mod test { #[test] fn parse_info() { assert!(crate::connection::Info::from("host=localhost user=postgres").is_ok()); assert_eq!( crate::connection::Info::from("'"), Err("missing \"=\" after \"'\" in connection info string\n".to_string()) ); } #[test] fn defaults() { let _ = crate::connection::Info::default(); } }
#[derive(Clone, Debug, PartialEq)] pub struct Info { pub keyword: String, pub envvar: Option<String>, pub compiled: Option<String>, pub val: Option<String>, pub label: Option<String>, pub dispchar: String, pub dispsize: i32, } impl Info { /** * Returns the default connection options. * * See [PQconndefaults](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNDEFAULTS) */ pub fn new() -> Self { Self::default() } /** * Returns parsed connection options from the provided connection string. * * See * [PQconninfoParse](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNINFOPARSE). */ pub fn from(dsn: &str) -> std::result::Result<Vec<Self>, String> { let c_dsn = crate::ffi::to_cstr(dsn); unsafe { let mut errmsg: *mut i8 = std::ptr::null_mut(); let raw = pq_sys::PQconninfoParse(c_dsn.as_ptr(), &mut errmsg); if raw.is_null() { if errmsg.is_null() { return Err("Unknow error".to_string()); } else { let err = crate::ffi::to_string(errmsg); pq_sys::PQfreemem(errmsg as *mut std::ffi::c_void); return Err(err); } } let info = Self::vec_from_nta(raw); pq_sys::PQconninfoFree(raw); Ok(info) } } fn from_raw(info: *mut pq_sys::_PQconninfoOption) -> Self { unsafe { Self { keyword: crate::ffi::to_string((*info).keyword), envvar: if (*info).envvar.is_null() { None } else { Some(crate::ffi::to_string((*info).envvar)) }, compiled: if (*info).compiled.is_null() { None } else { Some(crate::ffi::to_string((*info).compiled)) }, val: if (*info).val.is_null() { None } else { Some(crate::ffi::to_string((*info).val)) }, label: if (*info).label.is_null() { None } else { Some(crate::ffi::to_string((*info).label)) }, dispchar: crate::ffi::to_string((*info).dispchar), dispsize: (*info).dispsize, } } } fn vec_from_nta(raw: *mut pq_sys::_PQconninfoOption) -> Vec<Self> { let mut vec = Vec::new(); for x in 0.. { unsafe { if (*raw.offset(x)).keyword.is_nul
} impl Default for Info { fn default() -> Self { unsafe { let raw = pq_sys::PQconndefaults(); let info = raw.into(); pq_sys::PQconninfoFree(raw); info } } } #[doc(hidden)] impl From<*mut pq_sys::_PQconninfoOption> for Info { fn from(info: *mut pq_sys::_PQconninfoOption) -> Self { Self::from_raw(info) } } #[cfg(test)] mod test { #[test] fn parse_info() { assert!(crate::connection::Info::from("host=localhost user=postgres").is_ok()); assert_eq!( crate::connection::Info::from("'"), Err("missing \"=\" after \"'\" in connection info string\n".to_string()) ); } #[test] fn defaults() { let _ = crate::connection::Info::default(); } }
l() { break; } else { let info = raw.offset(x).into(); vec.push(info); } } } vec }
function_block-function_prefixed
[ { "content": "#[deprecated(note = \"Use libpq::Connection::escape_string instead\")]\n\npub fn string(from: &str) -> String {\n\n let c_from = crate::ffi::to_cstr(from);\n\n // @see https://github.com/postgres/postgres/blob/REL_12_2/src/interfaces/libpq/fe-exec.c#L3329\n\n let cstring = crate::ffi::new_cstring(2 * from.len() + 1);\n\n let raw = cstring.into_raw();\n\n\n\n unsafe {\n\n pq_sys::PQescapeString(raw, c_from.as_ptr(), from.len());\n\n };\n\n\n\n crate::ffi::from_raw(raw)\n\n}\n\n\n\npub(crate) fn bytea_conn(\n\n conn: &crate::Connection,\n\n from: &[u8],\n\n) -> std::result::Result<Vec<u8>, String> {\n\n let to = unsafe {\n\n let mut len = 0;\n\n let tmp = pq_sys::PQescapeByteaConn(conn.into(), from.as_ptr(), from.len(), &mut len);\n", "file_path": "src/escape.rs", "rank": 0, "score": 186307.6137397284 }, { "content": "pub fn identifier(conn: &crate::Connection, str: &str) -> std::result::Result<String, String> {\n\n let c_str = crate::ffi::to_cstr(str);\n\n unsafe {\n\n let raw = pq_sys::PQescapeIdentifier(conn.into(), c_str.as_ptr(), str.len());\n\n\n\n if raw.is_null() {\n\n return Err(conn\n\n .error_message()\n\n .unwrap_or_else(|| \"Unknow error\".to_string()));\n\n }\n\n\n\n let escaped = crate::ffi::to_string(raw);\n\n pq_sys::PQfreemem(raw as *mut std::ffi::c_void);\n\n\n\n Ok(escaped)\n\n }\n\n}\n\n\n\npub(crate) fn string_conn(\n\n conn: &crate::Connection,\n", "file_path": "src/escape.rs", "rank": 1, "score": 168992.76110099518 }, { "content": "pub fn password(passwd: &str, user: &str) -> String {\n\n let c_passwd = crate::ffi::to_cstr(passwd);\n\n let c_user = crate::ffi::to_cstr(user);\n\n\n\n let encrypt = unsafe { pq_sys::PQencryptPassword(c_passwd.as_ptr(), c_user.as_ptr()) };\n\n\n\n crate::ffi::from_raw(encrypt)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[test]\n\n fn password() {\n\n assert_eq!(\n\n crate::encrypt::password(\"1234\", \"postgres\"),\n\n \"md524bb002702969490e41e26e1a454036c\".to_string()\n\n );\n\n }\n\n}\n", 
"file_path": "src/encrypt.rs", "rank": 2, "score": 163442.35899223192 }, { "content": "pub fn version() -> i32 {\n\n unsafe { pq_sys::PQlibVersion() }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n static INIT: std::sync::Once = std::sync::Once::new();\n\n\n\n pub fn dsn() -> String {\n\n std::env::var(\"PQ_DSN\").unwrap_or_else(|_| \"host=localhost\".to_string())\n\n }\n\n\n\n pub fn new_conn() -> crate::Connection {\n\n INIT.call_once(|| {\n\n pretty_env_logger::init();\n\n });\n\n\n\n crate::Connection::new(&dsn()).unwrap()\n\n }\n\n\n\n #[test]\n\n fn version() {\n\n assert!(crate::version() > 0);\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 3, "score": 141235.75271754718 }, { "content": "#[deprecated(note = \"Use libpq::Connection::escape_bytea instead\")]\n\npub fn bytea(from: &[u8]) -> std::result::Result<Vec<u8>, String> {\n\n let to = unsafe {\n\n let mut len = 0;\n\n let tmp = pq_sys::PQescapeBytea(from.as_ptr(), from.len(), &mut len);\n\n let to = std::slice::from_raw_parts(tmp, len - 1).to_vec();\n\n pq_sys::PQfreemem(tmp as *mut std::ffi::c_void);\n\n\n\n to\n\n };\n\n\n\n Ok(to)\n\n}\n\n\n\n/**\n\n * Converts a string representation of binary data into binary data — the reverse of\n\n * `libpq::Connection::escape_bytea`.\n\n *\n\n * See\n\n * [PQunescapeBytea](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQUNESCAPEBYTEA).\n\n */\n", "file_path": "src/escape.rs", "rank": 4, "score": 130727.97278481135 }, { "content": "pub fn build(filename: &str) -> std::io::Result<()> {\n\n let mut file = BufWriter::new(File::create(filename)?);\n\n\n\n let errors = parse_errors();\n\n\n\n make_header(&mut file)?;\n\n make_consts(&errors, &mut file)?;\n\n make_type(&errors, &mut file)\n\n}\n\n\n", "file_path": "codegen/src/sqlstate.rs", "rank": 5, "score": 107013.16480745732 }, { "content": "pub fn build(filename: &str) -> std::io::Result<()> {\n\n let mut file = BufWriter::new(File::create(filename)?);\n\n let types = parse_types();\n\n\n\n 
make_header(&mut file)?;\n\n make_consts(&mut file, &types)?;\n\n make_impl(&mut file, &types)\n\n}\n\n\n", "file_path": "codegen/src/type_gen.rs", "rank": 6, "score": 104292.39868732153 }, { "content": "pub fn unescape_bytea(from: &[u8]) -> std::result::Result<Vec<u8>, ()> {\n\n let to = unsafe {\n\n let mut len = 0;\n\n let tmp = pq_sys::PQunescapeBytea(from.as_ptr(), &mut len);\n\n if tmp.is_null() {\n\n return Err(());\n\n }\n\n let to = std::slice::from_raw_parts(tmp, len).to_vec();\n\n pq_sys::PQfreemem(tmp as *mut std::ffi::c_void);\n\n\n\n to\n\n };\n\n\n\n Ok(to)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[test]\n\n fn literal() {\n", "file_path": "src/escape.rs", "rank": 7, "score": 99792.91304336868 }, { "content": "fn make_type(errors: &BTreeMap<String, Error>, file: &mut BufWriter<File>) -> std::io::Result<()> {\n\n let mut from_code = Vec::new();\n\n\n\n for (id, error) in errors {\n\n from_code.push(format!(\" \\\"{}\\\" => {},\", id, error.name));\n\n }\n\n\n\n write!(\n\n file,\n\n \"\n\nimpl State {{\n\n /// Creates a `State` from its error code.\n\n pub fn from_code(s: &str) -> State {{\n\n match s {{\n\n{}\n\n _ => unreachable!(),\n\n }}\n\n }}\n\n}}\n\n\",\n\n from_code.join(\"\\n\")\n\n )\n\n}\n\n\n", "file_path": "codegen/src/sqlstate.rs", "rank": 8, "score": 78632.77418539557 }, { "content": "fn parse_errors() -> BTreeMap<String, Error> {\n\n let mut errors = BTreeMap::new();\n\n\n\n for line in ERRCODES_TXT.lines() {\n\n if line.starts_with('#') || line.starts_with(\"Section\") || line.trim().is_empty() {\n\n continue;\n\n }\n\n\n\n let mut it = line.split_whitespace();\n\n let code = it.next().unwrap().to_string();\n\n let kind = match it.next().unwrap() {\n\n \"E\" => Kind::Error,\n\n \"W\" => Kind::Warning,\n\n \"S\" => Kind::Success,\n\n _ => unreachable!(),\n\n };\n\n let name = it.next().unwrap().replace(\"ERRCODE_\", \"\");\n\n let message = it.next().map(|x| x.replace(\"_\", \" \").to_string());\n\n\n\n let error = Error {\n", 
"file_path": "codegen/src/sqlstate.rs", "rank": 9, "score": 78558.94460075372 }, { "content": "fn make_impl(w: &mut BufWriter<File>, types: &BTreeMap<u32, Type>) -> std::io::Result<()> {\n\n writeln!(\n\n w,\n\n \"impl std::convert::TryFrom<u32> for Type {{\n\n type Error = String;\n\n\n\n fn try_from(oid: u32) -> std::result::Result<Self, Self::Error> {{\n\n match oid {{\"\n\n )?;\n\n\n\n for ty in types.values() {\n\n writeln!(w, \" {} => Ok({}),\", ty.oid, ty.ident)?;\n\n }\n\n\n\n write!(\n\n w,\n\n r#\"\n\n _ => Err(\"unknow type\".to_string()),\n\n }}\n\n }}\n\n}}\"#\n\n )\n\n}\n\n\n", "file_path": "codegen/src/type_gen.rs", "rank": 10, "score": 71536.01137463549 }, { "content": "fn make_header(w: &mut BufWriter<File>) -> std::io::Result<()> {\n\n writeln!(w, \"// Autogenerated file - DO NOT EDIT\")\n\n}\n\n\n", "file_path": "codegen/src/type_gen.rs", "rank": 18, "score": 64538.69562084453 }, { "content": "fn make_header(file: &mut BufWriter<File>) -> std::io::Result<()> {\n\n write!(\n\n file,\n\n \"// Autogenerated file - DO NOT EDIT\n\n\"\n\n )\n\n}\n\n\n", "file_path": "codegen/src/sqlstate.rs", "rank": 19, "score": 64538.69562084453 }, { "content": "struct Error {\n\n code: String,\n\n kind: Kind,\n\n name: String,\n\n message: Option<String>,\n\n}\n\n\n\nimpl std::fmt::Display for Error {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n r#\"{doc}\n\npub const {name}: State = State {{\n\n code: \"{code}\",\n\n name: \"{name}\",\n\n kind: Kind::{kind:?},\n\n message: {message},\n\n}};\n\n\"#,\n\n code = self.code,\n", "file_path": "codegen/src/sqlstate.rs", "rank": 20, "score": 54885.90494005311 }, { "content": "struct Type {\n\n oid: u32,\n\n name: String,\n\n ident: String,\n\n kind: String,\n\n element: u32,\n\n doc: String,\n\n}\n\n\n", "file_path": "codegen/src/type_gen.rs", "rank": 21, "score": 53399.98090930437 }, { "content": "fn make_consts(\n\n errors: &BTreeMap<String, Error>,\n\n file: &mut 
BufWriter<File>,\n\n) -> std::io::Result<()> {\n\n for (_, error) in errors {\n\n write!(file, \"{}\", error)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "codegen/src/sqlstate.rs", "rank": 22, "score": 52468.789030027154 }, { "content": "fn make_consts(w: &mut BufWriter<File>, types: &BTreeMap<u32, Type>) -> std::io::Result<()> {\n\n for ty in types.values() {\n\n writeln!(\n\n w,\n\n r#\"\n\n/// {descr}\n\npub const {ident}: Type = Type {{\n\n oid: {oid},\n\n descr: \"{descr}\",\n\n name: \"{name}\",\n\n kind: {kind},\n\n}};\"#,\n\n ident = ty.ident,\n\n oid = ty.oid,\n\n name = ty.name,\n\n kind = match ty.kind.as_str() {\n\n \"A\" => format!(\"Kind::Array({})\", ty.element),\n\n \"B\" => \"Kind::Boolean\".to_string(),\n\n \"C\" => \"Kind::Composite\".to_string(),\n\n \"D\" => \"Kind::DateTime\".to_string(),\n", "file_path": "codegen/src/type_gen.rs", "rank": 23, "score": 52335.3157883737 }, { "content": "struct DatParser<'a> {\n\n it: iter::Peekable<str::CharIndices<'a>>,\n\n s: &'a str,\n\n}\n\n\n\nimpl<'a> DatParser<'a> {\n\n fn new(s: &'a str) -> DatParser<'a> {\n\n DatParser {\n\n it: s.char_indices().peekable(),\n\n s,\n\n }\n\n }\n\n\n\n fn parse_array(&mut self) -> Vec<HashMap<String, String>> {\n\n self.eat('[');\n\n let mut vec = vec![];\n\n while !self.try_eat(']') {\n\n let object = self.parse_object();\n\n vec.push(object);\n\n }\n", "file_path": "codegen/src/type_gen.rs", "rank": 24, "score": 48551.0380130382 }, { "content": "fn main() -> std::io::Result<()> {\n\n type_gen::build(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/../src/types/gen.rs\"))?;\n\n sqlstate::build(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/../src/state/gen.rs\"))\n\n}\n", "file_path": "codegen/src/main.rs", "rank": 25, "score": 42192.499686745876 }, { "content": " log::debug!(\"Starting connection with params {:?}\", params);\n\n\n\n let (_c_keywords, ptr_keywords) = crate::ffi::vec_to_nta(&params.keys().collect::<Vec<_>>());\n\n let (_c_values, ptr_values) = 
crate::ffi::vec_to_nta(&params.values().collect::<Vec<_>>());\n\n\n\n unsafe {\n\n pq_sys::PQconnectStartParams(ptr_keywords.as_ptr(), ptr_values.as_ptr(), expand_dbname as i32)\n\n }\n\n .try_into()\n\n }\n\n\n\n /**\n\n * Makes a new connection to the database server.\n\n *\n\n * See [PQsetdb](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQSETDB).\n\n */\n\n pub fn set_db(\n\n host: Option<&str>,\n\n port: Option<&str>,\n\n options: Option<&str>,\n", "file_path": "src/connection/_connect.rs", "rank": 26, "score": 41379.80257710697 }, { "content": " tty: Option<&str>,\n\n db_name: Option<&str>,\n\n ) -> std::result::Result<Self, String> {\n\n Self::login(host, port, options, tty, db_name, None, None)\n\n }\n\n\n\n /**\n\n * Makes a new connection to the database server.\n\n *\n\n * See\n\n * [PQsetdbLogin](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQSETDBLOGIN).\n\n */\n\n pub fn login(\n\n host: Option<&str>,\n\n port: Option<&str>,\n\n options: Option<&str>,\n\n tty: Option<&str>,\n\n db_name: Option<&str>,\n\n login: Option<&str>,\n\n pwd: Option<&str>,\n", "file_path": "src/connection/_connect.rs", "rank": 27, "score": 41376.53762362961 }, { "content": " *\n\n * See [PQconnectdbParams](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNECTDBPARAMS).\n\n */\n\n pub fn with_params(\n\n params: &std::collections::HashMap<&str, &str>,\n\n expand_dbname: bool,\n\n ) -> std::result::Result<Self, String> {\n\n log::debug!(\"Connecting with params {:?}\", params);\n\n\n\n let (_c_keywords, ptr_keywords) = crate::ffi::vec_to_nta(&params.keys().collect::<Vec<_>>());\n\n let (_c_values, ptr_values) = crate::ffi::vec_to_nta(&params.values().collect::<Vec<_>>());\n\n\n\n unsafe {\n\n pq_sys::PQconnectdbParams(ptr_keywords.as_ptr(), ptr_values.as_ptr(), expand_dbname as i32)\n\n }\n\n .try_into()\n\n }\n\n\n\n /**\n\n * Make a connection to the database server in a nonblocking manner.\n", "file_path": 
"src/connection/_connect.rs", "rank": 28, "score": 41376.499689433986 }, { "content": "/**\n\n * [Database Connection Control Functions](https://www.postgresql.org/docs/current/libpq-connect.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Makes a new connection to the database server.\n\n *\n\n * See\n\n * [PQconnectdb](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNECTDB).\n\n */\n\n pub fn new(dsn: &str) -> std::result::Result<Self, String> {\n\n log::debug!(\"Connecting to '{}'\", dsn);\n\n\n\n let c_dsn = crate::ffi::to_cstr(dsn);\n\n\n\n unsafe { pq_sys::PQconnectdb(c_dsn.as_ptr()) }.try_into()\n\n }\n\n\n\n /**\n\n * Makes a new connection to the database server.\n", "file_path": "src/connection/_connect.rs", "rank": 29, "score": 41374.666445545394 }, { "content": " * is not necessary to supply correct user name, password, or database name values to obtain\n\n * the server status; however, if incorrect values are provided, the server will log a failed\n\n * connection attempt.\n\n *\n\n * See [PQpingParams](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQPINGPARAMS).\n\n */\n\n pub fn ping_params(\n\n params: &std::collections::HashMap<String, String>,\n\n expand_dbname: bool,\n\n ) -> crate::ping::Status {\n\n log::debug!(\"Ping with params {:?}\", params);\n\n\n\n let (_c_keywords, ptr_keywords) = crate::ffi::vec_to_nta(&params.keys().collect::<Vec<_>>());\n\n let (_c_values, ptr_values) = crate::ffi::vec_to_nta(&params.values().collect::<Vec<_>>());\n\n\n\n unsafe { pq_sys::PQpingParams(ptr_keywords.as_ptr(), ptr_values.as_ptr(), expand_dbname as i32) }\n\n .into()\n\n }\n\n\n\n /**\n", "file_path": "src/connection/_connect.rs", "rank": 30, "score": 41371.91998889456 }, { "content": " * Reports the status of the server.\n\n *\n\n * It accepts connection parameters identical to those of `libpq::Connection::new`. 
It is not\n\n * necessary to supply correct user name, password, or database name values to obtain the\n\n * server status; however, if incorrect values are provided, the server will log a failed\n\n * connection attempt.\n\n *\n\n * See [PQping](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQPING).\n\n */\n\n pub fn ping(dsn: &str) -> crate::ping::Status {\n\n log::debug!(\"Ping '{}'\", dsn);\n\n\n\n let c_dsn = crate::ffi::to_cstr(dsn);\n\n\n\n unsafe { pq_sys::PQping(c_dsn.as_ptr()) }.into()\n\n }\n\n\n\n /**\n\n * Return the connection options used for the connection\n\n *\n", "file_path": "src/connection/_connect.rs", "rank": 31, "score": 41371.57759448289 }, { "content": " *\n\n * See [PQconnectStart](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNECTSTART).\n\n */\n\n pub fn start(conninfo: &str) -> std::result::Result<Self, String> {\n\n log::debug!(\"Starting connection to '{}'\", conninfo);\n\n\n\n let c_conninfo = crate::ffi::to_cstr(conninfo);\n\n\n\n unsafe { pq_sys::PQconnectStart(c_conninfo.as_ptr()) }.try_into()\n\n }\n\n\n\n /**\n\n * Make a connection to the database server in a nonblocking manner.\n\n *\n\n * See [PQconnectStartParams](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNECTSTARTPARAMS).\n\n */\n\n pub fn start_params(\n\n params: &std::collections::HashMap<String, String>,\n\n expand_dbname: bool,\n\n ) -> std::result::Result<Self, String> {\n", "file_path": "src/connection/_connect.rs", "rank": 32, "score": 41370.36079934713 }, { "content": " * See\n\n * [PQconninfo](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNINFO).\n\n */\n\n pub fn info(&self) -> crate::connection::Info {\n\n unsafe {\n\n let raw = pq_sys::PQconninfo(self.into());\n\n let info = raw.into();\n\n pq_sys::PQconninfoFree(raw);\n\n\n\n info\n\n }\n\n }\n\n\n\n}\n", "file_path": "src/connection/_connect.rs", "rank": 33, "score": 41369.33383096702 }, { "content": " ) -> 
std::result::Result<Self, String> {\n\n let c_host = crate::ffi::to_cstr(host.unwrap_or_default());\n\n let c_port = crate::ffi::to_cstr(port.unwrap_or_default());\n\n let c_options = crate::ffi::to_cstr(options.unwrap_or_default());\n\n let c_tty = crate::ffi::to_cstr(tty.unwrap_or_default());\n\n let c_db_name = crate::ffi::to_cstr(db_name.unwrap_or_default());\n\n let c_login = crate::ffi::to_cstr(login.unwrap_or_default());\n\n let c_pwd = crate::ffi::to_cstr(pwd.unwrap_or_default());\n\n\n\n unsafe {\n\n pq_sys::PQsetdbLogin(\n\n c_host.as_ptr(),\n\n c_port.as_ptr(),\n\n c_options.as_ptr(),\n\n c_tty.as_ptr(),\n\n c_db_name.as_ptr(),\n\n c_login.as_ptr(),\n\n c_pwd.as_ptr(),\n\n )\n\n }\n", "file_path": "src/connection/_connect.rs", "rank": 34, "score": 41362.52480057155 }, { "content": " .try_into()\n\n }\n\n\n\n /**\n\n * See [PQconnectPoll](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQCONNECTPOLL).\n\n */\n\n pub fn poll(&self) -> crate::poll::Status {\n\n unsafe { pq_sys::PQconnectPoll(self.into()) }.into()\n\n }\n\n\n\n /**\n\n * Resets the communication channel to the server.\n\n *\n\n * See [PQreset](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQRESET).\n\n */\n\n pub fn reset(&self) {\n\n unsafe { pq_sys::PQreset(self.into()) };\n\n }\n\n\n\n /**\n", "file_path": "src/connection/_connect.rs", "rank": 35, "score": 41361.259543925386 }, { "content": " * Reset the communication channel to the server, in a nonblocking manner.\n\n *\n\n * See [PQresetStart](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQRESETSTART).\n\n */\n\n pub fn reset_start(&self) {\n\n unsafe { pq_sys::PQresetStart(self.into()) };\n\n }\n\n\n\n /**\n\n * See\n\n * [PQresetPoll](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PQRESETPOLL).\n\n */\n\n pub fn reset_poll(&self) -> crate::poll::Status {\n\n unsafe { pq_sys::PQresetPoll(self.into()) }.into()\n\n }\n\n\n\n /**\n\n * Reports the status of the 
server.\n\n *\n\n * It accepts connection parameters identical to those of `libpq::Connection::with_params`. It\n", "file_path": "src/connection/_connect.rs", "rank": 36, "score": 41361.04349923572 }, { "content": "fn parse_types() -> BTreeMap<u32, Type> {\n\n let raw_types = DatParser::new(PG_TYPE_DAT).parse_array();\n\n let raw_ranges = DatParser::new(PG_RANGE_DAT).parse_array();\n\n\n\n let oids_by_name = raw_types\n\n .iter()\n\n .map(|m| (m[\"typname\"].clone(), m[\"oid\"].parse::<u32>().unwrap()))\n\n .collect::<HashMap<_, _>>();\n\n\n\n let range_elements = raw_ranges\n\n .iter()\n\n .map(|m| {\n\n (\n\n oids_by_name[&*m[\"rngtypid\"]],\n\n oids_by_name[&*m[\"rngsubtype\"]],\n\n )\n\n })\n\n .collect::<HashMap<_, _>>();\n\n\n\n let range_vector_re = Regex::new(\"(range|vector)$\").unwrap();\n", "file_path": "codegen/src/type_gen.rs", "rank": 37, "score": 38125.744524376605 }, { "content": " */\n\n pub fn ssl_in_use(&self) -> bool {\n\n unsafe { pq_sys::PQsslInUse(self.into()) == 1 }\n\n }\n\n\n\n /**\n\n * Returns SSL-related information about the connection.\n\n *\n\n * See [PQsslAttribute](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQSSLATTRIBUTE).\n\n */\n\n pub fn ssl_attribute(&self, attribute: crate::ssl::Attribute) -> Option<String> {\n\n let c_attribute = crate::ffi::to_cstr(&attribute.to_string());\n\n\n\n let raw =\n\n unsafe { pq_sys::PQsslAttribute(self.into(), c_attribute.as_ptr()) };\n\n\n\n if raw.is_null() {\n\n None\n\n } else {\n\n crate::ffi::to_option_string(raw)\n", "file_path": "src/connection/_status.rs", "rank": 38, "score": 32645.809849058045 }, { "content": " )]\n\n pub fn tty(&self) -> Option<String> {\n\n crate::ffi::to_option_string(unsafe { pq_sys::PQtty(self.into()) })\n\n }\n\n\n\n /**\n\n * Returns the command-line options passed in the connection request.\n\n *\n\n * See [PQoptions](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQOPTIONS).\n\n */\n\n pub fn options(&self) -> 
Option<String> {\n\n crate::ffi::to_option_string(unsafe { pq_sys::PQoptions(self.into()) })\n\n }\n\n\n\n /**\n\n * Returns the status of the connection.\n\n *\n\n * See [PQstatus](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQSTATUS).\n\n */\n\n pub fn status(&self) -> crate::connection::Status {\n", "file_path": "src/connection/_status.rs", "rank": 39, "score": 32644.263371060035 }, { "content": "/**\n\n * [Asynchronous Notification](https://www.postgresql.org/docs/current/libpq-notify.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Returns the next notification from a list of unhandled notification messages received from\n\n * the server.\n\n */\n\n pub fn notifies(&self) -> Option<crate::connection::Notify> {\n\n let raw = unsafe { pq_sys::PQnotifies(self.into()) };\n\n\n\n if raw.is_null() {\n\n None\n\n } else {\n\n Some(raw.into())\n\n }\n\n }\n\n}\n", "file_path": "src/connection/_notify.rs", "rank": 40, "score": 32644.232699057167 }, { "content": "#[derive(Clone, Debug)]\n\npub struct Cancel {\n\n cancel: *mut pq_sys::pg_cancel,\n\n}\n\n\n\nimpl Cancel {\n\n /**\n\n * Requests that the server abandon processing of the current command.\n\n *\n\n * See [PQcancel](https://www.postgresql.org/docs/current/libpq-cancel.html#LIBPQ-PQCANCEL).\n\n */\n\n pub fn request(&self) -> std::result::Result<(), String> {\n\n log::debug!(\"Canceling\");\n\n\n\n let capacity = 256;\n\n let c_error = crate::ffi::new_cstring(capacity);\n\n let ptr_error = c_error.into_raw();\n\n\n\n let sucess = unsafe { pq_sys::PQcancel(self.into(), ptr_error, capacity as i32) };\n\n let error = crate::ffi::from_raw(ptr_error);\n", "file_path": "src/connection/cancel.rs", "rank": 41, "score": 32644.066842164197 }, { "content": " unsafe { pq_sys::PQserverVersion(self.into()) }\n\n }\n\n\n\n /**\n\n * Returns the error message most recently generated by an operation on the connection.\n\n *\n\n * See 
[PQerrorMessage](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQERRORMESSAGE).\n\n */\n\n pub fn error_message(&self) -> Option<String> {\n\n crate::ffi::to_option_string(unsafe { pq_sys::PQerrorMessage(self.into()) })\n\n }\n\n\n\n /**\n\n * Obtains the file descriptor number of the connection socket to the server.\n\n *\n\n * See [PQsocket](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQSOCKET).\n\n */\n\n pub fn socket(&self) -> std::result::Result<i32, ()> {\n\n let socket = unsafe { pq_sys::PQsocket(self.into()) };\n\n\n", "file_path": "src/connection/_status.rs", "rank": 42, "score": 32642.029902697224 }, { "content": " } else {\n\n Err(self\n\n .error_message()\n\n .unwrap_or_else(|| \"Unknow error\".to_string()))\n\n }\n\n }\n\n\n\n /**\n\n * Waits for the next result a prior `send_*` call, and returns it.\n\n *\n\n * See [PQgetResult](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQGETRESULT).\n\n */\n\n pub fn result(&self) -> Option<crate::Result> {\n\n let raw = unsafe { pq_sys::PQgetResult(self.into()) };\n\n\n\n if raw.is_null() {\n\n None\n\n } else {\n\n Some(raw.into())\n\n }\n", "file_path": "src/connection/_async.rs", "rank": 43, "score": 32641.848451713224 }, { "content": " *\n\n * See\n\n * [PQgetCopyData](https://www.postgresql.org/docs/current/libpq-copy.html#LIBPQ-PQGETCOPYDATA).\n\n */\n\n pub fn copy_data(&self, r#async: bool) -> std::result::Result<String, String> {\n\n let mut ptr = std::ptr::null_mut();\n\n\n\n let success = unsafe { pq_sys::PQgetCopyData(self.into(), &mut ptr, r#async as i32) };\n\n let buffer = crate::ffi::from_raw(ptr);\n\n\n\n match success {\n\n -2 => Err(self\n\n .error_message()\n\n .unwrap_or_else(|| \"Unknow error\".to_string())),\n\n -1 => Err(\"COPY is done\".to_string()),\n\n 0 => Err(\"COPY still in progress\".to_string()),\n\n _ => Ok(buffer),\n\n }\n\n }\n\n}\n", "file_path": "src/connection/_copy.rs", "rank": 44, "score": 32641.792730034816 }, { 
"content": " } else {\n\n log::debug!(\"Set blocking\");\n\n }\n\n\n\n let status = unsafe { pq_sys::PQsetnonblocking(self.into(), non_blocking as i32) };\n\n\n\n if status < 0 {\n\n Err(())\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n /**\n\n * Returns the blocking status of the database connection.\n\n *\n\n * See\n\n * [PQisnonblocking](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQISNONBLOCKING).\n\n */\n\n pub fn is_non_blocking(&self) -> bool {\n", "file_path": "src/connection/_async.rs", "rank": 45, "score": 32641.556055396268 }, { "content": "/**\n\n * [Connection Status Functions](https://www.postgresql.org/docs/current/libpq-status.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Returns the database name of the connection.\n\n *\n\n * See [PQdb](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQDB).\n\n */\n\n pub fn db(&self) -> String {\n\n crate::ffi::to_string(unsafe { pq_sys::PQdb(self.into()) })\n\n }\n\n\n\n /**\n\n * Returns the user name of the connection.\n\n *\n\n * See [PQuser](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQUSER).\n\n */\n\n pub fn user(&self) -> String {\n\n crate::ffi::to_string(unsafe { pq_sys::PQuser(self.into()) })\n", "file_path": "src/connection/_status.rs", "rank": 46, "score": 32641.540102681643 }, { "content": " &self,\n\n name: Option<&str>,\n\n param_values: &[Option<Vec<u8>>],\n\n param_formats: &[crate::Format],\n\n result_format: crate::Format,\n\n ) -> std::result::Result<(), String> {\n\n log::debug!(\n\n \"Send {} prepared query with params [{}]\",\n\n name.unwrap_or(\"anonymous\"),\n\n param_values\n\n .iter()\n\n .map(|x| if let Some(s) = x {\n\n match String::from_utf8(s.to_vec()) {\n\n Ok(str) => format!(\"'{}'\", str),\n\n Err(_) => \"?\".to_string(),\n\n }\n\n } else {\n\n \"null\".to_string()\n\n })\n\n .collect::<Vec<_>>()\n", "file_path": "src/connection/_async.rs", "rank": 47, "score": 32640.940991314445 }, { "content": " * See 
[PQsendDescribePortal](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQSENDDESCRIBEPORTAL).\n\n */\n\n pub fn send_describe_prepared(&self, name: Option<&str>) -> std::result::Result<(), String> {\n\n log::debug!(\n\n \"Sending describe prepared query {}\",\n\n name.unwrap_or(\"anonymous\")\n\n );\n\n\n\n let c_name = crate::ffi::to_cstr(name.unwrap_or_default());\n\n\n\n let success = unsafe {\n\n pq_sys::PQsendDescribePrepared(self.into(), c_name.as_ptr())\n\n };\n\n\n\n if success == 1 {\n\n Ok(())\n\n } else {\n\n Err(self\n\n .error_message()\n\n .unwrap_or_else(|| \"Unknow error\".to_string()))\n", "file_path": "src/connection/_async.rs", "rank": 48, "score": 32640.858251978116 }, { "content": " pub fn host(&self) -> String {\n\n crate::ffi::to_string(unsafe { pq_sys::PQhost(self.into()) })\n\n }\n\n\n\n /**\n\n * Returns the port of the active connection.\n\n *\n\n * See [PQport](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQPORT).\n\n */\n\n pub fn port(&self) -> String {\n\n crate::ffi::to_string(unsafe { pq_sys::PQport(self.into()) })\n\n }\n\n\n\n /**\n\n * Returns the debug TTY of the connection.\n\n *\n\n * See [PQtty](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQTTY).\n\n */\n\n #[deprecated(\n\n note = \"the server no longer pays attention to the TTY setting, but the function remains for backward compatibility.\"\n", "file_path": "src/connection/_status.rs", "rank": 49, "score": 32640.84041287895 }, { "content": " Ok(s)\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Connection {\n\n fn drop(&mut self) {\n\n unsafe {\n\n pq_sys::PQfinish(self.into());\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Connection {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"Connection\")\n\n .field(\"inner\", &self.conn)\n\n .field(\"db\", &self.db())\n\n .field(\"user\", &self.user())\n\n .field(\"pass\", &self.pass())\n", "file_path": "src/connection/mod.rs", "rank": 50, 
"score": 32640.726906216787 }, { "content": "\n\n#[doc(hidden)]\n\nimpl Into<*const pq_sys::pg_conn> for &Connection {\n\n fn into(self) -> *const pq_sys::pg_conn {\n\n self.conn\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl std::convert::TryFrom<*mut pq_sys::pg_conn> for Connection {\n\n type Error = String;\n\n\n\n fn try_from(conn: *mut pq_sys::pg_conn) -> std::result::Result<Self, Self::Error> {\n\n let s = Self { conn };\n\n\n\n if s.status() == crate::connection::Status::Bad {\n\n Err(s\n\n .error_message()\n\n .unwrap_or_else(|| \"Unknow error\".to_string()))\n\n } else {\n", "file_path": "src/connection/mod.rs", "rank": 51, "score": 32640.504038579085 }, { "content": " * notification payload string\n\n */\n\n pub fn extra(&self) -> String {\n\n crate::ffi::to_string(unsafe { (*self.notify).extra })\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl From<*mut pq_sys::pgNotify> for Notify {\n\n fn from(notify: *mut pq_sys::pgNotify) -> Self {\n\n Self { notify }\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl Into<*mut pq_sys::pgNotify> for &Notify {\n\n fn into(self) -> *mut pq_sys::pgNotify {\n\n self.notify\n\n }\n\n}\n", "file_path": "src/connection/notify.rs", "rank": 52, "score": 32640.315563009117 }, { "content": " * See [PQexecParams](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQEXECPARAMS).\n\n */\n\n pub fn exec_params(\n\n &self,\n\n command: &str,\n\n param_types: &[crate::Oid],\n\n param_values: &[Option<Vec<u8>>],\n\n param_formats: &[crate::Format],\n\n result_format: crate::Format,\n\n ) -> crate::Result {\n\n let (values, formats, lengths) =\n\n Self::transform_params(param_values, param_formats);\n\n\n\n if log::log_enabled!(log::Level::Debug) {\n\n use std::convert::TryFrom;\n\n\n\n let mut p = Vec::new();\n\n\n\n for (x, value) in param_values.iter().enumerate() {\n\n let v = if let Some(s) = value {\n", "file_path": "src/connection/_exec.rs", "rank": 53, "score": 32640.292405444372 }, { "content": " }\n\n\n\n /**\n\n * Returns the password 
of the connection.\n\n *\n\n * See [PQpass](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQPASS).\n\n */\n\n pub fn pass(&self) -> Option<String> {\n\n crate::ffi::to_option_string(unsafe { pq_sys::PQpass(self.into()) })\n\n }\n\n\n\n /**\n\n * Returns the server host name of the active connection.\n\n *\n\n * This can be a host name, an IP address, or a directory path if the connection is via Unix\n\n * socket. (The path case can be distinguished because it will always be an absolute path,\n\n * beginning with /.)\n\n *\n\n * See [PQhost](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQHOST).\n\n */\n", "file_path": "src/connection/_status.rs", "rank": 54, "score": 32639.725366704013 }, { "content": " ) -> crate::Result {\n\n log::debug!(\n\n \"Execute {} prepared query with params [{}]\",\n\n name.unwrap_or(\"anonymous\"),\n\n param_values\n\n .iter()\n\n .map(|x| if let Some(s) = x {\n\n match String::from_utf8(s.to_vec()) {\n\n Ok(str) => format!(\"'{}'\", str),\n\n Err(_) => \"?\".to_string(),\n\n }\n\n } else {\n\n \"null\".to_string()\n\n })\n\n .collect::<Vec<_>>()\n\n .join(\", \")\n\n );\n\n\n\n let (values, formats, lengths) =\n\n Self::transform_params(param_values, param_formats);\n", "file_path": "src/connection/_exec.rs", "rank": 55, "score": 32639.612639784344 }, { "content": " if socket < 0 {\n\n Err(())\n\n } else {\n\n Ok(socket)\n\n }\n\n }\n\n\n\n /**\n\n * Returns the process ID (PID) of the backend process handling this connection.\n\n *\n\n * See [PQbackendPID](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQBACKENDPID).\n\n */\n\n pub fn backend_pid(&self) -> u32 {\n\n unsafe { pq_sys::PQbackendPID(self.into()) as u32 }\n\n }\n\n\n\n /**\n\n * Returns `true` if the connection authentication method required a password, but none was\n\n * available. 
Returns `false` if not.\n\n *\n", "file_path": "src/connection/_status.rs", "rank": 56, "score": 32639.54959015669 }, { "content": " }\n\n }\n\n\n\n /**\n\n * Return an array of SSL attribute names available.\n\n *\n\n * See [PQsslAttributeNames](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQSSLATTRIBUTENAMES).\n\n */\n\n pub fn ssl_attribute_names(&self) -> Vec<crate::ssl::Attribute> {\n\n let raw = unsafe { pq_sys::PQsslAttributeNames(self.into()) };\n\n\n\n crate::ffi::vec_from_nta(raw)\n\n .iter()\n\n .map(|x| x.into())\n\n .collect()\n\n }\n\n\n\n /**\n\n * Return a pointer to an SSL-implementation-specific object describing the connection.\n\n *\n", "file_path": "src/connection/_status.rs", "rank": 57, "score": 32639.47629116297 }, { "content": " *\n\n * See [PQdescribePortal](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQDESCRIBEPORTAL).\n\n */\n\n pub fn describe_portal(&self, name: Option<&str>) -> crate::Result {\n\n let c_name = crate::ffi::to_cstr(name.unwrap_or_default());\n\n\n\n unsafe { pq_sys::PQdescribePortal(self.into(), c_name.as_ptr()) }\n\n .into()\n\n }\n\n\n\n /**\n\n * Escape a string for use within an SQL command.\n\n *\n\n * See\n\n * [PQescapeLiteral](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQESCAPELITERAL).\n\n */\n\n pub fn escape_literal(&self, str: &str) -> std::result::Result<String, String> {\n\n crate::escape::literal(&self, str)\n\n }\n\n\n", "file_path": "src/connection/_exec.rs", "rank": 58, "score": 32639.215039951443 }, { "content": " return Default::default();\n\n }\n\n\n\n let mut values = Vec::new();\n\n let mut formats = Vec::new();\n\n let mut lengths = Vec::new();\n\n\n\n for (x, value) in param_values.iter().enumerate() {\n\n let format = param_formats.get(x).unwrap_or(&crate::Format::Text);\n\n formats.push(format.into());\n\n\n\n if let Some(v) = value {\n\n if format == &crate::Format::Text && v.last() != Some(&b'\\0') {\n\n panic!(\"Param value as text 
should be null terminated\");\n\n }\n\n values.push(v.as_ptr() as *const i8);\n\n lengths.push(v.len() as i32);\n\n } else {\n\n values.push(std::ptr::null());\n\n lengths.push(0);\n", "file_path": "src/connection/mod.rs", "rank": 59, "score": 32638.752239109428 }, { "content": "/**\n\n * [Functions Associated with the `COPY`\n\n * Command](https://www.postgresql.org/docs/current/libpq-copy.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Sends data to the server during `libpq::Status::CopyIn` state.\n\n *\n\n * See\n\n * [PQputCopyData](https://www.postgresql.org/docs/current/libpq-copy.html#LIBPQ-PQPUTCOPYDATA).\n\n */\n\n pub fn put_copy_data(&self, buffer: &str) -> std::result::Result<(), String> {\n\n log::debug!(\"Sending copy data\");\n\n\n\n let c_buffer = crate::ffi::to_cstr(buffer);\n\n\n\n let success = unsafe {\n\n pq_sys::PQputCopyData(self.into(), c_buffer.as_ptr(), buffer.len() as i32)\n\n };\n\n\n", "file_path": "src/connection/_copy.rs", "rank": 60, "score": 32638.679119265547 }, { "content": " * See [PQsslStruct](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQSSLSTRUCT).\n\n *\n\n * # Safety\n\n *\n\n * This function returns a `void*` pointer.\n\n */\n\n pub unsafe fn ssl_struct(&self, struct_name: &str) -> *const std::ffi::c_void {\n\n let c_struct_name = crate::ffi::to_cstr(struct_name);\n\n\n\n pq_sys::PQsslStruct(self.into(), c_struct_name.as_ptr())\n\n }\n\n\n\n /**\n\n * Returns the SSL structure used in the connection, or null if SSL is not in use.\n\n *\n\n * See [PQgetssl](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQGETSSL).\n\n *\n\n * # Safety\n\n *\n\n * This function returns a `void*` pointer.\n\n */\n\n pub unsafe fn ssl(&self) -> *const std::ffi::c_void {\n\n pq_sys::PQgetssl(self.into())\n\n }\n\n}\n", "file_path": "src/connection/_status.rs", "rank": 61, "score": 32638.562518535706 }, { "content": "mod cancel;\n\nmod info;\n\nmod notify;\n\nmod status;\n\n\n\npub use cancel::*;\n\npub use 
info::*;\n\npub use notify::*;\n\npub use status::*;\n\n\n\nuse std::convert::TryInto;\n\n\n\npub type NoticeProcessor = pq_sys::PQnoticeProcessor;\n\npub type NoticeReceiver = pq_sys::PQnoticeReceiver;\n\n\n\npub struct Connection {\n\n conn: *mut pq_sys::PGconn,\n\n}\n\n\n\nunsafe impl Send for Connection {}\n", "file_path": "src/connection/mod.rs", "rank": 62, "score": 32637.974700342565 }, { "content": " match success {\n\n -1 => Err(self\n\n .error_message()\n\n .unwrap_or_else(|| \"Unknow error\".to_string())),\n\n 0 => Err(\"Full buffers\".to_string()),\n\n 1 => Ok(()),\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n /**\n\n * Sends end-of-data indication to the server during `libpq::Status::CopyIn` state.\n\n *\n\n * See\n\n * [PQputCopyEnd](https://www.postgresql.org/docs/current/libpq-copy.html#LIBPQ-PQPUTCOPYEND).\n\n */\n\n pub fn put_copy_end(&self, errormsg: Option<&str>) -> std::result::Result<(), String> {\n\n log::debug!(\"End of copy\");\n\n\n\n let cstr = errormsg.map(crate::ffi::to_cstr);\n", "file_path": "src/connection/_copy.rs", "rank": 63, "score": 32637.903861775863 }, { "content": " }\n\n }\n\n\n\n /**\n\n * Submits a request to obtain information about the specified portal, without waiting for completion.\n\n *\n\n * See\n\n * [PQsendDescribePortal](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQSENDDESCRIBEPORTAL).\n\n */\n\n pub fn send_describe_portal(&self, name: Option<&str>) -> std::result::Result<(), String> {\n\n log::debug!(\"Sending describe portal {}\", name.unwrap_or(\"anonymous\"));\n\n\n\n let c_name = crate::ffi::to_cstr(name.unwrap_or_default());\n\n\n\n let success = unsafe {\n\n pq_sys::PQsendDescribePortal(self.into(), c_name.as_ptr())\n\n };\n\n\n\n if success == 1 {\n\n Ok(())\n", "file_path": "src/connection/_async.rs", "rank": 64, "score": 32637.5074121448 }, { "content": 
"\n\ninclude!(\"_async.rs\");\n\ninclude!(\"_cancel.rs\");\n\ninclude!(\"_connect.rs\");\n\ninclude!(\"_control.rs\");\n\ninclude!(\"_copy.rs\");\n\ninclude!(\"_exec.rs\");\n\ninclude!(\"_notice_processing.rs\");\n\ninclude!(\"_notify.rs\");\n\ninclude!(\"_single_row_mode.rs\");\n\ninclude!(\"_ssl.rs\");\n\ninclude!(\"_status.rs\");\n\ninclude!(\"_threading.rs\");\n\n\n\nimpl Connection {\n\n fn transform_params(\n\n param_values: &[Option<Vec<u8>>],\n\n param_formats: &[crate::Format],\n\n ) -> (Vec<*const i8>, Vec<i32>, Vec<i32>) {\n\n if param_values.is_empty() {\n", "file_path": "src/connection/mod.rs", "rank": 65, "score": 32637.493395929985 }, { "content": "/**\n\n * [Asynchronous Command Processing](https://www.postgresql.org/docs/current/libpq-async.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Submits a command to the server without waiting for the result(s).\n\n *\n\n * See\n\n * [PQsendQuery](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQSENDQUERY).\n\n */\n\n pub fn send_query(&self, command: &str) -> std::result::Result<(), String> {\n\n log::debug!(\"Sending query '{}'\", command);\n\n\n\n let c_command = crate::ffi::to_cstr(command);\n\n\n\n let success = unsafe { pq_sys::PQsendQuery(self.into(), c_command.as_ptr()) };\n\n\n\n if success == 1 {\n\n Ok(())\n\n } else {\n", "file_path": "src/connection/_async.rs", "rank": 66, "score": 32637.186842084444 }, { "content": " Err(self\n\n .error_message()\n\n .unwrap_or_else(|| \"Unknow error\".to_string()))\n\n }\n\n }\n\n\n\n /**\n\n * Submits a command and separate parameters to the server without waiting for the result(s).\n\n *\n\n * See\n\n * [PQsendQueryParams](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQSENDQUERYPARAMS).\n\n */\n\n pub fn send_query_params(\n\n &self,\n\n command: &str,\n\n param_types: &[crate::Oid],\n\n param_values: &[Option<Vec<u8>>],\n\n param_formats: &[crate::Format],\n\n result_format: crate::Format,\n\n ) -> std::result::Result<(), 
String> {\n", "file_path": "src/connection/_async.rs", "rank": 67, "score": 32636.646973626877 }, { "content": " let (values, formats, lengths) =\n\n Self::transform_params(param_values, param_formats);\n\n\n\n if log::log_enabled!(log::Level::Debug) {\n\n use std::convert::TryFrom;\n\n\n\n let mut p = Vec::new();\n\n\n\n for (x, value) in param_values.iter().enumerate() {\n\n let v = if let Some(s) = value {\n\n String::from_utf8(s.to_vec()).unwrap_or_else(|_| \"?\".to_string())\n\n } else {\n\n \"null\".to_string()\n\n };\n\n let default_type = crate::types::TEXT;\n\n let t = crate::Type::try_from(\n\n *param_types.get(x).unwrap_or(&default_type.oid)\n\n ).unwrap_or(default_type);\n\n\n\n p.push(format!(\"'{}'::{}\", v, t.name));\n", "file_path": "src/connection/_async.rs", "rank": 68, "score": 32636.565810303662 }, { "content": "pub struct Notify {\n\n notify: *mut pq_sys::pgNotify,\n\n}\n\n\n\nimpl Notify {\n\n /**\n\n * notification channel name\n\n */\n\n pub fn relname(&self) -> String {\n\n crate::ffi::to_string(unsafe { (*self.notify).relname })\n\n }\n\n\n\n /**\n\n * process ID of notifying server process\n\n */\n\n pub fn be_pid(&self) -> u32 {\n\n unsafe { (*self.notify).be_pid as u32 }\n\n }\n\n\n\n /**\n", "file_path": "src/connection/notify.rs", "rank": 69, "score": 32636.460639339715 }, { "content": "\n\nimpl Drop for Notify {\n\n fn drop(&mut self) {\n\n unsafe {\n\n pq_sys::PQfreemem(self.notify as *mut std::ffi::c_void);\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Notify {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_struct(\"Result\")\n\n .field(\"inner\", &self.notify)\n\n .field(\"relname\", &self.relname())\n\n .field(\"be_pid\", &self.be_pid())\n\n .field(\"extra\", &self.extra())\n\n .finish()\n\n }\n\n}\n", "file_path": "src/connection/notify.rs", "rank": 70, "score": 32636.348663315523 }, { "content": " crate::ffi::to_string(unsafe {\n\n pq_sys::PQparameterStatus(self.into(), 
c_param.as_ptr())\n\n })\n\n }\n\n\n\n /**\n\n * Interrogates the frontend/backend protocol being used.\n\n *\n\n * See [PQprotocolVersion](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQPROTOCOLVERSION).\n\n */\n\n pub fn protocol_version(&self) -> i32 {\n\n unsafe { pq_sys::PQprotocolVersion(self.into()) }\n\n }\n\n\n\n /**\n\n * Returns an integer representing the server version.\n\n *\n\n * See [PQserverVersion](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQSERVERVERSION).\n\n */\n\n pub fn server_version(&self) -> i32 {\n", "file_path": "src/connection/_status.rs", "rank": 71, "score": 32636.28078393387 }, { "content": " /**\n\n * Escapes a string for use as an SQL identifier, such as a table, column, or function name.\n\n *\n\n * See\n\n * [PQescapeIdentifier](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQESCAPEIDENTIFIER).\n\n */\n\n pub fn escape_identifier(&self, str: &str) -> std::result::Result<String, String> {\n\n crate::escape::identifier(&self, str)\n\n }\n\n\n\n /**\n\n * Escape string literals, much like `libpq::Connection::literal`.\n\n *\n\n * See\n\n * [PQescapeStringConn](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQESCAPESTRINGCONN).\n\n */\n\n pub fn escape_string(&self, from: &str) -> std::result::Result<String, String> {\n\n crate::escape::string_conn(&self, from)\n\n }\n\n\n", "file_path": "src/connection/_exec.rs", "rank": 72, "score": 32635.782392388282 }, { "content": " }\n\n\n\n /**\n\n * If input is available from the server, consume it.\n\n *\n\n * See\n\n * [PQconsumeInput](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQCONSUMEINPUT).\n\n */\n\n pub fn consume_input(&self) -> std::result::Result<(), String> {\n\n log::debug!(\"Consume input\");\n\n\n\n let success = unsafe { pq_sys::PQconsumeInput(self.into()) };\n\n\n\n if success == 1 {\n\n Ok(())\n\n } else {\n\n Err(self\n\n .error_message()\n\n .unwrap_or_else(|| \"Unknow 
error\".to_string()))\n\n }\n", "file_path": "src/connection/_async.rs", "rank": 73, "score": 32635.515998785897 }, { "content": "/**\n\n * [Behavior in Threaded Programs](https://www.postgresql.org/docs/current/libpq-threading.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Returns the thread safety status of the libpq library.\n\n *\n\n * See\n\n * [PQisthreadsafe](https://www.postgresql.org/docs/current/libpq-threading.html#LIBPQ-PQISTHREADSAFE).\n\n */\n\n pub fn is_thread_safe() -> bool {\n\n unsafe { pq_sys::PQisthreadsafe() != 0 }\n\n }\n\n}\n", "file_path": "src/connection/_threading.rs", "rank": 74, "score": 32635.170079994372 }, { "content": " c_query.as_ptr(),\n\n param_types.len() as i32,\n\n param_types.as_ptr(),\n\n )\n\n }\n\n .into()\n\n }\n\n\n\n /**\n\n * Sends a request to execute a prepared statement with given parameters, and waits for the\n\n * result.\n\n *\n\n * See [PQexecPrepared](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQEXECPREPARED).\n\n */\n\n pub fn exec_prepared(\n\n &self,\n\n name: Option<&str>,\n\n param_values: &[Option<Vec<u8>>],\n\n param_formats: &[crate::Format],\n\n result_format: crate::Format,\n", "file_path": "src/connection/_exec.rs", "rank": 75, "score": 32635.10847868183 }, { "content": " unsafe { pq_sys::PQstatus(self.into()) }.into()\n\n }\n\n\n\n /**\n\n * Returns the current in-transaction status of the server.\n\n *\n\n * See [PQtransactionStatus](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQTRANSACTIONSTATUS).\n\n */\n\n pub fn transaction_status(&self) -> crate::transaction::Status {\n\n unsafe { pq_sys::PQtransactionStatus(self.into()) }.into()\n\n }\n\n\n\n /**\n\n * Looks up a current parameter setting of the server.\n\n *\n\n * See [PQparameterStatus](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQPARAMETERSTATUS).\n\n */\n\n pub fn parameter_status(&self, param: &str) -> String {\n\n let c_param = crate::ffi::to_cstr(param);\n\n\n", "file_path": 
"src/connection/_status.rs", "rank": 76, "score": 32634.994848917995 }, { "content": "/**\n\n * [SSL Support](https://www.postgresql.org/docs/current/libpq-ssl.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Allows applications to select which security libraries to initialize.\n\n *\n\n * See [PQinitOpenSSL](https://www.postgresql.org/docs/current/libpq-ssl.html#LIBPQ-PQINITOPENSSL).\n\n */\n\n pub fn init_openssl(do_ssl: bool, do_crypto: bool) {\n\n unsafe { pq_sys::PQinitOpenSSL(do_ssl as i32, do_crypto as i32); }\n\n }\n\n\n\n /**\n\n * Allows applications to select which security libraries to initialize.\n\n *\n\n * See [PQinitSSL](https://www.postgresql.org/docs/current/libpq-ssl.html#LIBPQ-PQINITSSL).\n\n */\n\n pub fn init_ssl(do_ssl: bool) {\n\n unsafe { pq_sys::PQinitSSL(do_ssl as i32); }\n\n }\n\n}\n", "file_path": "src/connection/_ssl.rs", "rank": 77, "score": 32634.976394540747 }, { "content": " #[cfg(unix)]\n\n {\n\n let options = crate::print::Options {\n\n header: true,\n\n align: true,\n\n standard: false,\n\n html3: false,\n\n expanded: false,\n\n pager: false,\n\n field_sep: \"|\".to_string(),\n\n table_opt: String::new(),\n\n caption: String::new(),\n\n field_name: Vec::new(),\n\n };\n\n\n\n result.print(&std::io::stdout(), &options);\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "src/connection/mod.rs", "rank": 78, "score": 32634.880478131054 }, { "content": "\n\n /**\n\n * Enables tracing of the client/server communication to a debugging file stream.\n\n *\n\n * See [PQtrace](https://www.postgresql.org/docs/current/libpq-control.html#LIBPQ-PQTRACE).\n\n */\n\n #[cfg(unix)]\n\n pub fn trace(&self, file: std::fs::File) {\n\n use std::os::unix::io::IntoRawFd;\n\n\n\n log::debug!(\"Enable trace\");\n\n\n\n let c_mode = crate::ffi::to_cstr(\"w\");\n\n\n\n unsafe {\n\n let stream = libc::fdopen(file.into_raw_fd(), c_mode.as_ptr());\n\n pq_sys::PQtrace(self.into(), stream as *mut pq_sys::__sFILE);\n\n }\n\n }\n\n\n", "file_path": 
"src/connection/_control.rs", "rank": 79, "score": 32634.81853913602 }, { "content": "\n\n if sucess == 1 {\n\n Ok(())\n\n } else {\n\n Err(error)\n\n }\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl From<*mut pq_sys::pg_cancel> for Cancel {\n\n fn from(cancel: *mut pq_sys::pg_cancel) -> Self {\n\n Self { cancel }\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl Into<*mut pq_sys::pg_cancel> for &Cancel {\n\n fn into(self) -> *mut pq_sys::pg_cancel {\n\n self.cancel\n", "file_path": "src/connection/cancel.rs", "rank": 80, "score": 32634.763342352082 }, { "content": "\n\n /**\n\n * Sends a request to create a prepared statement with the given parameters, without waiting\n\n * for completion.\n\n *\n\n * See\n\n * [PQsendPrepare](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQSENDPREPARE).\n\n */\n\n pub fn send_prepare(\n\n &self,\n\n name: Option<&str>,\n\n query: &str,\n\n param_types: &[crate::Oid],\n\n ) -> std::result::Result<(), String> {\n\n log::debug!(\n\n \"Sending prepare {} query '{}' with param types [{}]\",\n\n name.unwrap_or(\"anonymous\"),\n\n query,\n\n param_types\n\n .iter()\n", "file_path": "src/connection/_async.rs", "rank": 81, "score": 32634.759478355572 }, { "content": " * See [PQconnectionNeedsPassword](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQCONNECTIONNEEDSPASSWORD).\n\n */\n\n pub fn needs_password(&self) -> bool {\n\n unsafe { pq_sys::PQconnectionNeedsPassword(self.into()) == 1 }\n\n }\n\n\n\n /**\n\n * Returns `true` if the connection authentication method used a password. 
Returns `false` if\n\n * not.\n\n *\n\n * See [PQconnectionUsedPassword](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQCONNECTIONUSEDPASSWORD).\n\n */\n\n pub fn used_password(&self) -> bool {\n\n unsafe { pq_sys::PQconnectionUsedPassword(self.into()) == 1 }\n\n }\n\n\n\n /**\n\n * Returns `true` if the connection uses SSL, `false` if not.\n\n *\n\n * See [PQsslInUse](https://www.postgresql.org/docs/current/libpq-status.html#LIBPQ-PQSSLINUSE).\n", "file_path": "src/connection/_status.rs", "rank": 82, "score": 32634.685046452923 }, { "content": "/**\n\n * [Command Execution Functions](https://www.postgresql.org/docs/current/libpq-exec.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Submits a command to the server and waits for the result.\n\n *\n\n * See [PQexec](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQEXEC).\n\n */\n\n pub fn exec(&self, query: &str) -> crate::Result {\n\n log::debug!(\"Execute query '{}'\", query);\n\n\n\n let c_query = crate::ffi::to_cstr(query);\n\n unsafe { pq_sys::PQexec(self.into(), c_query.as_ptr()) }.into()\n\n }\n\n\n\n /**\n\n * Submits a command to the server and waits for the result, with the ability to pass\n\n * parameters separately from the SQL command text.\n\n *\n", "file_path": "src/connection/_exec.rs", "rank": 83, "score": 32634.59245180447 }, { "content": " }\n\n\n\n /**\n\n * Returns `true` if a command is busy, that is, `Result` would block waiting for input.\n\n *\n\n * See [PQisBusy](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQISBUSY).\n\n */\n\n pub fn is_busy(&self) -> bool {\n\n unsafe { pq_sys::PQisBusy(self.into()) == 1 }\n\n }\n\n\n\n /**\n\n * Sets the nonblocking status of the connection.\n\n *\n\n * See\n\n * [PQsetnonblocking](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQSETNONBLOCKING).\n\n */\n\n pub fn set_non_blocking(&self, non_blocking: bool) -> std::result::Result<(), ()> {\n\n if non_blocking {\n\n log::debug!(\"Set non 
blocking\");\n", "file_path": "src/connection/_async.rs", "rank": 84, "score": 32634.174394851947 }, { "content": "/**\n\n * [Control Functions](https://www.postgresql.org/docs/current/libpq-control.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Returns the client encoding.\n\n *\n\n * See\n\n * [PQclientEncoding](https://www.postgresql.org/docs/current/libpq-control.html#LIBPQ-PQCLIENTENCODING).\n\n */\n\n pub fn client_encoding(&self) -> crate::Encoding {\n\n unsafe { pq_sys::PQclientEncoding(self.into()) }.into()\n\n }\n\n\n\n /**\n\n * Sets the client encoding.\n\n *\n\n * See [PQsetClientEncoding](https://www.postgresql.org/docs/current/libpq-control.html#LIBPQ-PQSETCLIENTENCODING).\n\n */\n\n pub fn set_client_encoding(&self, encoding: crate::Encoding) {\n", "file_path": "src/connection/_control.rs", "rank": 85, "score": 32634.08123909305 }, { "content": " log::debug!(\"Setting client encoding to '{:?}'\", encoding);\n\n\n\n let c_encoding = crate::ffi::to_cstr(&encoding.to_string());\n\n\n\n unsafe {\n\n pq_sys::PQsetClientEncoding(self.into(), c_encoding.as_ptr());\n\n }\n\n }\n\n\n\n /**\n\n * Determines the verbosity of messages returned by `libpq::Connection::error_message` and\n\n * `libpq::Result::error_message`.\n\n *\n\n * See [PQsetErrorVerbosity](https://www.postgresql.org/docs/current/libpq-control.html#LIBPQ-PQSETERRORVERBOSITY).\n\n */\n\n pub fn set_error_verbosity(&self, verbosity: crate::Verbosity) -> crate::Verbosity {\n\n log::debug!(\"Setting client encoding to '{:?}'\", verbosity);\n\n\n\n unsafe { pq_sys::PQsetErrorVerbosity(self.into(), verbosity.into()) }.into()\n\n }\n", "file_path": "src/connection/_control.rs", "rank": 86, "score": 32633.79575830755 }, { "content": " )\n\n }\n\n .into()\n\n }\n\n\n\n /**\n\n * Submits a request to obtain information about the specified prepared statement, and waits\n\n * for completion.\n\n *\n\n * See 
[PQdescribePrepared](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQDESCRIBEPREPARED).\n\n */\n\n pub fn describe_prepared(&self, name: Option<&str>) -> crate::Result {\n\n let c_name = crate::ffi::to_cstr(name.unwrap_or_default());\n\n\n\n unsafe { pq_sys::PQdescribePrepared(self.into(), c_name.as_ptr()) }\n\n .into()\n\n }\n\n\n\n /**\n\n * Submits a request to obtain information about the specified portal, and waits for completion.\n", "file_path": "src/connection/_exec.rs", "rank": 87, "score": 32633.71068004744 }, { "content": " /**\n\n * Disables tracing started by `libpq::Connection::trace`.\n\n *\n\n * See [PQuntrace](https://www.postgresql.org/docs/current/libpq-control.html#LIBPQ-PQUNTRACE).\n\n */\n\n #[cfg(unix)]\n\n pub fn untrace(&self) {\n\n log::debug!(\"Disable trace\");\n\n\n\n unsafe {\n\n pq_sys::PQuntrace(self.into());\n\n }\n\n }\n\n}\n", "file_path": "src/connection/_control.rs", "rank": 88, "score": 32633.63075925485 }, { "content": " unsafe { pq_sys::PQisnonblocking(self.into()) == 1 }\n\n }\n\n\n\n /**\n\n * Attempts to flush any queued output data to the server.\n\n *\n\n * See [PQflush](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQFLUSH).\n\n */\n\n pub fn flush(&self) -> std::result::Result<(), ()> {\n\n log::debug!(\"Flush\");\n\n\n\n let status = unsafe { pq_sys::PQflush(self.into()) };\n\n\n\n if status == 0 {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n }\n\n}\n", "file_path": "src/connection/_async.rs", "rank": 89, "score": 32633.48129629332 }, { "content": " }\n\n .into()\n\n }\n\n\n\n /**\n\n * Submits a request to create a prepared statement with the given parameters, and waits for completion.\n\n *\n\n * See [PQprepare](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQPREPARE).\n\n */\n\n pub fn prepare(\n\n &self,\n\n name: Option<&str>,\n\n query: &str,\n\n param_types: &[crate::Oid],\n\n ) -> crate::Result {\n\n log::debug!(\n\n \"Prepare {} query '{}' with param types 
[{}]\",\n\n name.unwrap_or(\"anonymous\"),\n\n query,\n\n param_types\n", "file_path": "src/connection/_exec.rs", "rank": 90, "score": 32633.016624302672 }, { "content": "/**\n\n * [Canceling Queries in Progress](https://www.postgresql.org/docs/current/libpq-cancel.html)\n\n */\n\nimpl Connection {\n\n /**\n\n * Creates a data structure containing the information needed to cancel a command issued\n\n * through a particular database connection.\n\n *\n\n * See\n\n * [PQgetCancel](https://www.postgresql.org/docs/current/libpq-cancel.html#LIBPQ-PQGETCANCEL).\n\n */\n\n pub fn cancel(&self) -> crate::connection::Cancel {\n\n unsafe { pq_sys::PQgetCancel(self.into()) }.into()\n\n }\n\n}\n", "file_path": "src/connection/_cancel.rs", "rank": 91, "score": 32632.905602031075 }, { "content": " }\n\n }\n\n\n\n (values, formats, lengths)\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl Into<*mut pq_sys::pg_conn> for &Connection {\n\n fn into(self) -> *mut pq_sys::pg_conn {\n\n self.conn\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl Into<*mut pq_sys::pg_conn> for &mut Connection {\n\n fn into(self) -> *mut pq_sys::pg_conn {\n\n self.conn\n\n }\n\n}\n", "file_path": "src/connection/mod.rs", "rank": 92, "score": 32632.049263681685 }, { "content": " param_types.len() as i32,\n\n param_types.as_ptr(),\n\n )\n\n };\n\n\n\n if success == 1 {\n\n Ok(())\n\n } else {\n\n Err(self\n\n .error_message()\n\n .unwrap_or_else(|| \"Unknow error\".to_string()))\n\n }\n\n }\n\n\n\n /**\n\n * Sends a request to execute a prepared statement with given parameters, without waiting for the result(s).\n\n *\n\n * See [PQsendQueryPrepared](https://www.postgresql.org/docs/current/libpq-async.html#LIBPQ-PQSENDQUERYPREPARED).\n\n */\n\n pub fn send_query_prepared(\n", "file_path": "src/connection/_async.rs", "rank": 93, "score": 32631.96402904607 }, { "content": " String::from_utf8(s.to_vec()).unwrap_or_else(|_| \"?\".to_string())\n\n } else {\n\n \"null\".to_string()\n\n };\n\n let default_type = 
crate::types::TEXT;\n\n let t = crate::Type::try_from(\n\n *param_types.get(x).unwrap_or(&default_type.oid)\n\n ).unwrap_or(default_type);\n\n\n\n p.push(format!(\"'{}'::{}\", v, t.name));\n\n }\n\n\n\n log::debug!(\"Execute query '{}' with params [{}]\", command, p.join(\", \"));\n\n }\n\n\n\n let c_command = crate::ffi::to_cstr(command);\n\n\n\n unsafe {\n\n pq_sys::PQexecParams(\n\n self.into(),\n", "file_path": "src/connection/_exec.rs", "rank": 94, "score": 32631.34572836341 }, { "content": " }\n\n}\n\n\n\nimpl Drop for Cancel {\n\n fn drop(&mut self) {\n\n unsafe {\n\n pq_sys::PQfreeCancel(self.cancel);\n\n }\n\n }\n\n}\n", "file_path": "src/connection/cancel.rs", "rank": 95, "score": 32630.461082758073 }, { "content": " .join(\", \")\n\n );\n\n\n\n let (values, formats, lengths) =\n\n Self::transform_params(param_values, param_formats);\n\n\n\n let c_name = crate::ffi::to_cstr(name.unwrap_or_default());\n\n\n\n let success = unsafe {\n\n pq_sys::PQsendQueryPrepared(\n\n self.into(),\n\n c_name.as_ptr(),\n\n values.len() as i32,\n\n values.as_ptr(),\n\n if lengths.is_empty() {\n\n std::ptr::null()\n\n } else {\n\n lengths.as_ptr()\n\n },\n\n if formats.is_empty() {\n", "file_path": "src/connection/_async.rs", "rank": 96, "score": 32630.36457916137 }, { "content": " /**\n\n * Escapes binary data for use within an SQL command with the type bytea.\n\n *\n\n * See\n\n * [PQescapeByteaConn](https://www.postgresql.org/docs/current/libpq-exec.html#LIBPQ-PQESCAPEBYTEACONN).\n\n */\n\n pub fn escape_bytea(&self, from: &[u8]) -> std::result::Result<Vec<u8>, String> {\n\n crate::escape::bytea_conn(&self, from)\n\n }\n\n}\n", "file_path": "src/connection/_exec.rs", "rank": 97, "score": 32630.08504565079 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum Status {\n\n Ok,\n\n Bad,\n\n /** Waiting for connection to be made. */\n\n Started,\n\n /** Connection OK; waiting to send. */\n\n Made,\n\n /** Waiting for a response from the server. 
*/\n\n AwaitingResponse,\n\n /** Received authentication; waiting for backend start-up to finish. */\n\n AuthOk,\n\n /** Negotiating environment-driven parameter settings. */\n\n Setenv,\n\n /** Negotiating SSL encryption. */\n\n SslStartup,\n\n Needed,\n\n}\n\n\n\nimpl From<pq_sys::_bindgen_ty_2> for Status {\n", "file_path": "src/connection/status.rs", "rank": 98, "score": 32630.083625724725 }, { "content": " #[test]\n\n fn info() {\n\n let conn = crate::test::new_conn();\n\n let _ = conn.info();\n\n }\n\n\n\n #[test]\n\n fn ping() {\n\n assert_eq!(\n\n crate::Connection::ping(&crate::test::dsn()),\n\n crate::ping::Status::Ok\n\n );\n\n }\n\n\n\n #[test]\n\n fn ssl_attribute_names() {\n\n let conn = crate::test::new_conn();\n\n\n\n assert_eq!(\n\n conn.ssl_attribute_names(),\n", "file_path": "src/connection/mod.rs", "rank": 99, "score": 32629.844587849464 } ]
Rust
simulator/src/sim/actions/yin_yang_magic.rs
rainbowbismuth/birb-brains-bot
f168ec06c5c5cc8d41589437c6f91f0d97289167
use crate::sim::actions::{Ability, AbilityImpl, Action, AoE, ALLY_OK, FOE_OK}; use crate::sim::common::{mod_6_formula, AddConditionSpellImpl, ConditionClearSpellImpl}; use crate::sim::{ Combatant, CombatantId, Condition, Element, Event, Simulation, Source, CAN_BE_CALCULATED, CAN_BE_REFLECTED, SILENCEABLE, }; pub const YIN_YANG_MAGIC_ABILITIES: &[Ability] = &[ Ability { name: "Blind", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 4, aoe: AoE::Diamond(1, Some(1)), implementation: &AddConditionSpellImpl { condition: &[Condition::Darkness], can_be_evaded: true, ignore_magic_def: false, base_chance: 200, ctr: 2, range: 5, }, }, Ability { name: "Spell Absorb", flags: FOE_OK | SILENCEABLE, mp_cost: 2, aoe: AoE::None, implementation: &AbsorbSpellImpl { hp_not_mp: false, amount: 0.33, base_chance: 175, ctr: 2, range: 5, }, }, Ability { name: "Life Drain", flags: FOE_OK | SILENCEABLE, mp_cost: 16, aoe: AoE::None, implementation: &AbsorbSpellImpl { hp_not_mp: true, amount: 0.25, base_chance: 185, ctr: 2, range: 5, }, }, Ability { name: "Pray Faith", flags: ALLY_OK | FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 6, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Faith], can_be_evaded: false, ignore_magic_def: false, base_chance: 150, ctr: 4, range: 5, }, }, Ability { name: "Doubt Faith", flags: ALLY_OK | FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 6, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Innocent], can_be_evaded: false, ignore_magic_def: false, base_chance: 150, ctr: 4, range: 5, }, }, Ability { name: "Zombie", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 20, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Undead], can_be_evaded: true, ignore_magic_def: false, base_chance: 115, ctr: 5, range: 5, }, }, Ability { name: "Silence Song", flags: FOE_OK | 
SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 16, aoe: AoE::Diamond(1, Some(1)), implementation: &AddConditionSpellImpl { condition: &[Condition::Silence], can_be_evaded: true, ignore_magic_def: false, base_chance: 180, ctr: 3, range: 5, }, }, Ability { name: "Blind Rage", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 16, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Berserk], can_be_evaded: true, ignore_magic_def: false, base_chance: 130, ctr: 5, range: 5, }, }, Ability { name: "Confusion Song", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 20, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Confusion], can_be_evaded: true, ignore_magic_def: false, base_chance: 135, ctr: 5, range: 5, }, }, Ability { name: "Dispel Magic", flags: FOE_OK | SILENCEABLE | CAN_BE_CALCULATED, mp_cost: 34, aoe: AoE::None, implementation: &ConditionClearSpellImpl { conditions: &[ Condition::Float, Condition::Reraise, Condition::Transparent, Condition::Regen, Condition::Protect, Condition::Shell, Condition::Haste, Condition::Faith, Condition::Reflect, ], base_chance: 200, ignore_magic_def: false, ctr: 3, range: 5, }, }, Ability { name: "Paralyze", flags: ALLY_OK | FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 10, aoe: AoE::Diamond(1, Some(0)), implementation: &AddConditionSpellImpl { condition: &[Condition::DontAct], can_be_evaded: true, ignore_magic_def: false, base_chance: 185, ctr: 5, range: 5, }, }, Ability { name: "Sleep", flags: ALLY_OK | FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 24, aoe: AoE::Diamond(1, Some(1)), implementation: &AddConditionSpellImpl { condition: &[Condition::Sleep], can_be_evaded: true, ignore_magic_def: false, base_chance: 175, ctr: 6, range: 5, }, }, Ability { name: "Petrify", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 16, aoe: 
AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Petrify], can_be_evaded: true, ignore_magic_def: false, base_chance: 125, ctr: 9, range: 5, }, }, ]; struct AbsorbSpellImpl { hp_not_mp: bool, amount: f32, base_chance: i16, range: u8, ctr: u8, } impl AbilityImpl for AbsorbSpellImpl { fn consider<'a>( &self, actions: &mut Vec<Action<'a>>, ability: &'a Ability<'a>, _sim: &Simulation<'a>, _user: &Combatant<'a>, target: &Combatant<'a>, ) { actions.push(Action::new( ability, self.range, Some(self.ctr), target.id(), )); } fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) { let user = sim.combatant(user_id); let target = sim.combatant(target_id); if sim.do_magical_evade(user, target, Source::Ability) { return; } let success_chance = mod_6_formula(user, target, Element::None, self.base_chance, false); if !(sim.roll_auto_succeed() < success_chance) { sim.log_event(Event::AbilityMissed(user_id, target_id)); return; } if self.hp_not_mp { let absorbed_amount = (target.max_hp() as f32 * self.amount) as i16; sim.change_target_hp(target_id, absorbed_amount, Source::Ability); sim.change_target_hp(user_id, -absorbed_amount, Source::Ability); } else { let absorbed_amount = (target.max_mp() as f32 * self.amount) as i16; sim.change_target_mp(target_id, absorbed_amount, Source::Ability); sim.change_target_mp(user_id, -absorbed_amount, Source::Ability); } } }
use crate::sim::actions::{Ability, AbilityImpl, Action, AoE, ALLY_OK, FOE_OK}; use crate::sim::common::{mod_6_formula, AddConditionSpellImpl, ConditionClearSpellImpl}; use crate::sim::{ Combatant, CombatantId, Condition, Element, Event, Simulation, Source, CAN_BE_CALCULATED, CAN_BE_REFLECTED, SILENCEABLE, }; pub const YIN_YANG_MAGIC_ABILITIES: &[Ability] = &[ Ability { name: "Blind", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 4, aoe: AoE::Diamond(1, Some(1)), implementation: &AddConditionSpellImpl { condition: &[Condition::Darkness], can_be_evaded: true, ignore_magic_def: false, base_chance: 200, ctr: 2, range: 5, }, }, Ability { name: "Spell Absorb", flags: FOE_OK | SILENCEABLE, mp_cost: 2, aoe: AoE::None, implementation: &AbsorbSpellImpl { hp_not_mp: false, amount: 0.33, base_chance: 175, ctr: 2, range: 5, }, }, Ability { name: "Life Drain", flags: FOE_OK | SILENCEABLE, mp_cost: 16, aoe: AoE::None, implementation: &AbsorbSpellImpl { hp_not_mp: true, amount: 0.25, base_chance: 185, ctr: 2, range: 5, }, }, Ability { name: "Pray Faith", flags: ALLY_OK | FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 6, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Faith], can_be_evaded: false, ignore_magic_def: false, base_chance: 150, ctr: 4, range: 5, }, }, Ability { name: "Doubt Faith", flags: ALLY_OK | FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 6, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Innocent], can_be_evaded: false, ignore_magic_def: false, base_chance: 150, ctr: 4, range: 5, }, }, Ability { name: "Zombie", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 20, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Undead], can_be_evaded: true, ignore_magic_def: false, base_chance: 115, ctr: 5, range: 5, }, }, Ability { name: "Silence Song", flags: FOE_OK | 
SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 16, aoe: AoE::Diamond(1, Some(1)), implementation: &AddConditionSpellImpl { condition: &[Condition::Silence], can_be_evaded: true, ignore_magic_def: false, base_chance: 180, ctr: 3, range: 5, }, }, Ability { name: "Blind Rage", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 16, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Berserk], can_be_evaded: true, ignore_magic_def: false, base_chance: 130, ctr: 5, range: 5, }, }, Ability { name: "Confusion Song", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 20, aoe: AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Confusion], can_be_evaded: true, ignore_magic_def: false, base_chance: 135, ctr: 5, range: 5, }, }, Ability { name: "Dispel Magic", flags: FOE_OK | SILENCEABLE | CAN_BE_CALCULATED, mp_cost: 34, aoe: AoE::None, implementation: &ConditionClearSpellImpl { conditions: &[ Condition::Float, Condition::Reraise, Condition::Transparent, Condition::Regen, Condition::Protect, Condition::Shell, Condition::Haste, Condition::Faith, Condition::Reflect, ], base_chance: 200, ignore_magic_def: false, ctr: 3, range: 5, }, }, Ability { name: "Paralyze", flags: ALLY_OK | FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 10, aoe: AoE::Diamond(1, Some(0)), implementation: &AddConditionSpellImpl { condition: &[Condition::DontAct], can_be_evaded: true, ignore_magic_def: false, base_chance: 185, ctr: 5, range: 5, }, }, Ability { name: "Sleep", flags: ALLY_OK | FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 24, aoe: AoE::Diamond(1, Some(1)), implementation: &AddConditionSpellImpl { condition: &[Condition::Sleep], can_be_evaded: true, ignore_magic_def: false, base_chance: 175, ctr: 6, range: 5, }, }, Ability { name: "Petrify", flags: FOE_OK | SILENCEABLE | CAN_BE_REFLECTED | CAN_BE_CALCULATED, mp_cost: 16, aoe: 
AoE::None, implementation: &AddConditionSpellImpl { condition: &[Condition::Petrify], can_be_evaded: true, ignore_magic_def: false, base_chance: 125, ctr: 9, range: 5, }, }, ]; struct AbsorbSpellImpl { hp_not_mp: bool, amount: f32, base_chance: i16, range: u8, ctr: u8, } impl AbilityImpl for AbsorbSpellImpl { fn consider<'a>( &self, actions: &mut Vec<Action<'a>>, ability: &'a Ability<'a>, _sim: &Simulation<'a>, _user: &Combatant<'a>, target: &Combatant<'a>, ) { actions.push(Action::new( ability, self.range, Some(self.ctr), target.id(), )); } fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) { let user = sim.combatant(user_id); let target = sim.combatant(target_id); if sim.do_magi
tyMissed(user_id, target_id)); return; } if self.hp_not_mp { let absorbed_amount = (target.max_hp() as f32 * self.amount) as i16; sim.change_target_hp(target_id, absorbed_amount, Source::Ability); sim.change_target_hp(user_id, -absorbed_amount, Source::Ability); } else { let absorbed_amount = (target.max_mp() as f32 * self.amount) as i16; sim.change_target_mp(target_id, absorbed_amount, Source::Ability); sim.change_target_mp(user_id, -absorbed_amount, Source::Ability); } } }
cal_evade(user, target, Source::Ability) { return; } let success_chance = mod_6_formula(user, target, Element::None, self.base_chance, false); if !(sim.roll_auto_succeed() < success_chance) { sim.log_event(Event::Abili
function_block-random_span
[ { "content": "pub fn in_range(user: &Combatant, range: u8, target: &Combatant) -> bool {\n\n let dist = user.distance(target);\n\n dist <= range as i16\n\n}\n\n\n", "file_path": "simulator/src/sim/simulation.rs", "rank": 0, "score": 452225.96033337904 }, { "content": "pub fn can_move_into_range(user: &Combatant, action: &Action, target: &Combatant) -> bool {\n\n if action.ability.aoe.is_line() {\n\n return can_move_into_range_line(user, action, target);\n\n }\n\n\n\n let movement = if user.dont_move() { 0 } else { user.movement() };\n\n user.distance(target) <= action.range as i16 + movement as i16\n\n}\n\n\n", "file_path": "simulator/src/sim/simulation.rs", "rank": 1, "score": 443237.2960545194 }, { "content": "pub fn can_move_into_range_line(user: &Combatant, action: &Action, target: &Combatant) -> bool {\n\n let movement = if user.dont_move() { 0 } else { user.movement() } as i16;\n\n let user_loc = user.panel.location();\n\n let target_loc = target.panel.location();\n\n let x_diff = (user_loc.x - target_loc.x).abs();\n\n let y_diff = (user_loc.y - target_loc.y).abs();\n\n let min_diff = x_diff.min(y_diff);\n\n let max_diff = x_diff.max(y_diff);\n\n // TODO: Revisit, I'm nearly certain this isn't correct though it should be... mostly ok.\n\n movement >= min_diff && movement + action.range as i16 >= min_diff + max_diff\n\n}\n\n\n\n//\n\n//\n\n// # IDEAS:\n\n// #\n\n// # - Need to account for picking up crystals. I think this will go with expanding the\n\n// # where do I move to selection function? Because I will want to get out of AoEs I guess?\n\n// # - Pick up crystal Y/N could just happen after movement.\n\n// # will need a state for 'no longer exists at all?' can I just remove from combatants? do I want to?\n\n// # - Can I keep statistics on how much different actions happen? 
Could be a useful part of testing.\n\n// # - Would be interesting to see if these true positives align with bird's true positives\n\n\n", "file_path": "simulator/src/sim/simulation.rs", "rank": 2, "score": 435856.99889521464 }, { "content": "pub fn mod_5_formula(user: &Combatant, target: &Combatant, element: Element, q: i16) -> i16 {\n\n mod_5_formula_pass_ma(user.ma() as i16, user, target, element, q)\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 3, "score": 435375.2408591497 }, { "content": "pub fn attack_range(sim: &Simulation, user: &Combatant, target: &Combatant) -> u8 {\n\n if user.frog() || user.berserk() && user.monster() {\n\n 1\n\n } else {\n\n user.main_hand().map_or(1, |eq| {\n\n let wp_range = eq.range as u8;\n\n if eq.weapon_type == Some(WeaponType::Bow) {\n\n // TODO: This doesn't really make sense to put here, but I'm going to try it anyways\n\n let user_height = sim.combatant_height(user.id());\n\n let target_height = sim.combatant_height(target.id());\n\n let bonus = (user_height - target_height) / 2.0;\n\n ((wp_range as i8) + (bonus as i8).min(0)) as u8\n\n } else {\n\n wp_range\n\n }\n\n })\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/attack.rs", "rank": 4, "score": 433365.5447387659 }, { "content": "fn throw_formula(user: &Combatant, target: &Combatant, element: Element, wp: i16) -> i16 {\n\n let mut speed = user.speed() as i16;\n\n\n\n // 1. If target has Defense UP, then (Sp1 = [Sp0 * 2/3]), else Sp1 = Sp0\n\n if target.defense_up() {\n\n speed = (speed * 2) / 3;\n\n }\n\n\n\n // 2. If target has Protect, then (Sp2 = [Sp1 * 2/3]), else Sp2 = Sp1\n\n if target.protect() {\n\n speed = (speed * 2) / 3;\n\n }\n\n // 3. If target is Charging, then (Sp3 = [Sp2 * 3/2]), else Sp3 = Sp2\n\n if target.charging() {\n\n speed = (speed * 3) / 2;\n\n }\n\n\n\n // 4. 
If target is Sleeping, then (Sp4 = [Sp3 * 3/2]), else Sp4 = Sp3\n\n if target.sleep() {\n\n speed = (speed * 3) / 2;\n", "file_path": "simulator/src/sim/actions/throw.rs", "rank": 5, "score": 396277.1800797878 }, { "content": "pub fn in_range_panel(user: &Combatant, action: &Action, panel: Panel) -> bool {\n\n if action.ability.aoe.is_line() {\n\n user.panel.lined_up(panel) && user.panel.distance(panel) <= action.range as i16\n\n } else {\n\n let dist = user.panel.distance(panel);\n\n dist <= action.range as i16\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/simulation.rs", "rank": 6, "score": 391410.1460090363 }, { "content": "fn filter_target_level(user: &Combatant, ability: &Ability, target: &Combatant) -> bool {\n\n let flags = ability.flags;\n\n if target.crystal() || target.jumping() {\n\n false\n\n } else if flags & TARGET_NOT_SELF != 0 && user.id() == target.id() {\n\n false\n\n } else if flags & TARGET_SELF_ONLY != 0 && user.id() != target.id() {\n\n false\n\n } else if flags & ALLY_OK == 0 && user.ally(target) && !user.confusion() {\n\n false\n\n } else if flags & FOE_OK == 0 && user.foe(target) && !user.confusion() {\n\n false\n\n } else if flags & NOT_ALIVE_OK == 0 && !target.alive() {\n\n false\n\n } else if flags & PETRIFY_OK == 0 && target.petrify() {\n\n false\n\n } else if flags & USE_ON_CRITICAL_ONLY != 0 && !target.critical() {\n\n false\n\n } else if flags & UNDER_50_PERCENT_HP_ONLY != 0 && target.hp_percent() > 0.50 {\n\n false\n\n } else if ability.flags & MISS_SLEEPING != 0 && target.sleep() {\n\n false\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 7, "score": 380236.65295559034 }, { "content": "fn ai_calculate_target_value(user: &Combatant, target: &Combatant, ignore_confusion: bool) -> f32 {\n\n let mut priority = target.hp_percent();\n\n priority += -0.51 * target.broken_equip_count() as f32;\n\n priority += ai_calculate_status_target_value_mod(target, ignore_confusion);\n\n priority 
+= ai_calculate_caster_hate_mod(target);\n\n // TODO: Golem fear\n\n\n\n priority += ai_calculate_stat_buff_mod(target);\n\n\n\n if user.foe(target) {\n\n -priority\n\n } else {\n\n priority\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/target_value.rs", "rank": 8, "score": 378300.7488424706 }, { "content": "pub fn perform_action<'a>(sim: &mut Simulation<'a>, user_id: CombatantId, action: Action<'a>) {\n\n let ability = action.ability;\n\n let mut action_target = action.target;\n\n\n\n if action.ability.flags & TRIGGERS_HAMEDO != 0 {\n\n if let Some(target_id) = action_target.to_target_id(sim) {\n\n if sim.try_hamedo(user_id, target_id) {\n\n return;\n\n }\n\n }\n\n }\n\n\n\n let user = sim.combatant_mut(user_id);\n\n if !action_target.is_math() && ability.mp_cost > 0 && !user.no_mp() {\n\n let mp_cost = if user.halve_mp() {\n\n 1.max(ability.mp_cost / 2)\n\n } else {\n\n ability.mp_cost\n\n };\n\n let new_mp = user.mp() - mp_cost;\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 9, "score": 371186.84289090364 }, { "content": "pub fn perform_action_slow<'a>(sim: &mut Simulation<'a>, user_id: CombatantId, action: Action<'a>) {\n\n let ability = action.ability;\n\n let user = sim.combatant(user_id);\n\n\n\n // TODO: These are redundant with the entire check below..\n\n if action.ability.flags & SILENCEABLE != 0 && user.silence() {\n\n sim.log_event(Event::Silenced(user_id, action));\n\n return;\n\n } else if ability.mp_cost > 0 && user.mp() < ability.mp_cost {\n\n sim.log_event(Event::NoMP(user_id, action));\n\n return;\n\n }\n\n\n\n if !filter_ability_level(user, ability) {\n\n return;\n\n }\n\n\n\n perform_action(sim, user_id, action)\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 10, "score": 365959.4043110938 }, { "content": "fn perform_frog_attack(sim: &mut Simulation, user_id: CombatantId, target_id: CombatantId) {\n\n let pa = sim.combatant(user_id).pa_bang();\n\n sim.change_target_hp(target_id, pa.into(), 
Source::Weapon(user_id, None));\n\n sim.try_countergrasp(user_id, target_id);\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/attack.rs", "rank": 11, "score": 362417.19073768635 }, { "content": "fn mod_4_formula(user: &Combatant, target: &Combatant, k: f32) -> f32 {\n\n let mut speed = user.speed() as i16;\n\n\n\n // 1. If caster has Attack UP, then (Sp1 = [Sp0 * 4/3]), else Sp1 = Sp0\n\n if user.attack_up() {\n\n speed = (speed * 4) / 3;\n\n }\n\n\n\n // 2. If caster has Martial Arts, then (Sp2 = [Sp1 * 3/2]), else Sp2 = Sp1\n\n if user.martial_arts() {\n\n speed = (speed * 3) / 2;\n\n }\n\n // 3. If target has Defense UP, then (Sp3 = [Sp2 * 2/3]), else Sp3 = Sp2\n\n if target.defense_up() {\n\n speed = (speed * 2) / 3;\n\n }\n\n // 4. If target has Protect, then (Sp4 = [Sp3 * 2/3]), else Sp4 = Sp3\n\n if target.protect() {\n\n speed = (speed * 2) / 3;\n\n }\n", "file_path": "simulator/src/sim/actions/steal.rs", "rank": 12, "score": 357424.7595849221 }, { "content": "fn should_attack_ally(user: &Combatant, target: &Combatant) -> bool {\n\n if DAMAGE_CANCELS\n\n .iter()\n\n .any(|condition| target.has_condition(*condition))\n\n {\n\n return true;\n\n }\n\n if let Some(element) = user.main_hand().and_then(|w| w.weapon_element) {\n\n target.absorbs(element)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/attack.rs", "rank": 13, "score": 353580.3367003368 }, { "content": "fn should_attack_foe(user: &Combatant, target: &Combatant) -> bool {\n\n if let Some(element) = user.main_hand().and_then(|w| w.weapon_element) {\n\n if target.absorbs(element) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/attack.rs", "rank": 14, "score": 353580.3367003368 }, { "content": "fn item_range(user: &Combatant) -> u8 {\n\n if user.throw_item() {\n\n 4\n\n } else {\n\n 1\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/item.rs", "rank": 16, "score": 350921.95347235544 }, { "content": "pub fn 
should_heal_foe(target: &Combatant, hurts_undead: bool) -> bool {\n\n hurts_undead && target.undead()\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 17, "score": 343989.2964360351 }, { "content": "pub fn should_heal_ally(target: &Combatant, hurts_undead: bool) -> bool {\n\n if hurts_undead && target.undead() {\n\n false\n\n } else {\n\n target.hp_percent() <= 0.50\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 18, "score": 343989.2964360351 }, { "content": "fn filter_ability_level(user: &Combatant, ability: &Ability) -> bool {\n\n let flags = ability.flags;\n\n if flags & BERSERK_OK == 0 && user.berserk() {\n\n false\n\n } else if flags & FROG_OK == 0 && user.frog() {\n\n false\n\n } else if flags & SILENCEABLE != 0 && user.silence() {\n\n false\n\n } else if ability.mp_cost > 0 && user.mp() < ability.mp_cost {\n\n false\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 19, "score": 335301.8597334186 }, { "content": "fn ai_calculate_status_target_value_mod(target: &Combatant, ignore_confusion: bool) -> f32 {\n\n let mut total = 0.0;\n\n\n\n // # 0x0058: Current Statuses 1\n\n // # \t\t0x80 - \t\t\t\t\t\t\t0% (0000)\n\n // # \t\t0x40 - Crystal\t\t\t\t\t-150% -c0(ff40)\n\n // # \t\t0x20 - Dead\t\t\t\t\t\t-150% -c0(ff40)\n\n // # \t\t0x10 - Undead\t\t\t\t\t-30.5% -27(ffd9)\n\n // # \t\t0x08 - Charging\t\t\t\t\t0% (0000)\n\n // # \t\t0x04 - Jump\t\t\t\t\t\t0% (0000)\n\n // # \t\t0x02 - Defending\t\t\t\t0% (0000)\n\n // # \t\t0x01 - Performing\t\t\t\t0% (0000)\n\n if target.crystal() {\n\n total -= 1.5;\n\n }\n\n\n\n if target.dead() {\n\n total -= 1.5;\n\n }\n\n\n", "file_path": "simulator/src/sim/target_value.rs", "rank": 20, "score": 330952.5212987247 }, { "content": "pub fn has_ability(combatants: &[CombatantInfo], name: &str) -> bool {\n\n combatants\n\n .iter()\n\n .any(|info| info.abilities.iter().any(|ability| ability.name == name))\n\n}\n\n\n", "file_path": 
"simulator/src/runner.rs", "rank": 21, "score": 324377.83970204793 }, { "content": "struct RaiseSpellImpl {\n\n hp_percent: f32,\n\n base_chance: i16,\n\n ctr: u8,\n\n range: u8,\n\n}\n\n\n\nimpl AbilityImpl for RaiseSpellImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if user.ally(target) && !should_heal_ally(target, true) {\n\n return;\n\n }\n\n if user.foe(target) && !should_heal_foe(target, true) {\n", "file_path": "simulator/src/sim/actions/white_magic.rs", "rank": 22, "score": 288337.2239387086 }, { "content": "pub fn has_equip(combatants: &[CombatantInfo], name: &str) -> bool {\n\n combatants.iter().any(|info| {\n\n info.main_hand.map_or(false, |eq| eq.name == name)\n\n || info.off_hand.map_or(false, |eq| eq.name == name)\n\n || info.headgear.map_or(false, |eq| eq.name == name)\n\n || info.armor.map_or(false, |eq| eq.name == name)\n\n || info.accessory.map_or(false, |eq| eq.name == name)\n\n })\n\n}\n\n\n", "file_path": "simulator/src/runner.rs", "rank": 23, "score": 288246.2492952454 }, { "content": "pub fn has_skill(combatants: &[CombatantInfo], name: &str) -> bool {\n\n combatants\n\n .iter()\n\n .any(|info| info.all_skills.iter().any(|skill| *skill == name))\n\n}\n\n\n", "file_path": "simulator/src/runner.rs", "rank": 24, "score": 288246.2492952454 }, { "content": "pub fn combatant_height(tile: &Tile, combatant: &Combatant) -> f32 {\n\n let tile_height = tile_height(tile);\n\n let float_bonus = if combatant.float() {\n\n 0.5 + tile.depth as f32\n\n } else {\n\n 0.0\n\n };\n\n tile_height + float_bonus\n\n}\n\n\n", "file_path": "simulator/src/sim/simulation.rs", "rank": 25, "score": 278862.7300663178 }, { "content": "pub fn combatant_submerged(tile: &Tile, combatant: &Combatant) -> bool {\n\n if tile.depth >= 2 {\n\n return !combatant.float();\n\n }\n\n false\n\n}\n\n\n", "file_path": 
"simulator/src/sim/simulation.rs", "rank": 26, "score": 278792.3816428898 }, { "content": "struct ElementalImpl {\n\n range: u8,\n\n element: Element,\n\n terrain: &'static [u8],\n\n add_conditions: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for ElementalImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n let tile = sim.tile(user.panel);\n\n if !self.terrain.contains(&tile.surface_type) {\n\n return;\n\n }\n", "file_path": "simulator/src/sim/actions/elemental.rs", "rank": 27, "score": 274707.7895155607 }, { "content": "fn ai_calculate_stat_buff_mod(target: &Combatant) -> f32 {\n\n // The game's actual AI doesn't do this kind of calculation, but I'm going to add\n\n // a teeny bonus for stat buffs to try to emulate the 'only use stat buffs when there is\n\n // no other options' behaviour.\n\n target.pa_mod as f32 * 1e-5\n\n + target.ma_mod as f32 * 1e-5\n\n + target.speed_mod as f32 * 1e-5\n\n + target.raw_brave as f32 * 1e-5\n\n + target.raw_faith as f32 * 1e-5\n\n + target.ct as f32 * 1e-5\n\n}\n\n\n", "file_path": "simulator/src/sim/target_value.rs", "rank": 28, "score": 274212.96447549167 }, { "content": "fn ai_calculate_caster_hate_mod(target: &Combatant) -> f32 {\n\n if !target.can_cast_mp_ability() {\n\n 0.0\n\n } else {\n\n (target.mp_percent() / 16.0) * (target.info.number_of_mp_using_abilities as f32)\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/target_value.rs", "rank": 29, "score": 274212.96447549167 }, { "content": "pub fn describe_event(event: &Event, combatants: &[Combatant], arena: &Arena) -> String {\n\n match event {\n\n Event::DidNothing(target_id) => format!(\n\n \"{} did nothing!\",\n\n describe_combatant(*target_id, combatants, arena)\n\n ),\n\n\n\n Event::HpDamage(target_id, amount, src) => format!(\n\n \"{} took {} damage from {}\",\n\n describe_combatant(*target_id, combatants, 
arena),\n\n amount,\n\n describe_source(*src, combatants)\n\n ),\n\n\n\n Event::HpHeal(target_id, amount, src) => format!(\n\n \"{} was healed for {} HP from {}\",\n\n describe_combatant(*target_id, combatants, arena),\n\n amount.abs(),\n\n describe_source(*src, combatants)\n\n ),\n", "file_path": "simulator/src/sim/log/entry.rs", "rank": 30, "score": 268694.3198739281 }, { "content": "pub trait AbilityImpl: Sync {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n );\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId);\n\n}\n\n\n\npub type AbilityFlags = u32;\n\n\n\npub const BERSERK_OK: AbilityFlags = 1;\n\npub const ALLY_OK: AbilityFlags = 1 << 1;\n\npub const FOE_OK: AbilityFlags = 1 << 2;\n\npub const NOT_ALIVE_OK: AbilityFlags = 1 << 3;\n\npub const PETRIFY_OK: AbilityFlags = 1 << 4;\n\npub const SILENCEABLE: AbilityFlags = 1 << 5;\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 31, "score": 266726.32185601053 }, { "content": "pub fn instant_aoe_consider<'a>(\n\n range: u8,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n target: &Combatant<'a>,\n\n) {\n\n match ability.aoe {\n\n AoE::Diamond(size, _) => {\n\n for target_panel in target.panel.diamond(size) {\n\n actions.push(Action::target_panel(ability, range, None, target_panel));\n\n }\n\n }\n\n _ => {\n\n actions.push(Action::new(ability, range, None, target.id()));\n\n }\n\n }\n\n}\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 32, "score": 266236.15707139776 }, { "content": "pub fn unit_card(user: &Combatant) -> String {\n\n let l1 = format!(\n\n \" HP {:>10} | {:>1}. 
{:<10}\",\n\n user.max_hp(),\n\n user.id().id,\n\n user.info.name\n\n );\n\n let l2 = format!(\n\n \" MP {:>10} | {:<10}\",\n\n user.max_mp(),\n\n user.info.job\n\n );\n\n let l4 = format!(\n\n \" {:>10} {:>1} | {:<2} Brave {:<02} Faith {:<02}\",\n\n \"\",\n\n \"\",\n\n user.info.sign.to_emoji(),\n\n user.raw_brave,\n\n user.raw_faith\n\n );\n", "file_path": "simulator/src/sim/log/mod.rs", "rank": 33, "score": 265583.58885375294 }, { "content": "struct HealSongImpl {\n\n ct: u8,\n\n hp_not_mp: bool,\n\n bonus: i16,\n\n}\n\n\n\nimpl AbilityImpl for HealSongImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n _target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action {\n\n ability,\n\n range: 255,\n\n ctr: Some(self.ct),\n\n target: ActionTarget::Panel(user.panel),\n", "file_path": "simulator/src/sim/actions/perform.rs", "rank": 34, "score": 256662.70064387005 }, { "content": "struct PotionAbilityImpl {\n\n hp_amount: i16,\n\n mp_amount: i16,\n\n}\n\n\n\nimpl AbilityImpl for PotionAbilityImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if user.ally(target) && !should_heal_ally(target, true) {\n\n return;\n\n }\n\n if user.foe(target) && !should_heal_foe(target, true) {\n\n return;\n\n }\n", "file_path": "simulator/src/sim/actions/item.rs", "rank": 35, "score": 256562.17686457795 }, { "content": "struct CarbunkleImpl {\n\n base_chance: i16,\n\n heal_percent: f32,\n\n conditions: &'static [Condition],\n\n range: u8,\n\n ctr: u8,\n\n}\n\n\n\nimpl AbilityImpl for CarbunkleImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(\n\n ability,\n\n 
self.range,\n", "file_path": "simulator/src/sim/actions/summon_magic.rs", "rank": 36, "score": 256403.53687615442 }, { "content": "pub fn has_monster(combatants: &[CombatantInfo]) -> bool {\n\n combatants.iter().any(|info| info.gender == Gender::Monster)\n\n}\n\n\n", "file_path": "simulator/src/runner.rs", "rank": 37, "score": 255225.82802323453 }, { "content": "pub fn describe_source(src: Source, combatants: &[Combatant]) -> String {\n\n match src {\n\n Source::Phase => String::from(\"the current phase\"),\n\n Source::Ability => String::from(\"the used ability\"),\n\n Source::Constant(str) => str.to_owned(),\n\n Source::Condition(cond) => String::from(cond.name()),\n\n Source::Weapon(c_id, Some(weapon)) => format!(\n\n \"{}\\'s {}\",\n\n describe_combatant_short(c_id, combatants),\n\n weapon.name\n\n ),\n\n Source::Weapon(c_id, None) => format!(\n\n \"{}\\'s bare hands\",\n\n describe_combatant_short(c_id, combatants)\n\n ),\n\n }\n\n}\n", "file_path": "simulator/src/sim/log/entry.rs", "rank": 38, "score": 253131.2937275598 }, { "content": "struct SelfDestructImpl;\n\n\n\nimpl AbilityImpl for SelfDestructImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 0, None, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, _target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let hp_diff = user.max_hp() - user.hp();\n\n for target_panel in user.panel.diamond(2) {\n\n if let Some(target_id) = sim.combatant_on_panel(target_panel) {\n\n if user_id == target_id {\n\n continue;\n\n }\n\n sim.add_condition(target_id, Condition::Oil, Source::Ability);\n\n sim.change_target_hp(target_id, hp_diff, Source::Ability);\n\n }\n\n }\n\n sim.add_condition(user_id, Condition::Death, Source::Ability);\n\n }\n\n}\n", "file_path": 
"simulator/src/sim/actions/monster/bomb.rs", "rank": 39, "score": 250314.43914942455 }, { "content": "struct AbsorbSwordImpl {\n\n hp_not_mp: bool,\n\n range: u8,\n\n ctr: u8,\n\n}\n\n\n\nimpl AbilityImpl for AbsorbSwordImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(\n\n ability,\n\n self.range,\n\n Some(self.ctr),\n\n target.id(),\n", "file_path": "simulator/src/sim/actions/battle_skill.rs", "rank": 40, "score": 250306.0583663613 }, { "content": "struct TripleElementalImpl {\n\n ma_plus: i16,\n\n ctr: Option<u8>,\n\n element: Element,\n\n range: u8,\n\n}\n\n\n\nimpl AbilityImpl for TripleElementalImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n // This will only very roughly simulate the 'random hits' AI characteristic\n\n if sim.roll_inclusive(1, 2) == 1 {\n\n return;\n\n }\n", "file_path": "simulator/src/sim/actions/monster/tiamat.rs", "rank": 41, "score": 250243.98224695632 }, { "content": "struct ConditionCureItemImpl {\n\n cures: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for ConditionCureItemImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if !self.cures.iter().any(|cond| target.has_condition(*cond)) {\n\n return;\n\n }\n\n actions.push(Action::new(ability, item_range(user), None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, _user_id: CombatantId, target_id: CombatantId) {\n\n for condition in self.cures {\n\n sim.cancel_condition(target_id, *condition, Source::Ability);\n\n }\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/item.rs", "rank": 42, 
"score": 250236.21887866015 }, { "content": "struct MagicBreakImpl {\n\n base_chance: i16,\n\n mp_percent: f32,\n\n}\n\n\n\nimpl AbilityImpl for MagicBreakImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(\n\n ability,\n\n user.main_hand().map_or(1, |eq| eq.range),\n\n None,\n\n target.id(),\n\n ));\n", "file_path": "simulator/src/sim/actions/battle_skill.rs", "rank": 43, "score": 250062.62628969384 }, { "content": "pub fn tile_height(tile: &Tile) -> f32 {\n\n tile.height as f32 + tile.slope_height as f32 / 2.0\n\n}\n", "file_path": "simulator/src/sim/simulation.rs", "rank": 44, "score": 244910.54646482697 }, { "content": "pub fn ai_target_value_sum(\n\n user: &Combatant,\n\n combatants: &[Combatant],\n\n ignore_confusion: bool,\n\n) -> f32 {\n\n combatants\n\n .iter()\n\n .map(|target| ai_calculate_target_value(user, target, ignore_confusion))\n\n .sum()\n\n}\n\n\n", "file_path": "simulator/src/sim/target_value.rs", "rank": 45, "score": 243809.71970265714 }, { "content": "pub fn do_hp_heal(\n\n sim: &mut Simulation,\n\n target_id: CombatantId,\n\n mut amount: i16,\n\n hurts_undead: bool,\n\n) {\n\n let target = sim.combatant(target_id);\n\n if hurts_undead && target.undead() {\n\n amount = -amount;\n\n }\n\n sim.change_target_hp(target_id, -amount, Source::Ability);\n\n}\n\n\n\npub struct AddConditionSpellImpl {\n\n pub condition: &'static [Condition],\n\n pub can_be_evaded: bool,\n\n pub ignore_magic_def: bool,\n\n pub base_chance: i16,\n\n pub range: u8,\n\n pub ctr: u8,\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 46, "score": 240348.20935174392 }, { "content": "pub fn do_hp_damage(\n\n sim: &mut Simulation,\n\n target_id: CombatantId,\n\n mut amount: i16,\n\n heals_undead: bool,\n\n) {\n\n let target = sim.combatant(target_id);\n\n if heals_undead && target.undead() {\n\n 
amount = -amount;\n\n }\n\n sim.change_target_hp(target_id, amount, Source::Ability);\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 47, "score": 240348.20935174392 }, { "content": "pub fn mod_6_formula(\n\n user: &Combatant,\n\n target: &Combatant,\n\n element: Element,\n\n base_chance: i16,\n\n ignore_magic_def: bool,\n\n) -> f32 {\n\n let mut ma = user.ma();\n\n\n\n // 1. If caster has 'Strengthen: [element of spell]', then (MA1 = [MA0 * 5/4])\n\n // else MA1 = MA0\n\n if user.strengthens(element) {\n\n ma = (ma * 5) / 4;\n\n }\n\n\n\n // 2. If caster has Magic AttackUP, then (MA2 = [MA1 * 4/3]), else MA2 = MA1\n\n if user.magic_attack_up() {\n\n ma = (ma * 4) / 3;\n\n }\n\n\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 48, "score": 240348.20935174392 }, { "content": "pub fn ai_consider_actions<'a>(\n\n actions: &mut Vec<Action<'a>>,\n\n sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n targets: &[Combatant<'a>],\n\n) {\n\n let foes_have_non_disabled = sim.ai_foes_have_non_disabled_units(user);\n\n for ability in user.abilities() {\n\n if !filter_ability_level(user, ability) {\n\n continue;\n\n }\n\n\n\n // TODO: Not sure what the probability is supposed to be here.\n\n if ability.flags & STATS_ABILITY != 0 && sim.roll_inclusive(0, 1) == 1 {\n\n continue;\n\n }\n\n\n\n for target in targets {\n\n if !filter_target_level(user, ability, target) {\n\n continue;\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 49, "score": 239157.80651325514 }, { "content": "pub fn describe_target_short(\n\n target: ActionTarget,\n\n combatants: &[Combatant],\n\n arena: &Arena,\n\n) -> String {\n\n match target {\n\n ActionTarget::Id(target_id) => describe_combatant_short(target_id, combatants),\n\n ActionTarget::Panel(location) => describe_location(location, arena),\n\n ActionTarget::Math(attr, algo) => describe_math(attr, algo),\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/log/entry.rs", "rank": 50, "score": 
236908.44474349282 }, { "content": "pub fn mod_5_formula_xa(\n\n mut xa: i16,\n\n user: &Combatant,\n\n target: &Combatant,\n\n element: Element,\n\n ignores_shell_and_defense_up: bool,\n\n) -> i16 {\n\n // 1. If caster has 'Strengthen: [element of spell]', then (MA1 = [MA0 * 5/4])\n\n // else MA1 = MA0\n\n if user.strengthens(element) {\n\n xa = (xa * 5) / 4;\n\n }\n\n // 2. If caster has Magic AttackUP, then (MA2 = [MA1 * 4/3]), else MA2 = MA1\n\n if user.magic_attack_up() {\n\n xa = (xa * 4) / 3;\n\n }\n\n\n\n // 3. If target has Magic DefendUP, then (MA3 = [MA2 * 2/3]), else MA3 = MA2\n\n if !ignores_shell_and_defense_up && target.magic_defense_up() {\n\n xa = (xa * 2) / 3;\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 51, "score": 235119.85657323844 }, { "content": "pub fn mod_2_formula_xa(\n\n sim: &Simulation,\n\n mut xa: i16,\n\n user: &Combatant,\n\n target: &Combatant,\n\n element: Element,\n\n crit: bool,\n\n always_apply_martial_arts: bool,\n\n ignores_protect_and_defense_up: bool,\n\n) -> i16 {\n\n // 1. If this is a critical hit, then XA1 = XA0 + (1..XA0) - 1.\n\n if crit {\n\n xa += sim.roll_inclusive(1, xa.max(2)) - 1;\n\n }\n\n\n\n // 2. If the attack is endowed with an Element, and the caster has\n\n // equipment that 'Strengthens' that element, then (XA2 = [XA1 * 5/4]),\n\n // else XA2 = XA1\n\n if user.strengthens(element) {\n\n xa = (xa * 5) / 4;\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 52, "score": 235119.85657323844 }, { "content": "pub fn mod_3_formula_xa(\n\n mut xa: i16,\n\n user: &Combatant,\n\n target: &Combatant,\n\n always_apply_martial_arts: bool,\n\n ignores_protect_and_defense_up: bool,\n\n) -> i16 {\n\n // 3. If caster has Attack UP, then (XA3 = [XA2 * 4/3]), else XA3 = XA2\n\n if user.attack_up() {\n\n xa = (xa * 4) / 3;\n\n }\n\n\n\n // 4. 
If caster has Martial Arts AND this is not a wpn-elemental attack,\n\n // then (XA4 = [XA3 * 3/2]), else XA4 = XA3\n\n if user.martial_arts() && (always_apply_martial_arts || user.barehanded()) {\n\n xa = (xa * 3) / 2;\n\n }\n\n\n\n // 5. If caster is Berserk, then (XA5 = [XA4 * 3/2]), else XA5 = XA4\n\n if user.berserk() {\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 53, "score": 235119.85657323844 }, { "content": "pub fn mod_5_formula_pass_ma(\n\n ma: i16,\n\n user: &Combatant,\n\n target: &Combatant,\n\n element: Element,\n\n q: i16,\n\n) -> i16 {\n\n let ma = mod_5_formula_xa(ma, user, target, element, false);\n\n // damage = [(CFa * TFa * Q * MA5 * N) / (10000 * D)]\n\n (user.faith_percent() * target.faith_percent() * q as f32 * ma as f32) as i16\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/common.rs", "rank": 54, "score": 230179.91673174233 }, { "content": "pub fn describe_combatant_short(c_id: CombatantId, combatants: &[Combatant]) -> String {\n\n let combatant = &combatants[c_id.index()];\n\n match combatant.team() {\n\n Team::Left => combatant.name().red().to_string(),\n\n Team::Right => combatant.name().blue().to_string(),\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/log/entry.rs", "rank": 55, "score": 225507.25145247148 }, { "content": "struct StealImpl {\n\n base_chance: i16,\n\n equip_slot: EquipSlot,\n\n}\n\n\n\nimpl AbilityImpl for StealImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if target.monster() {\n\n return;\n\n }\n\n match self.equip_slot {\n\n EquipSlot::Weapon => {\n\n if target.main_hand().is_none() {\n", "file_path": "simulator/src/sim/actions/steal.rs", "rank": 56, "score": 222811.73590217758 }, { "content": "struct ThrowImpl {\n\n items: &'static [ThrowableItem],\n\n}\n\n\n\nimpl AbilityImpl for ThrowImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: 
&mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, user.movement(), None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n\n\n if sim.do_physical_evade(user, target, None, Source::Ability) {\n", "file_path": "simulator/src/sim/actions/throw.rs", "rank": 57, "score": 222811.73590217758 }, { "content": "struct NamelessImpl {\n\n hit_chance: f32,\n\n ct: u8,\n\n conditions: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for NamelessImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n _target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action {\n\n ability,\n\n range: 255,\n\n ctr: Some(self.ct),\n\n target: ActionTarget::Panel(user.panel),\n", "file_path": "simulator/src/sim/actions/perform.rs", "rank": 58, "score": 222811.73590217758 }, { "content": "struct JumpImpl {}\n\n\n\nimpl AbilityImpl for JumpImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if user.ally(target)\n\n && !DAMAGE_CANCELS\n\n .iter()\n\n .any(|condition| target.has_condition(*condition))\n\n {\n\n return;\n\n }\n\n\n\n let ct_remaining = 0.max(100 - target.ct.min(100));\n", "file_path": "simulator/src/sim/actions/jump.rs", "rank": 59, "score": 222811.73590217758 }, { "content": "struct ChargeImpl {\n\n k: i16,\n\n ct: u8,\n\n}\n\n\n\nimpl AbilityImpl for ChargeImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n let 
ct_remaining = 0.max(100 - target.ct.min(100));\n\n let speed = if target.haste() {\n\n // TODO: Real AI doesn't account for this, but, since I haven't implemented\n\n // tile targeting, I'm going to only target those that will certainly hit\n\n (target.speed().saturating_mul(3)) / 2\n\n } else {\n", "file_path": "simulator/src/sim/actions/charge.rs", "rank": 60, "score": 222811.73590217758 }, { "content": "struct PhoenixDownImpl {}\n\n\n\nimpl AbilityImpl for PhoenixDownImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if user.foe(target) && !target.dead() && target.undead() {\n\n actions.push(Action::new(ability, item_range(user), None, target.id()));\n\n } else if user.ally(target) && !target.undead() && target.dead() && !target.reraise() {\n\n actions.push(Action::new(ability, item_range(user), None, target.id()));\n\n }\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, _user_id: CombatantId, target_id: CombatantId) {\n\n let target = sim.combatant(target_id);\n\n if target.undead() && !target.dead() {\n\n sim.change_target_hp(target_id, target.max_hp(), Source::Ability);\n\n } else if !target.undead() && target.dead() {\n\n let heal_amount = sim.roll_inclusive(1, 20);\n\n sim.change_target_hp(target_id, -heal_amount, Source::Ability);\n\n }\n\n }\n\n}\n", "file_path": "simulator/src/sim/actions/item.rs", "rank": 61, "score": 222811.73590217758 }, { "content": "fn real_target(\n\n sim: &Simulation,\n\n user_id: CombatantId,\n\n weapon: Option<&Equipment>,\n\n target_id: CombatantId,\n\n) -> Option<CombatantId> {\n\n let is_gun = weapon.map_or(false, |eq| eq.weapon_type == Some(WeaponType::Gun));\n\n\n\n if !is_gun {\n\n return Some(target_id);\n\n }\n\n\n\n let user = sim.combatant(user_id);\n\n let user_height = sim.combatant_height(user_id);\n\n let target = sim.combatant(target_id);\n\n let mut 
real_target = Some(target_id);\n\n for location in user.panel.line(target.panel).skip(1) {\n\n // TODO: Not really sure what to do here...\n\n if sim.height(location) > user_height + 3.0 {\n\n real_target = None;\n\n break;\n\n }\n\n if let Some(new_target_id) = sim.combatant_on_panel(location) {\n\n real_target = Some(new_target_id);\n\n break;\n\n }\n\n }\n\n real_target\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/attack.rs", "rank": 62, "score": 222667.9680662092 }, { "content": "fn perform_on_target(\n\n sim: &mut Simulation,\n\n user_id: CombatantId,\n\n ability: &Ability,\n\n target_id: CombatantId,\n\n) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n if target.crystal() || target.jumping() {\n\n return;\n\n }\n\n if ability.flags & CASTER_IMMUNE != 0 && user_id == target_id {\n\n return;\n\n }\n\n if ability.flags & HITS_FOES_ONLY != 0 && !user.foe(target) {\n\n return;\n\n }\n\n if ability.flags & HITS_ALLIES_ONLY != 0 && !user.ally(target) {\n\n return;\n\n }\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 63, "score": 222667.9680662092 }, { "content": "pub fn do_single_weapon_attack<'a, 'b>(\n\n sim: &'a mut Simulation<'b>,\n\n user_id: CombatantId,\n\n weapon: Option<&'b Equipment>,\n\n original_target_id: CombatantId,\n\n k: i16,\n\n) -> (i16, bool) {\n\n let target_id = match real_target(sim, user_id, weapon, original_target_id) {\n\n Some(target_id) => target_id,\n\n None => return (0, false),\n\n };\n\n\n\n let is_gun = weapon.map_or(false, |eq| eq.weapon_type == Some(WeaponType::Gun));\n\n\n\n if let Some(weapon) = weapon {\n\n if is_gun && weapon.weapon_element.is_some() {\n\n return do_single_magical_gun_attack(sim, user_id, weapon, target_id);\n\n }\n\n }\n\n\n", "file_path": "simulator/src/sim/actions/attack.rs", "rank": 64, "score": 222425.4463459115 }, { "content": "struct ByblosElemental {\n\n element: Element,\n\n q: i16,\n\n range: u8,\n\n ctr: Option<u8>,\n\n condition: 
Option<Condition>,\n\n}\n\n\n\nimpl AbilityImpl for ByblosElemental {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if user.ally(target) && !target.absorbs(self.element) {\n\n return;\n\n }\n", "file_path": "simulator/src/sim/actions/monster/byblos.rs", "rank": 65, "score": 217399.5945373824 }, { "content": "struct SoulAbility {\n\n ma_factor: i16,\n\n range: u8,\n\n element: Element,\n\n}\n\n\n\nimpl AbilityImpl for SoulAbility {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, self.range, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n if sim.do_magical_evade(user, target, Source::Ability) {\n\n return;\n\n }\n\n let xa = mod_5_formula_xa(user.ma() as i16, user, target, self.element, false);\n\n sim.change_target_hp(target_id, xa * self.ma_factor, Source::Ability);\n\n }\n\n}\n", "file_path": "simulator/src/sim/actions/monster/reaper.rs", "rank": 66, "score": 217371.58443481338 }, { "content": "struct RepairImpl {\n\n base_chance: i16,\n\n}\n\n\n\nimpl AbilityImpl for RepairImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 0, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n\n\n let chance = (user.pa() as f32 + self.base_chance as f32) / 100.0;\n\n if !(sim.roll_auto_succeed() < chance) 
{\n", "file_path": "simulator/src/sim/actions/monster/work.rs", "rank": 67, "score": 217266.33189929565 }, { "content": "struct DrawOutDamageImpl {\n\n ma_factor: i16,\n\n range: u8,\n\n damage_hp_not_mp: bool,\n\n chance_to_add_random: &'static [Condition],\n\n chance_to_cancel: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for DrawOutDamageImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, self.range, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n", "file_path": "simulator/src/sim/actions/draw_out.rs", "rank": 68, "score": 217266.33189929565 }, { "content": "struct GilTakingImpl {\n\n base_chance: i16,\n\n range: u8,\n\n}\n\n\n\nimpl AbilityImpl for GilTakingImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if target.berserk() {\n\n return;\n\n }\n\n actions.push(Action::new(ability, self.range, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n", "file_path": "simulator/src/sim/actions/steal.rs", "rank": 69, "score": 217266.33189929565 }, { "content": "struct OinkImpl;\n\n\n\nimpl AbilityImpl for OinkImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if !target.dead() {\n\n return;\n\n }\n\n actions.push(Action::new(ability, 2, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = 
sim.combatant(target_id);\n\n let max_hp = target.max_hp();\n\n let compat = user.zodiac_compatibility(target);\n\n let chance = ((user.pa() + 71) as f32 * compat) / 100.0;\n\n if sim.roll_auto_succeed() < chance {\n\n sim.change_target_hp(target_id, -((max_hp / 4) * 3), Source::Ability);\n\n }\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/monster/porky.rs", "rank": 70, "score": 217266.33189929565 }, { "content": "struct HurtDanceImpl {\n\n ct: u8,\n\n hp_not_mp: bool,\n\n}\n\n\n\nimpl AbilityImpl for HurtDanceImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n _target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action {\n\n ability,\n\n range: 255,\n\n ctr: Some(self.ct),\n\n target: ActionTarget::Panel(user.panel),\n\n });\n", "file_path": "simulator/src/sim/actions/perform.rs", "rank": 71, "score": 217266.33189929565 }, { "content": "struct PurificationImpl {\n\n base_chance: i16,\n\n cancels: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for PurificationImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 0, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n\n", "file_path": "simulator/src/sim/actions/punch_art.rs", "rank": 72, "score": 217266.33189929565 }, { "content": "struct TootImpl {\n\n conditions: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for TootImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 2, None, 
target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, _user_id: CombatantId, target_id: CombatantId) {\n\n let idx = sim.roll_inclusive(0, (self.conditions.len() - 1) as i16);\n\n sim.add_condition(target_id, self.conditions[idx as usize], Source::Ability);\n\n }\n\n}\n", "file_path": "simulator/src/sim/actions/monster/porky.rs", "rank": 73, "score": 217266.33189929565 }, { "content": "struct ReviveImpl {\n\n base_chance: i16,\n\n heal_amount: f32,\n\n}\n\n\n\nimpl AbilityImpl for ReviveImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if !target.dead() {\n\n return;\n\n }\n\n\n\n actions.push(Action::new(ability, 1, None, target.id()));\n\n }\n", "file_path": "simulator/src/sim/actions/punch_art.rs", "rank": 74, "score": 217266.33189929565 }, { "content": "struct DashImpl {\n\n rand_min: i16,\n\n rand_max: i16,\n\n range: u8,\n\n}\n\n\n\nimpl AbilityImpl for DashImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, self.range, None, target.id()));\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n", "file_path": "simulator/src/sim/actions/basic_skill.rs", "rank": 75, "score": 217266.33189929565 }, { "content": "struct MutilateImpl {\n\n base_chance: i16,\n\n}\n\n\n\nimpl AbilityImpl for MutilateImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 1, None, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, 
target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n\n", "file_path": "simulator/src/sim/actions/monster/goblin.rs", "rank": 76, "score": 217266.33189929565 }, { "content": "struct DrawOutBuffImpl {\n\n add_random: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for DrawOutBuffImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 0, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, _user_id: CombatantId, target_id: CombatantId) {\n\n // TODO: Do we ever add more than one?\n\n let length = (self.add_random.len() - 1) as i16;\n\n let condition = self.add_random[sim.roll_inclusive(0, length) as usize];\n\n sim.add_condition(target_id, condition, Source::Ability);\n\n }\n\n}\n", "file_path": "simulator/src/sim/actions/draw_out.rs", "rank": 77, "score": 217266.33189929565 }, { "content": "struct GooImpl {\n\n base_chance: i16,\n\n range: u8,\n\n conditions: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for GooImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, self.range, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n // TODO: Not sure if Zodiac affects this.. need to ask Nacho. 
In base game it was MOD 0.\n\n let user = sim.combatant(user_id);\n", "file_path": "simulator/src/sim/actions/monster/molboro.rs", "rank": 78, "score": 217266.33189929565 }, { "content": "struct StatPerformanceImpl {\n\n hit_chance: f32,\n\n ct: u8,\n\n speed_buff: i8,\n\n pa_buff: i8,\n\n ma_buff: i8,\n\n}\n\n\n\nimpl AbilityImpl for StatPerformanceImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n _target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action {\n\n ability,\n\n range: 255,\n", "file_path": "simulator/src/sim/actions/perform.rs", "rank": 79, "score": 217266.33189929565 }, { "content": "struct PeckImpl {\n\n base_chance: i16,\n\n}\n\n\n\nimpl AbilityImpl for PeckImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 1, None, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n\n", "file_path": "simulator/src/sim/actions/monster/juravis.rs", "rank": 80, "score": 217266.33189929565 }, { "content": "struct ChakraImpl {\n\n hp_multiplier: i16,\n\n mp_multiplier: i16,\n\n}\n\n\n\nimpl AbilityImpl for ChakraImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 0, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let _target = sim.combatant(target_id);\n\n\n\n let mut pa = user.pa() as i16;\n\n if user.martial_arts() {\n\n pa = (pa * 
3) / 2;\n\n }\n\n\n\n sim.change_target_hp(target_id, self.hp_multiplier * -pa, Source::Ability);\n\n sim.change_target_mp(target_id, (self.mp_multiplier * -pa) / 2, Source::Ability);\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/punch_art.rs", "rank": 81, "score": 217266.33189929565 }, { "content": "struct BeakImpl {\n\n base_chance: i16,\n\n}\n\n\n\nimpl AbilityImpl for BeakImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 1, None, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n\n", "file_path": "simulator/src/sim/actions/monster/juravis.rs", "rank": 82, "score": 217266.33189929565 }, { "content": "fn perform_aoe_on_panel(\n\n sim: &mut Simulation,\n\n user_id: CombatantId,\n\n ability: &Ability,\n\n panel: Panel,\n\n) {\n\n if let Some(target_id) = sim.combatant_on_panel(panel) {\n\n perform_on_target(sim, user_id, ability, target_id);\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 83, "score": 217242.67633323735 }, { "content": "fn handle_normal_ability(\n\n sim: &mut Simulation,\n\n user_id: CombatantId,\n\n action: Action,\n\n ability: &Ability,\n\n action_target: ActionTarget,\n\n) {\n\n match ability.aoe {\n\n AoE::None => {\n\n if let Some(target_id) = action.target.to_target_id(sim) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n // TODO: Not a great place for this.. 
re: MP costs.\n\n if !target.jumping() && filter_target_level(user, ability, target) {\n\n ability.implementation.perform(sim, user_id, target_id);\n\n } else {\n\n // TODO: Log some sort of event for failing to perform an ability\n\n }\n\n } else {\n\n // TODO: Something about the ability missing.\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 84, "score": 217146.6135733702 }, { "content": "fn handle_math_ability(\n\n sim: &mut Simulation,\n\n user_id: CombatantId,\n\n ability: &Ability,\n\n attr: CalcAttribute,\n\n algo: CalcAlgorithm,\n\n) {\n\n for cid in &COMBATANT_IDS {\n\n if !math_match(sim, *cid, attr, algo) {\n\n continue;\n\n }\n\n let target = sim.combatant(*cid);\n\n if ability.flags & NOT_ALIVE_OK == 0 && !target.alive() {\n\n return;\n\n }\n\n if ability.flags & PETRIFY_OK == 0 && target.petrify() {\n\n return;\n\n }\n\n ability.implementation.perform(sim, user_id, *cid);\n\n }\n\n}\n\n\n\nconst PRIME_NUMBERS: &[u8] = &[\n\n 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97,\n\n 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193,\n\n];\n\n\n", "file_path": "simulator/src/sim/actions/mod.rs", "rank": 85, "score": 217146.6135733702 }, { "content": "pub fn describe_combatant(c_id: CombatantId, combatants: &[Combatant], arena: &Arena) -> String {\n\n let combatant = &combatants[c_id.index()];\n\n let conditions = combatant.all_conditions();\n\n let cond_str = if conditions.is_empty() {\n\n \"\".to_owned()\n\n } else {\n\n format!(\n\n \", {}\",\n\n conditions\n\n .iter()\n\n .map(|c| c.name())\n\n .collect::<Vec<_>>()\n\n .join(\", \")\n\n )\n\n };\n\n\n\n match combatant.team() {\n\n Team::Left => format!(\n\n \"{} [{} HP, {} MP, {}{}{}]\",\n\n combatant.name().red(),\n", "file_path": "simulator/src/sim/log/entry.rs", "rank": 86, "score": 215013.66722110903 }, { "content": "struct GoblinPunchImpl {\n\n base_chance: i16,\n\n}\n\n\n\nimpl AbilityImpl for 
GoblinPunchImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 1, None, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n\n", "file_path": "simulator/src/sim/actions/monster/goblin.rs", "rank": 87, "score": 212035.5070131139 }, { "content": "struct MathSkillImpl {}\n\n\n\nimpl AbilityImpl for MathSkillImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n _ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n _target: &Combatant<'a>,\n\n ) {\n\n for ability in user.abilities() {\n\n if ability.flags & CAN_BE_CALCULATED == 0 {\n\n continue;\n\n }\n\n if user.info.known_calc_attributes & CalcAttribute::Height.flag() != 0 {\n\n add_with_attr(actions, ability, user, CalcAttribute::Height);\n\n }\n\n if user.info.known_calc_attributes & CalcAttribute::CT.flag() != 0 {\n\n add_with_attr(actions, ability, user, CalcAttribute::CT);\n", "file_path": "simulator/src/sim/actions/math_skill.rs", "rank": 88, "score": 212035.5070131139 }, { "content": "struct TailSwingImpl {\n\n min_factor: i16,\n\n max_factor: i16,\n\n}\n\n\n\nimpl AbilityImpl for TailSwingImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 1, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let xa = mod_2_formula_xa(\n\n sim,\n", "file_path": "simulator/src/sim/actions/monster/dragon.rs", "rank": 89, "score": 212035.5070131139 }, { 
"content": "struct SnakeCarrierImpl {\n\n conditions: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for SnakeCarrierImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 1, None, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let target = sim.combatant(target_id);\n\n let amount = target.hp() / 4;\n\n sim.change_target_hp(target_id, amount, Source::Ability);\n\n sim.change_target_hp(user_id, -amount, Source::Ability);\n\n let idx = sim.roll_inclusive(0, (self.conditions.len() - 1) as i16);\n\n let cond = self.conditions[idx as usize];\n\n sim.add_condition(target_id, cond, Source::Ability);\n\n sim.try_countergrasp(user_id, target_id);\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/monster/serpentarius.rs", "rank": 90, "score": 212035.5070131139 }, { "content": "struct ChocoCureImpl {\n\n ma_factor: i16,\n\n}\n\n\n\nimpl AbilityImpl for ChocoCureImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 0, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n let xa = mod_5_formula_xa(user.ma() as i16, user, target, Element::None, true);\n\n do_hp_heal(sim, target_id, xa * self.ma_factor, true);\n\n }\n\n}\n", "file_path": "simulator/src/sim/actions/monster/chocobo.rs", "rank": 91, "score": 212035.5070131139 }, { "content": "struct MindBlastImpl {\n\n conditions: &'static [Condition],\n\n base_chance: i16,\n\n range: u8,\n\n}\n\n\n\nimpl AbilityImpl for MindBlastImpl 
{\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n instant_aoe_consider(self.range, actions, ability, target)\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n", "file_path": "simulator/src/sim/actions/monster/pisco.rs", "rank": 92, "score": 212035.5070131139 }, { "content": "struct MightySkillImpl {\n\n equip_slot: EquipSlot,\n\n range: u8,\n\n}\n\n\n\nimpl AbilityImpl for MightySkillImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if target.monster() {\n\n return;\n\n }\n\n if target.get_equip(self.equip_slot).is_none() {\n\n return;\n\n }\n", "file_path": "simulator/src/sim/actions/battle_skill.rs", "rank": 93, "score": 212035.5070131139 }, { "content": "struct BreakEquipImpl {\n\n base_chance: i16,\n\n equip_slot: EquipSlot,\n\n}\n\n\n\nimpl AbilityImpl for BreakEquipImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n if target.monster() {\n\n return;\n\n }\n\n actions.push(Action::new(\n\n ability,\n\n user.main_hand().map_or(1, |eq| eq.range),\n", "file_path": "simulator/src/sim/actions/battle_skill.rs", "rank": 94, "score": 212035.5070131139 }, { "content": "struct FeatherBombImpl {\n\n range: u8,\n\n ma_factor: i16,\n\n}\n\n\n\nimpl AbilityImpl for FeatherBombImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 
self.range, None, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n\n\n let xa = mod_5_formula_xa(user.ma(), user, target, Element::Wind, false);\n\n let amount = xa * self.ma_factor;\n\n sim.change_target_hp(target_id, amount, Source::Ability);\n\n }\n\n}\n\n\n", "file_path": "simulator/src/sim/actions/monster/juravis.rs", "rank": 95, "score": 212035.5070131139 }, { "content": "struct ShineLoverImpl {\n\n base_chance: i16,\n\n}\n\n\n\nimpl AbilityImpl for ShineLoverImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 1, None, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n if !sim.do_physical_evade(user, target, None, Source::Ability) {\n", "file_path": "simulator/src/sim/actions/monster/juravis.rs", "rank": 96, "score": 212035.5070131139 }, { "content": "struct ToxicFrogImpl {\n\n range: u8,\n\n ctr: Option<u8>,\n\n base_chance: i16,\n\n conditions: &'static [Condition],\n\n}\n\n\n\nimpl AbilityImpl for ToxicFrogImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, self.range, self.ctr, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n", "file_path": "simulator/src/sim/actions/monster/serpentarius.rs", "rank": 97, "score": 212035.5070131139 }, { "content": "struct EyeGougeImpl {\n\n base_chance: i16,\n\n}\n\n\n\nimpl 
AbilityImpl for EyeGougeImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, 1, None, target.id()))\n\n }\n\n\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n\n", "file_path": "simulator/src/sim/actions/monster/goblin.rs", "rank": 98, "score": 212035.5070131139 }, { "content": "struct ChocoBallImpl {\n\n element: Element,\n\n range: u8,\n\n}\n\n\n\nimpl AbilityImpl for ChocoBallImpl {\n\n fn consider<'a>(\n\n &self,\n\n actions: &mut Vec<Action<'a>>,\n\n ability: &'a Ability<'a>,\n\n _sim: &Simulation<'a>,\n\n _user: &Combatant<'a>,\n\n target: &Combatant<'a>,\n\n ) {\n\n actions.push(Action::new(ability, self.range, None, target.id()));\n\n }\n\n fn perform<'a>(&self, sim: &mut Simulation<'a>, user_id: CombatantId, target_id: CombatantId) {\n\n let user = sim.combatant(user_id);\n\n let target = sim.combatant(target_id);\n\n\n", "file_path": "simulator/src/sim/actions/monster/chocobo.rs", "rank": 99, "score": 212035.5070131139 } ]
Rust
src/kms/gcpkms.rs
stemid/roughenough
7bc4ea5d34fac53d33ff2ed53bb7a70a2c1aca93
#[cfg(feature = "gcpkms")] pub mod inner { extern crate base64; extern crate google_cloudkms1 as cloudkms1; extern crate hyper; extern crate hyper_rustls; extern crate yup_oauth2 as oauth2; use std::default::Default; use std::env; use std::path::Path; use std::result::Result; use self::cloudkms1::CloudKMS; use self::cloudkms1::{DecryptRequest, EncryptRequest}; use self::hyper::net::HttpsConnector; use self::hyper::status::StatusCode; use self::hyper_rustls::TlsClient; use self::oauth2::{ServiceAccountAccess, ServiceAccountKey}; use crate::kms::{EncryptedDEK, KmsError, KmsProvider, PlaintextDEK, AD}; const GOOGLE_APP_CREDS: &str = &"GOOGLE_APPLICATION_CREDENTIALS"; pub struct GcpKms { key_resource_id: String, service_account: ServiceAccountKey, } impl GcpKms { pub fn from_resource_id(resource_id: &str) -> Result<Self, KmsError> { let svc_acct = load_gcp_credential()?; Ok(GcpKms { key_resource_id: resource_id.to_string(), service_account: svc_acct, }) } fn new_hub(&self) -> CloudKMS<hyper::Client, ServiceAccountAccess<hyper::Client>> { let client1 = hyper::Client::with_connector(HttpsConnector::new(TlsClient::new())); let access = oauth2::ServiceAccountAccess::new(self.service_account.clone(), client1); let client2 = hyper::Client::with_connector(HttpsConnector::new(TlsClient::new())); CloudKMS::new(client2, access) } fn pretty_http_error(&self, resp: &hyper::client::Response) -> KmsError { let code = resp.status; let url = &resp.url; KmsError::OperationFailed(format!("Response {} from {}", code, url)) } } impl KmsProvider for GcpKms { fn encrypt_dek(&self, plaintext_dek: &PlaintextDEK) -> Result<EncryptedDEK, KmsError> { let mut request = EncryptRequest::default(); request.plaintext = Some(base64::encode(plaintext_dek)); request.additional_authenticated_data = Some(base64::encode(AD)); let hub = self.new_hub(); let result = hub .projects() .locations_key_rings_crypto_keys_encrypt(request, &self.key_resource_id) .doit(); match result { Ok((http_resp, enc_resp)) => { 
if http_resp.status == StatusCode::Ok { let ciphertext = enc_resp.ciphertext.unwrap(); let ct = base64::decode(&ciphertext)?; Ok(ct) } else { Err(self.pretty_http_error(&http_resp)) } } Err(e) => Err(KmsError::OperationFailed(format!("encrypt_dek() {:?}", e))), } } fn decrypt_dek(&self, encrypted_dek: &EncryptedDEK) -> Result<PlaintextDEK, KmsError> { let mut request = DecryptRequest::default(); request.ciphertext = Some(base64::encode(encrypted_dek)); request.additional_authenticated_data = Some(base64::encode(AD)); let hub = self.new_hub(); let result = hub .projects() .locations_key_rings_crypto_keys_decrypt(request, &self.key_resource_id) .doit(); match result { Ok((http_resp, enc_resp)) => { if http_resp.status == StatusCode::Ok { let plaintext = enc_resp.plaintext.unwrap(); let ct = base64::decode(&plaintext)?; Ok(ct) } else { Err(self.pretty_http_error(&http_resp)) } } Err(e) => Err(KmsError::OperationFailed(format!("decrypt_dek() {:?}", e))), } } } fn load_gcp_credential() -> Result<ServiceAccountKey, KmsError> { if let Ok(gac) = env::var(GOOGLE_APP_CREDS.to_string()) { return if Path::new(&gac).exists() { match oauth2::service_account_key_from_file(&gac) { Ok(svc_acct_key) => Ok(svc_acct_key), Err(e) => { Err(KmsError::InvalidConfiguration(format!( "Can't load service account credential '{}': {:?}", gac, e ))) } } } else { Err(KmsError::InvalidConfiguration(format!( "{} ='{}' does not exist", GOOGLE_APP_CREDS, gac ))) } } panic!( "Failed to load service account credential. Is {} set?", GOOGLE_APP_CREDS ); } }
#[cfg(feature = "gcpkms")] pub mod inner { extern crate base64; extern crate google_cloudkms1 as cloudkms1; extern crate hyper; extern crate hyper_rustls; extern crate yup_oauth2 as oauth2; use std::default::Default; use std::env; use std::path::Path; use std::result::Result; use self::cloudkms1::CloudKMS; use self::cloudkms1::{DecryptRequest, EncryptRequest}; use self::hyper::net::HttpsConnector; use self::hyper::status::StatusCode; use self::hyper_rustls::TlsClient; use self::oauth2::{ServiceAccountAccess, ServiceAccountKey}; use crate::kms::{EncryptedDEK, KmsError, KmsProvider, PlaintextDEK, AD}; const GOOGLE_APP_CREDS: &str = &"GOOGLE_APPLICATION_CREDENTIALS"; pub struct GcpKms { key_resource_id: String, service_account: ServiceAccountKey, } impl GcpKms { pub fn from_resource_id(resource_id: &str) -> Result<Self, KmsError> { let svc_acct = load_gcp_credential()?; Ok(GcpKms { key_resource_id: resource_id.to_string(), service_account: svc_acct, }) } fn new_hub(&self) -> CloudKMS<hyper::Client, ServiceAccountAccess<hyper::Client>> { let client1 = hyper::Client::with_connector(HttpsConnector::new(TlsClient::new())); let access = oauth2::ServiceAccountAccess::new(self.service_account.clone(), client1); let client2 = hyper::Client::with_connector(HttpsConnector::new(TlsClient::new())); CloudKMS::new(client2, access) } fn pretty_http_error(&self, resp: &hyper::client::Response) -> KmsError { let code = resp.status; let url = &resp.url; KmsError::OperationFailed(format!("Response {} from {}", code, url)) } } impl KmsProvider for GcpKms { fn encrypt_dek(&self, plaintext_dek: &PlaintextDEK) -> Result<EncryptedDEK, KmsError> { let mut request = EncryptRequest::default(); request.plaintext = Some(base64::encode(plaintext_dek)); request.additional_authenticated_data = Some(base64::encode(AD)); let hub = self.new_hub(); let result = hub .projects() .locations_key_rings_crypto_keys_encrypt(request, &self.key_resource_id) .doit();
} fn decrypt_dek(&self, encrypted_dek: &EncryptedDEK) -> Result<PlaintextDEK, KmsError> { let mut request = DecryptRequest::default(); request.ciphertext = Some(base64::encode(encrypted_dek)); request.additional_authenticated_data = Some(base64::encode(AD)); let hub = self.new_hub(); let result = hub .projects() .locations_key_rings_crypto_keys_decrypt(request, &self.key_resource_id) .doit(); match result { Ok((http_resp, enc_resp)) => { if http_resp.status == StatusCode::Ok { let plaintext = enc_resp.plaintext.unwrap(); let ct = base64::decode(&plaintext)?; Ok(ct) } else { Err(self.pretty_http_error(&http_resp)) } } Err(e) => Err(KmsError::OperationFailed(format!("decrypt_dek() {:?}", e))), } } } fn load_gcp_credential() -> Result<ServiceAccountKey, KmsError> { if let Ok(gac) = env::var(GOOGLE_APP_CREDS.to_string()) { return if Path::new(&gac).exists() { match oauth2::service_account_key_from_file(&gac) { Ok(svc_acct_key) => Ok(svc_acct_key), Err(e) => { Err(KmsError::InvalidConfiguration(format!( "Can't load service account credential '{}': {:?}", gac, e ))) } } } else { Err(KmsError::InvalidConfiguration(format!( "{} ='{}' does not exist", GOOGLE_APP_CREDS, gac ))) } } panic!( "Failed to load service account credential. Is {} set?", GOOGLE_APP_CREDS ); } }
match result { Ok((http_resp, enc_resp)) => { if http_resp.status == StatusCode::Ok { let ciphertext = enc_resp.ciphertext.unwrap(); let ct = base64::decode(&ciphertext)?; Ok(ct) } else { Err(self.pretty_http_error(&http_resp)) } } Err(e) => Err(KmsError::OperationFailed(format!("encrypt_dek() {:?}", e))), }
if_condition
[ { "content": "/// Factory function to create a `ServerConfig` _trait object_ based on the value\n\n/// of the provided `arg`.\n\n///\n\n/// * `ENV` will return an [`EnvironmentConfig`](struct.EnvironmentConfig.html)\n\n/// * any other value returns a [`FileConfig`](struct.FileConfig.html)\n\n///\n\npub fn make_config(arg: &str) -> Result<Box<dyn ServerConfig>, Error> {\n\n if arg == \"ENV\" {\n\n match EnvironmentConfig::new() {\n\n Ok(cfg) => Ok(Box::new(cfg)),\n\n Err(e) => Err(e),\n\n }\n\n } else {\n\n match FileConfig::new(arg) {\n\n Ok(cfg) => Ok(Box::new(cfg)),\n\n Err(e) => Err(e),\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// Validate configuration settings. Returns `true` if the config is valid, `false` otherwise.\n\n///\n", "file_path": "src/config/mod.rs", "rank": 0, "score": 130314.16378948555 }, { "content": "/// Roughenough version string enriched with any compile-time optional features\n\npub fn roughenough_version() -> String {\n\n let kms_str = if cfg!(feature = \"awskms\") {\n\n \" (+AWS KMS)\"\n\n } else if cfg!(feature = \"gcpkms\") {\n\n \" (+GCP KMS)\"\n\n } else {\n\n \"\"\n\n };\n\n\n\n format!(\"{}{}\", VERSION, kms_str)\n\n}\n\n\n\n// Constants and magic numbers of the Roughtime protocol\n\n\n\n/// Minimum size (in bytes) of a client request\n\npub const MIN_REQUEST_LENGTH: u32 = 1024;\n\n\n\n/// Size (in bytes) of seeds used to derive private keys\n\npub const SEED_LENGTH: u32 = 32;\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 119558.15049646609 }, { "content": "#[cfg(feature = \"gcpkms\")]\n\npub fn load_seed(config: &dyn ServerConfig) -> Result<Vec<u8>, error::Error> {\n\n match config.kms_protection() {\n\n KmsProtection::Plaintext => Ok(config.seed()),\n\n KmsProtection::GoogleKmsEnvelope(resource_id) => {\n\n info!(\"Unwrapping seed via Google KMS key '{}'\", resource_id);\n\n let kms = GcpKms::from_resource_id(resource_id)?;\n\n let seed = EnvelopeEncryption::decrypt_seed(&kms, &config.seed())?;\n\n Ok(seed)\n\n }\n\n _ => 
Err(error::Error::InvalidConfiguration(\n\n \"AWS KMS not supported\".to_string(),\n\n )),\n\n }\n\n}\n\n\n\n/// Load the seed value for the long-term key.\n\n///\n\n/// ## This build has KMS disabled\n\n///\n\n/// *The KMS feature is disabled in this build of Roughenough*.\n\n///\n\n/// The only supported `kms_protection` value in this build is `plaintext`. Any\n\n/// other value will cause a runtime error.\n\n///\n\n/// * `config.seed()` is used as-is and assumed to be a 32-byte hexadecimal value\n\n///\n", "file_path": "src/kms/mod.rs", "rank": 2, "score": 97099.22829872808 }, { "content": "pub fn root_from_paths(mut index: usize, data: &[u8], paths: &[u8]) -> Hash {\n\n let mut hash = {\n\n let mut ctx = digest::Context::new(&digest::SHA512);\n\n ctx.update(TREE_LEAF_TWEAK);\n\n ctx.update(data);\n\n Hash::from(ctx.finish().as_ref())\n\n };\n\n\n\n assert_eq!(paths.len() % 64, 0);\n\n\n\n for path in paths.chunks(64) {\n\n let mut ctx = digest::Context::new(&digest::SHA512);\n\n ctx.update(TREE_NODE_TWEAK);\n\n\n\n if index & 1 == 0 {\n\n // Left\n\n ctx.update(&hash);\n\n ctx.update(path);\n\n } else {\n\n // Right\n", "file_path": "src/merkle.rs", "rank": 3, "score": 82793.0882548556 }, { "content": "#[allow(clippy::useless_let_if_seq)]\n\npub fn is_valid_config(cfg: &dyn ServerConfig) -> bool {\n\n let mut is_valid = true;\n\n\n\n if cfg.port() == 0 {\n\n error!(\"server port not set: {}\", cfg.port());\n\n is_valid = false;\n\n }\n\n\n\n if cfg.interface().is_empty() {\n\n error!(\"'interface' is missing\");\n\n is_valid = false;\n\n }\n\n\n\n if cfg.seed().is_empty() {\n\n error!(\"'seed' value is missing\");\n\n is_valid = false;\n\n } else if *cfg.kms_protection() == KmsProtection::Plaintext && cfg.seed().len() != SEED_LENGTH as usize {\n\n error!(\"plaintext seed value must be 32 characters long, found {}\", cfg.seed().len());\n\n is_valid = false;\n\n } else if *cfg.kms_protection() != KmsProtection::Plaintext && cfg.seed().len() <= SEED_LENGTH as 
usize {\n", "file_path": "src/config/mod.rs", "rank": 4, "score": 82305.7249994005 }, { "content": "fn create_empty_message(c: &mut Criterion) {\n\n c.bench_function(\"create empty message\", |b| b.iter(|| RtMessage::new(0)));\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 5, "score": 71446.55762031769 }, { "content": "fn reuse_merkle_trees(c: &mut Criterion) {\n\n let mut tree = MerkleTree::new();\n\n\n\n c.bench_function_over_inputs(\"reuse existing merkle tree\", move |b, &size| {\n\n b.iter(|| {\n\n tree.reset();\n\n for _ in 0..*size {\n\n tree.push_leaf(DATA);\n\n }\n\n black_box(tree.compute_root());\n\n })\n\n }, SIZES);\n\n}\n\n\n\ncriterion_group!(message_creation,\n\n create_empty_message,\n\n create_single_field_message,\n\n create_two_field_message,\n\n create_four_field_message,\n\n create_nested_message\n\n);\n\n\n\ncriterion_group!(merkle_tree,\n\n create_new_merkle_tree,\n\n reuse_merkle_trees\n\n);\n\n\n\ncriterion_main!(message_creation, merkle_tree);\n", "file_path": "benches/roughenough-bench.rs", "rank": 6, "score": 71446.55762031769 }, { "content": "fn create_nested_message(c: &mut Criterion) {\n\n let pad = [0u8; 400];\n\n\n\n c.bench_function(\"create nested message\", move |b| b.iter(|| {\n\n let mut msg1 = RtMessage::new(4);\n\n msg1.add_field(Tag::SIG, \"0987\".as_bytes()).unwrap();\n\n msg1.add_field(Tag::NONC, \"wxyz\".as_bytes()).unwrap();\n\n msg1.add_field(Tag::DELE, \"1234\".as_bytes()).unwrap();\n\n msg1.add_field(Tag::PATH, \"abcd\".as_bytes()).unwrap();\n\n\n\n let mut msg2 = RtMessage::new(2);\n\n msg2.add_field(Tag::PUBK, \"1234567890\".as_bytes()).unwrap();\n\n msg2.add_field(Tag::PAD, pad.as_ref()).unwrap();\n\n }));\n\n}\n\n\n\nstatic SIZES: &[u8] = &[1, 3, 9, 17, 200];\n\nstatic DATA: &[u8] = &[1u8; 64];\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 7, "score": 71446.55762031769 }, { "content": "fn create_four_field_message(c: &mut Criterion) {\n\n c.bench_function(\"create four field 
message\", |b| b.iter(|| {\n\n let mut msg = RtMessage::new(4);\n\n msg.add_field(Tag::SIG, \"0987\".as_bytes()).unwrap();\n\n msg.add_field(Tag::NONC, \"wxyz\".as_bytes()).unwrap();\n\n msg.add_field(Tag::DELE, \"1234\".as_bytes()).unwrap();\n\n msg.add_field(Tag::PATH, \"abcd\".as_bytes()).unwrap();\n\n }));\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 8, "score": 69490.96336658097 }, { "content": "fn create_two_field_message(c: &mut Criterion) {\n\n c.bench_function(\"create two field message\", |b| b.iter(|| {\n\n let mut msg = RtMessage::new(2);\n\n msg.add_field(Tag::NONC, \"1234\".as_bytes()).unwrap();\n\n msg.add_field(Tag::PAD, \"abcd\".as_bytes()).unwrap();\n\n }));\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 9, "score": 69490.96336658097 }, { "content": "fn create_new_merkle_tree(c: &mut Criterion) {\n\n c.bench_function_over_inputs(\"create new merkle trees\", move |b, &size| {\n\n b.iter(|| {\n\n let mut tree = MerkleTree::new();\n\n for _ in 0..*size {\n\n tree.push_leaf(DATA);\n\n }\n\n black_box(tree.compute_root())\n\n })\n\n }, SIZES);\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 10, "score": 69490.96336658097 }, { "content": "fn create_single_field_message(c: &mut Criterion) {\n\n c.bench_function(\"create single field message\", |b| b.iter(|| {\n\n let mut msg = RtMessage::new(1);\n\n msg.add_field(Tag::NONC, \"1234\".as_bytes()).unwrap();\n\n }));\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 11, "score": 69490.96336658097 }, { "content": "///\n\n/// Specifies parameters needed to configure a Roughenough server.\n\n///\n\n/// Parameters labeled \"**Required**\" must always be provided and have no default value\n\n/// while those labeled \"**Optional**\" provide sane default values that can be overridden.\n\n///\n\n/// YAML Key | Environment Variable | Necessity | Description\n\n/// --- | --- | --- | ---\n\n/// `interface` | `ROUGHENOUGH_INTERFACE` | Required | 
IP address or interface name for listening to client requests\n\n/// `port` | `ROUGHENOUGH_PORT` | Required | UDP port to listen for requests\n\n/// `seed` | `ROUGHENOUGH_SEED` | Required | A 32-byte hexadecimal value used to generate the server's long-term key pair. **This is a secret value and must be un-guessable**, treat it with care. (If compiled with KMS support, length will vary)\n\n/// `batch_size` | `ROUGHENOUGH_BATCH_SIZE` | Optional | The maximum number of requests to process in one batch. All nonces in a batch are used to build a Merkle tree, the root of which is signed. Default is `64` requests per batch.\n\n/// `status_interval` | `ROUGHENOUGH_STATUS_INTERVAL` | Optional | Number of _seconds_ between each logged status update. Default is `600` seconds (10 minutes).\n\n/// `health_check_port` | `ROUGHENOUGH_HEALTH_CHECK_PORT` | Optional | If present, enable an HTTP health check responder on the provided port. **Use with caution**.\n\n/// `kms_protection` | `ROUGHENOUGH_KMS_PROTECTION` | Optional | If compiled with KMS support, the ID of the KMS key used to protect the long-term identity.\n\n/// `client_stats` | `ROUGHENOUGH_CLIENT_STATS` | Optional | A value of `on` or `yes` will enable tracking of per-client request statistics that will be output each time server status is logged. Default is `off` (disabled).\n\n/// `fault_percentage` | `ROUGHENOUGH_FAULT_PERCENTAGE` | Optional | Likelihood (as a percentage) that the server will intentionally return an invalid client response. An integer range from `0` (disabled, all responses valid) to `50` (50% of responses will be invalid). Default is `0` (disabled).\n\n///\n\n/// Implementations of this trait obtain a valid configuration from different back-end\n\n/// sources. 
See:\n\n/// * [FileConfig](struct.FileConfig.html) - configure via a YAML file\n\n/// * [EnvironmentConfig](struct.EnvironmentConfig.html) - configure via environment variables\n\n/// * [MemoryConfig](struct.MemoryConfig.html) - in-memory configuration for testing\n\n///\n\npub trait ServerConfig {\n\n /// [Required] IP address or interface name to listen for client requests\n\n fn interface(&self) -> &str;\n\n\n\n /// [Required] UDP port to listen for requests\n\n fn port(&self) -> u16;\n\n\n\n /// [Required] A 32-byte hexadecimal value used to generate the server's\n\n /// long-term key pair. **This is a secret value and must be un-guessable**,\n\n /// treat it with care.\n\n fn seed(&self) -> Vec<u8>;\n\n\n\n /// [Optional] The maximum number of requests to process in one batch. All\n\n /// nonces in a batch are used to build a Merkle tree, the root of which is signed.\n\n /// Defaults to [DEFAULT_BATCH_SIZE](constant.DEFAULT_BATCH_SIZE.html)\n\n fn batch_size(&self) -> u8;\n\n\n\n /// [Optional] Amount of time between each logged status update.\n\n /// Defaults to [DEFAULT_STATUS_INTERVAL](constant.DEFAULT_STATUS_INTERVAL.html)\n\n fn status_interval(&self) -> Duration;\n", "file_path": "src/config/mod.rs", "rank": 12, "score": 66112.93947863701 }, { "content": "///\n\n/// Implementations of this trait record client activity\n\n///\n\npub trait ServerStats {\n\n fn add_valid_request(&mut self, addr: &IpAddr);\n\n\n\n fn add_invalid_request(&mut self, addr: &IpAddr);\n\n\n\n fn add_health_check(&mut self, addr: &IpAddr);\n\n\n\n fn add_response(&mut self, addr: &IpAddr, bytes_sent: usize);\n\n\n\n fn total_valid_requests(&self) -> u64;\n\n\n\n fn total_invalid_requests(&self) -> u64;\n\n\n\n fn total_health_checks(&self) -> u64;\n\n\n\n fn total_responses_sent(&self) -> u64;\n\n\n\n fn total_bytes_sent(&self) -> usize;\n\n\n\n fn total_unique_clients(&self) -> u64;\n", "file_path": "src/stats/mod.rs", "rank": 13, "score": 66105.42943956198 }, { "content": 
"///\n\n/// A key management system that wraps/unwraps a data encryption key (DEK).\n\n///\n\npub trait KmsProvider {\n\n /// Make a blocking request to encrypt (wrap) the provided plaintext data encryption key.\n\n fn encrypt_dek(&self, plaintext_dek: &PlaintextDEK) -> Result<EncryptedDEK, KmsError>;\n\n\n\n /// Make a blocking request to decrypt (unwrap) a previously encrypted data encryption key.\n\n fn decrypt_dek(&self, encrypted_dek: &EncryptedDEK) -> Result<PlaintextDEK, KmsError>;\n\n}\n\n\n\n#[cfg(feature = \"awskms\")]\n\nmod awskms;\n\n\n\n#[cfg(feature = \"awskms\")]\n\npub use crate::kms::awskms::inner::AwsKms;\n\n\n\n/// Load the seed value for the long-term key.\n\n///\n\n/// Loading behavior depends on the value of `config.kms_protection()`:\n\n///\n\n/// * If `config.kms_protection() == Plaintext` then the value returned from `config.seed()`\n\n/// is used as-is and assumed to be a 32-byte hexadecimal value.\n\n///\n\n/// * Otherwise `config.seed()` is assumed to be an encrypted opaque blob generated from\n\n/// a prior `EnvelopeEncryption::encrypt_seed` call. 
The value of `config.kms_protection()`\n\n/// is parsed as a KMS key id and `EnvelopeEncryption::decrypt_seed` is called to obtain\n\n/// the plaintext seed value.\n\n///\n", "file_path": "src/kms/mod.rs", "rank": 14, "score": 66105.42943956198 }, { "content": "// Convenience function to create zero-filled Vec of given size\n\nfn vec_zero_filled(len: usize) -> Vec<u8> {\n\n (0..len).map(|_| 0).collect()\n\n}\n\n\n\n/// Envelope encryption of the long-term key seed value.\n\n///\n\n/// The seed is encrypted using AES-GCM-256 with:\n\n///\n\n/// * 32 byte (256 bit) random key\n\n/// * 12 byte (96 bit) random nonce\n\n/// * 16 byte (128 bit) authentication tag\n\n///\n\n/// Randomness obtained from\n\n/// [`ring::rand::SecureRandom`](https://briansmith.org/rustdoc/ring/rand/trait.SecureRandom.html).\n\n///\n\n/// The key used to encrypt the seed is wrapped (encrypted) using a\n\n/// [`KmsProvider`](trait.KmsProvider.html) implementation.\n\n///\n\npub struct EnvelopeEncryption;\n\n\n", "file_path": "src/kms/envelope.rs", "rank": 15, "score": 31879.807024403668 }, { "content": " }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::key::KmsProtection;\n\n use std::str::FromStr;\n\n\n\n #[test]\n\n fn convert_from_string() {\n\n let arn =\n\n \"arn:aws:kms:some-aws-region:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\";\n\n let resource_id =\n\n \"projects/key-project/locations/global/keyRings/key-ring/cryptoKeys/my-key\";\n\n\n\n match KmsProtection::from_str(\"plaintext\") {\n\n Ok(KmsProtection::Plaintext) => (),\n\n e => panic!(\"unexpected result {:?}\", e),\n\n };\n", "file_path": "src/key/mod.rs", "rank": 16, "score": 29284.058004852755 }, { "content": "//! Implementations of `ServerConfig` obtain configurations from different back-end sources\n\n//! 
such as files or environment variables.\n\n//!\n\n\n\nmod environment;\n\nmod file;\n\nmod memory;\n\n\n\nuse std::net::SocketAddr;\n\nuse std::time::Duration;\n\n\n\npub use self::environment::EnvironmentConfig;\n\npub use self::file::FileConfig;\n\npub use self::memory::MemoryConfig;\n\n\n\nuse crate::key::KmsProtection;\n\nuse crate::Error;\n\nuse crate::SEED_LENGTH;\n\n\n\n/// Maximum number of requests to process in one batch and include the the Merkle tree.\n", "file_path": "src/config/mod.rs", "rank": 17, "score": 29281.546024002266 }, { "content": "\n\nimpl Display for KmsProtection {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), std::fmt::Error> {\n\n match self {\n\n KmsProtection::Plaintext => write!(f, \"Plaintext\"),\n\n KmsProtection::AwsKmsEnvelope(key_id) => write!(f, \"AwsKms({})\", key_id),\n\n KmsProtection::GoogleKmsEnvelope(key_id) => write!(f, \"GoogleKms({})\", key_id),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for KmsProtection {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<KmsProtection, String> {\n\n match s {\n\n \"plaintext\" => Ok(KmsProtection::Plaintext),\n\n s if s.starts_with(\"arn:\") => Ok(KmsProtection::AwsKmsEnvelope(s.to_string())),\n\n s if s.starts_with(\"projects/\") => Ok(KmsProtection::GoogleKmsEnvelope(s.to_string())),\n\n s => Err(format!(\"unknown KmsProtection '{}'\", s)),\n", "file_path": "src/key/mod.rs", "rank": 18, "score": 29280.949495695706 }, { "content": "\n\nuse crate::config::ServerConfig;\n\nuse crate::error;\n\nuse crate::key::KmsProtection;\n\n\n\npub use self::envelope::EnvelopeEncryption;\n\n\n\n/// Errors generated by KMS operations\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Clone)]\n\npub enum KmsError {\n\n OperationFailed(String),\n\n InvalidConfiguration(String),\n\n InvalidData(String),\n\n InvalidKey(String),\n\n}\n\n\n\nimpl From<std::io::Error> for KmsError {\n\n fn from(error: std::io::Error) -> Self {\n\n KmsError::OperationFailed(format!(\"{:?}\", error))\n\n }\n", 
"file_path": "src/kms/mod.rs", "rank": 19, "score": 29280.74842435039 }, { "content": "\n\nuse std::net::IpAddr;\n\nuse std::collections::hash_map::Iter;\n\n\n\npub use crate::stats::aggregated::AggregatedStats;\n\npub use crate::stats::per_client::PerClientStats;\n\n\n\n///\n\n/// Specific metrics tracked per each client\n\n///\n\n#[derive(Debug, Clone, Copy)]\n\npub struct ClientStatEntry {\n\n pub valid_requests: u64,\n\n pub invalid_requests: u64,\n\n pub health_checks: u64,\n\n pub responses_sent: u64,\n\n pub bytes_sent: usize,\n\n}\n\n\n\nimpl ClientStatEntry {\n", "file_path": "src/stats/mod.rs", "rank": 20, "score": 29280.677429059197 }, { "content": "\n\nuse std::fmt::Display;\n\nuse std::fmt::Formatter;\n\nuse std::str::FromStr;\n\n\n\npub use self::longterm::LongTermKey;\n\npub use self::online::OnlineKey;\n\n\n\n/// Methods for protecting the server's long-term identity\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Clone)]\n\npub enum KmsProtection {\n\n /// No protection, seed is in plaintext\n\n Plaintext,\n\n\n\n /// Envelope encryption of the seed using AWS Key Management Service\n\n AwsKmsEnvelope(String),\n\n\n\n /// Envelope encryption of the seed using Google Cloud Key Management Service\n\n GoogleKmsEnvelope(String),\n\n}\n", "file_path": "src/key/mod.rs", "rank": 21, "score": 29277.84484137174 }, { "content": "\n\n fn stats_for_client(&self, addr: &IpAddr) -> Option<&ClientStatEntry>;\n\n\n\n fn iter(&self) -> Iter<IpAddr, ClientStatEntry>;\n\n\n\n fn clear(&mut self);\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::stats::{ServerStats, PerClientStats};\n\n use std::net::{IpAddr, Ipv4Addr};\n\n\n\n #[test]\n\n fn simple_stats_starts_empty() {\n\n let stats = PerClientStats::new();\n\n\n\n assert_eq!(stats.total_valid_requests(), 0);\n\n assert_eq!(stats.total_invalid_requests(), 0);\n", "file_path": "src/stats/mod.rs", "rank": 22, "score": 29277.226176934884 }, { "content": "//! system (KMS). 
The resulting opaque encrypted \"blob\" (encrypted seed + encrypted DEK) is\n\n//! stored in the Roughenough configuration.\n\n//!\n\n//! At server start-up the KMS is used to decrypt the DEK, which is then used to (in memory)\n\n//! decrypt the seed. The seed is used to generate the\n\n//! [delegated on-line key](../key/struct.OnlineKey.html) after which the seed and DEK are erased\n\n//! from memory.\n\n//!\n\n//! See\n\n//! * [`EnvelopeEncryption`](struct.EnvelopeEncryption.html) for Roughenough's implementation.\n\n//! * [Google](https://cloud.google.com/kms/docs/envelope-encryption) or\n\n//! [Amazon](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#enveloping)\n\n//! for more in-depth explanations of envelope encryption.\n\n//!\n\n\n\nmod envelope;\n\n\n\nuse base64;\n\nuse ring;\n\nuse std;\n", "file_path": "src/kms/mod.rs", "rank": 23, "score": 29274.278078243547 }, { "content": "}\n\n\n\nimpl From<ring::error::Unspecified> for KmsError {\n\n fn from(_: ring::error::Unspecified) -> Self {\n\n KmsError::OperationFailed(\"unspecified ring cryptographic failure\".to_string())\n\n }\n\n}\n\n\n\nimpl From<base64::DecodeError> for KmsError {\n\n fn from(error: base64::DecodeError) -> Self {\n\n KmsError::OperationFailed(format!(\"base64: {}\", error))\n\n }\n\n}\n\n\n\n// Size of the AEAD nonce in bytes.\n\nconst NONCE_SIZE_BYTES: usize = 12;\n\n\n\n// Size of the AEAD authentication tag in bytes.\n\nconst TAG_SIZE_BYTES: usize = 16;\n\n\n", "file_path": "src/kms/mod.rs", "rank": 24, "score": 29273.74128485283 }, { "content": "// Copyright 2017-2019 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS 
IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//!\n\n//! Facilities for tracking client requests to the server\n\n//!\n\n\n\nmod aggregated;\n\nmod per_client;\n", "file_path": "src/stats/mod.rs", "rank": 25, "score": 29271.88163773291 }, { "content": " match KmsProtection::from_str(arn) {\n\n Ok(KmsProtection::AwsKmsEnvelope(msg)) => assert_eq!(msg, arn),\n\n e => panic!(\"unexpected result {:?}\", e),\n\n }\n\n match KmsProtection::from_str(resource_id) {\n\n Ok(KmsProtection::GoogleKmsEnvelope(msg)) => assert_eq!(msg, resource_id),\n\n e => panic!(\"unexpected result {:?}\", e),\n\n }\n\n match KmsProtection::from_str(\"frobble\") {\n\n Err(msg) => assert!(msg.contains(\"unknown KmsProtection\")),\n\n e => panic!(\"unexpected result {:?}\", e),\n\n }\n\n }\n\n}\n", "file_path": "src/key/mod.rs", "rank": 26, "score": 29271.671812483848 }, { "content": "// Size of the 256-bit Data Encryption Key (DEK) in bytes.\n\nconst DEK_SIZE_BYTES: usize = 32;\n\n\n\n// Trivial domain separation to guard against KMS key reuse\n\nconst AD: &str = \"roughenough\";\n\n\n\n/// An unencrypted (plaintext) 256-bit Data Encryption Key (DEK).\n\npub type PlaintextDEK = Vec<u8>;\n\n\n\n/// A Data Encryption Key (DEK) that has been encrypted (wrapped) by a Key Management System (KMS).\n\n///\n\n/// This is an opaque, implementation-specific value. 
AEAD tag size, nonce size,\n\n/// provider metadata, and so on will vary between [`KmsProvider`](trait.KmsProvider.html)\n\n/// implementations.\n\npub type EncryptedDEK = Vec<u8>;\n\n\n\n///\n\n/// A key management system that wraps/unwraps a data encryption key (DEK).\n\n///\n", "file_path": "src/kms/mod.rs", "rank": 27, "score": 29271.257827291418 }, { "content": "pub const DEFAULT_BATCH_SIZE: u8 = 64;\n\n\n\n/// Amount of time between each logged status update.\n\npub const DEFAULT_STATUS_INTERVAL: Duration = Duration::from_secs(600);\n\n\n\n///\n\n/// Specifies parameters needed to configure a Roughenough server.\n\n///\n\n/// Parameters labeled \"**Required**\" must always be provided and have no default value\n\n/// while those labeled \"**Optional**\" provide sane default values that can be overridden.\n\n///\n\n/// YAML Key | Environment Variable | Necessity | Description\n\n/// --- | --- | --- | ---\n\n/// `interface` | `ROUGHENOUGH_INTERFACE` | Required | IP address or interface name for listening to client requests\n\n/// `port` | `ROUGHENOUGH_PORT` | Required | UDP port to listen for requests\n\n/// `seed` | `ROUGHENOUGH_SEED` | Required | A 32-byte hexadecimal value used to generate the server's long-term key pair. **This is a secret value and must be un-guessable**, treat it with care. (If compiled with KMS support, length will vary)\n\n/// `batch_size` | `ROUGHENOUGH_BATCH_SIZE` | Optional | The maximum number of requests to process in one batch. All nonces in a batch are used to build a Merkle tree, the root of which is signed. Default is `64` requests per batch.\n\n/// `status_interval` | `ROUGHENOUGH_STATUS_INTERVAL` | Optional | Number of _seconds_ between each logged status update. Default is `600` seconds (10 minutes).\n\n/// `health_check_port` | `ROUGHENOUGH_HEALTH_CHECK_PORT` | Optional | If present, enable an HTTP health check responder on the provided port. 
**Use with caution**.\n\n/// `kms_protection` | `ROUGHENOUGH_KMS_PROTECTION` | Optional | If compiled with KMS support, the ID of the KMS key used to protect the long-term identity.\n\n/// `client_stats` | `ROUGHENOUGH_CLIENT_STATS` | Optional | A value of `on` or `yes` will enable tracking of per-client request statistics that will be output each time server status is logged. Default is `off` (disabled).\n\n/// `fault_percentage` | `ROUGHENOUGH_FAULT_PERCENTAGE` | Optional | Likelihood (as a percentage) that the server will intentionally return an invalid client response. An integer range from `0` (disabled, all responses valid) to `50` (50% of responses will be invalid). Default is `0` (disabled).\n\n///\n\n/// Implementations of this trait obtain a valid configuration from different back-end\n\n/// sources. See:\n\n/// * [FileConfig](struct.FileConfig.html) - configure via a YAML file\n\n/// * [EnvironmentConfig](struct.EnvironmentConfig.html) - configure via environment variables\n\n/// * [MemoryConfig](struct.MemoryConfig.html) - in-memory configuration for testing\n\n///\n", "file_path": "src/config/mod.rs", "rank": 28, "score": 29270.99355825883 }, { "content": " assert_eq!(stats.total_health_checks(), 0);\n\n assert_eq!(stats.total_responses_sent(), 0);\n\n assert_eq!(stats.total_bytes_sent(), 0);\n\n assert_eq!(stats.total_unique_clients(), 0);\n\n assert_eq!(stats.num_overflows(), 0);\n\n }\n\n\n\n #[test]\n\n fn client_requests_are_tracked() {\n\n let mut stats = PerClientStats::new();\n\n\n\n let ip1 = \"127.0.0.1\".parse().unwrap();\n\n let ip2 = \"127.0.0.2\".parse().unwrap();\n\n let ip3 = \"127.0.0.3\".parse().unwrap();\n\n\n\n stats.add_valid_request(&ip1);\n\n stats.add_valid_request(&ip2);\n\n stats.add_valid_request(&ip3);\n\n assert_eq!(stats.total_valid_requests(), 3);\n\n\n", "file_path": "src/stats/mod.rs", "rank": 29, "score": 29270.264921656613 }, { "content": " stats.add_invalid_request(&ip2);\n\n 
assert_eq!(stats.total_invalid_requests(), 1);\n\n\n\n stats.add_response(&ip2, 8192);\n\n assert_eq!(stats.total_bytes_sent(), 8192);\n\n\n\n assert_eq!(stats.total_unique_clients(), 3);\n\n }\n\n\n\n #[test]\n\n fn per_client_stats() {\n\n let mut stats = PerClientStats::new();\n\n let ip = \"127.0.0.3\".parse().unwrap();\n\n\n\n stats.add_valid_request(&ip);\n\n stats.add_response(&ip, 2048);\n\n stats.add_response(&ip, 1024);\n\n\n\n let entry = stats.stats_for_client(&ip).unwrap();\n\n assert_eq!(entry.valid_requests, 1);\n", "file_path": "src/stats/mod.rs", "rank": 30, "score": 29270.182504097073 }, { "content": " assert_eq!(entry.invalid_requests, 0);\n\n assert_eq!(entry.responses_sent, 2);\n\n assert_eq!(entry.bytes_sent, 3072);\n\n }\n\n\n\n #[test]\n\n fn overflow_max_entries() {\n\n let mut stats = PerClientStats::with_limit(100);\n\n\n\n for i in 0..201 {\n\n let ipv4 = Ipv4Addr::from(i as u32);\n\n let addr = IpAddr::from(ipv4);\n\n\n\n stats.add_valid_request(&addr);\n\n };\n\n\n\n assert_eq!(stats.total_unique_clients(), 100);\n\n assert_eq!(stats.num_overflows(), 101);\n\n }\n\n}\n\n\n\n\n", "file_path": "src/stats/mod.rs", "rank": 31, "score": 29269.895125394854 }, { "content": "// Copyright 2017-2019 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//!\n\n//! 
Representations and management of Roughtime's online and long-term Ed25519 keys\n\n//!\n\n\n\nmod longterm;\n\nmod online;\n", "file_path": "src/key/mod.rs", "rank": 32, "score": 29269.559535930002 }, { "content": " fn new() -> Self {\n\n ClientStatEntry {\n\n valid_requests: 0,\n\n invalid_requests: 0,\n\n health_checks: 0,\n\n responses_sent: 0,\n\n bytes_sent: 0,\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// Implementations of this trait record client activity\n\n///\n", "file_path": "src/stats/mod.rs", "rank": 33, "score": 29268.14746562836 }, { "content": "\n\n /// [Optional] Method used to protect the seed for the server's long-term key pair.\n\n /// Defaults to \"`plaintext`\" (no encryption, seed is in the clear).\n\n fn kms_protection(&self) -> &KmsProtection;\n\n\n\n /// [Optional] If present, the TCP port to respond to Google-style HTTP \"legacy health check\".\n\n /// This is a *very* simplistic check, it emits a fixed HTTP response to all TCP connections.\n\n /// https://cloud.google.com/load-balancing/docs/health-checks#legacy-health-checks\n\n fn health_check_port(&self) -> Option<u16>;\n\n\n\n /// [Optional] A value of `on` or `yes` will enable tracking of per-client request statistics\n\n /// that will be output each time server status is logged. Default is `off` (disabled).\n\n fn client_stats_enabled(&self) -> bool;\n\n\n\n /// [Optional] Likelihood (as a percentage) that the server will intentionally return an\n\n /// invalid client response. An integer range from `0` (disabled, all responses valid) to `50`\n\n /// (~50% of responses will be invalid). 
Default is `0` (disabled).\n\n ///\n\n /// See the [Roughtime spec](https://roughtime.googlesource.com/roughtime/+/HEAD/ECOSYSTEM.md#maintaining-a-healthy-software-ecosystem)\n\n /// for background and rationale.\n", "file_path": "src/config/mod.rs", "rank": 34, "score": 29267.416941953965 }, { "content": " fn fault_percentage(&self) -> u8;\n\n\n\n /// Convenience function to create a `SocketAddr` from the provided `interface` and `port`\n\n fn udp_socket_addr(&self) -> Result<SocketAddr, Error> {\n\n let addr = format!(\"{}:{}\", self.interface(), self.port());\n\n match addr.parse() {\n\n Ok(v) => Ok(v),\n\n Err(_) => Err(Error::InvalidConfiguration(addr)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 35, "score": 29267.084734019456 }, { "content": " error!(\"KMS use enabled but seed value is too short to be an encrypted blob\");\n\n is_valid = false;\n\n }\n\n\n\n if cfg.batch_size() < 1 || cfg.batch_size() > 64 {\n\n error!(\n\n \"batch_size {} is invalid; valid range 1-64\",\n\n cfg.batch_size()\n\n );\n\n is_valid = false;\n\n }\n\n\n\n if cfg.fault_percentage() > 50 {\n\n error!(\"fault_percentage {} is invalid; valid range 0-50\", cfg.fault_percentage());\n\n is_valid = false;\n\n }\n\n\n\n if is_valid {\n\n if let Err(e) = cfg.udp_socket_addr() {\n\n error!(\n", "file_path": "src/config/mod.rs", "rank": 36, "score": 29266.56177254255 }, { "content": "\n\n/// Load the seed value for the long-term key.\n\n///\n\n/// Loading behavior depends on the value of `config.kms_protection()`:\n\n///\n\n/// * If `config.kms_protection() == Plaintext` then the value returned from `config.seed()`\n\n/// is used as-is and assumed to be a 32-byte hexadecimal value.\n\n///\n\n/// * Otherwise `config.seed()` is assumed to be an encrypted opaque blob generated from\n\n/// a prior `EnvelopeEncryption::encrypt_seed` call. 
The value of `config.kms_protection()`\n\n/// is parsed as a KMS key id and `EnvelopeEncryption::decrypt_seed` is called to obtain\n\n/// the plaintext seed value.\n\n///\n", "file_path": "src/kms/mod.rs", "rank": 37, "score": 29266.42400399135 }, { "content": "// Copyright 2017-2019 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//!\n\n//! Ways to configure the Roughenough server.\n\n//!\n\n//! The [ServerConfig](trait.ServerConfig.html) trait specifies the required and optional\n\n//! parameters available for configuring a Roughenoguh server instance.\n\n//!\n", "file_path": "src/config/mod.rs", "rank": 38, "score": 29266.231659601042 }, { "content": "//!\n\n//! ## Motivation\n\n//!\n\n//! The seed for the server's [long-term key](../key/struct.LongTermKey.html) is subject to\n\n//! contradictory requirements:\n\n//!\n\n//! 1. The seed must be kept secret, but\n\n//! 2. The seed must be available at server start-up to create the\n\n//! [delegated on-line key](../key/struct.OnlineKey.html)\n\n//!\n\n//! ## Plaintext seed\n\n//!\n\n//! The default option is to store the seed in plaintext as part of the server's configuration.\n\n//! This usually means the seed is present in the clear: on disk, in a repository, or otherwise\n\n//! durably persisted where it can be compromised (accidentally or maliciously).\n\n//!\n\n//! ## Encrypting the seed\n\n//!\n\n//! 
Envelope encryption protects the seed by encrypting it with a locally generated 256-bit\n\n//! Data Encryption Key (DEK). The DEK itself is then encrypted using a cloud key management\n", "file_path": "src/kms/mod.rs", "rank": 39, "score": 29266.16632169084 }, { "content": "// Copyright 2017-2019 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//!\n\n//! Protect the server's long-term key with envelope encryption and a key management system.\n\n//!\n\n//! Note: KMS support must be enabled at compile time, see the Roughenough's [documentation\n\n//! on optional features](https://github.com/int08h/roughenough/blob/master/doc/OPTIONAL-FEATURES.md#key-management-system-kms-support)\n\n//! 
for instructions.\n", "file_path": "src/kms/mod.rs", "rank": 40, "score": 29266.014741380488 }, { "content": " \"failed to create UDP socket {}:{} {:?}\",\n\n cfg.interface(),\n\n cfg.port(),\n\n e\n\n );\n\n is_valid = false;\n\n }\n\n }\n\n\n\n is_valid\n\n}\n", "file_path": "src/config/mod.rs", "rank": 41, "score": 29264.217819812562 }, { "content": "mod error;\n\nmod message;\n\nmod tag;\n\n\n\npub mod config;\n\npub mod grease;\n\npub mod key;\n\npub mod kms;\n\npub mod merkle;\n\npub mod stats;\n\npub mod server;\n\npub mod sign;\n\n\n\npub use crate::error::Error;\n\npub use crate::message::RtMessage;\n\npub use crate::tag::Tag;\n\n\n\n/// Version of Roughenough\n\npub const VERSION: &str = \"1.1.8\";\n\n\n\n/// Roughenough version string enriched with any compile-time optional features\n", "file_path": "src/lib.rs", "rank": 47, "score": 21.161599324324854 }, { "content": " use std::fmt;\n\n use std::fmt::Formatter;\n\n use std::str::FromStr;\n\n\n\n use rusoto_core::Region;\n\n use rusoto_kms::{DecryptRequest, EncryptRequest, Kms, KmsClient};\n\n use bytes::Bytes;\n\n\n\n use crate::kms::{EncryptedDEK, KmsError, KmsProvider, PlaintextDEK, AD, DEK_SIZE_BYTES};\n\n\n\n /// Amazon Web Services Key Management Service\n\n /// https://aws.amazon.com/kms/\n\n pub struct AwsKms {\n\n kms_client: KmsClient,\n\n key_id: String,\n\n }\n\n\n\n impl AwsKms {\n\n /// Create a new instance from the full ARN of a AWS KMS key. 
The ARN is expected\n\n /// to be of the form `arn:aws:kms:some-aws-region:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`\n", "file_path": "src/kms/awskms.rs", "rank": 49, "score": 19.044911060432472 }, { "content": "\n\nimpl AggregatedStats {\n\n\n\n #[allow(dead_code)]\n\n pub fn new() -> Self {\n\n AggregatedStats {\n\n valid_requests: 0,\n\n invalid_requests: 0,\n\n health_checks: 0,\n\n responses_sent: 0,\n\n bytes_sent: 0,\n\n empty_map: HashMap::new()\n\n }\n\n }\n\n}\n\n\n\nimpl ServerStats for AggregatedStats {\n\n fn add_valid_request(&mut self, _: &IpAddr) {\n\n self.valid_requests += 1\n\n }\n", "file_path": "src/stats/aggregated.rs", "rank": 50, "score": 15.668741723359641 }, { "content": " match self.kms_client.encrypt(encrypt_req).sync() {\n\n Ok(result) => {\n\n if let Some(ciphertext) = result.ciphertext_blob {\n\n Ok(ciphertext.to_vec())\n\n } else {\n\n Err(KmsError::OperationFailed(\n\n \"no ciphertext despite successful response\".to_string(),\n\n ))\n\n }\n\n }\n\n Err(e) => Err(KmsError::OperationFailed(e.to_string())),\n\n }\n\n }\n\n\n\n fn decrypt_dek(&self, encrypted_dek: &EncryptedDEK) -> Result<PlaintextDEK, KmsError> {\n\n let mut decrypt_req: DecryptRequest = Default::default();\n\n decrypt_req.ciphertext_blob = Bytes::from(encrypted_dek.as_slice());\n\n\n\n let mut dec_context = HashMap::new();\n\n dec_context.insert(\"AD\".to_string(), AD.to_string());\n", "file_path": "src/kms/awskms.rs", "rank": 51, "score": 15.281582821912316 }, { "content": " batch_size: u8,\n\n status_interval: Duration,\n\n kms_protection: KmsProtection,\n\n health_check_port: Option<u16>,\n\n client_stats: bool,\n\n fault_percentage: u8,\n\n}\n\n\n\nimpl FileConfig {\n\n pub fn new(config_file: &str) -> Result<Self, Error> {\n\n let mut infile = File::open(config_file)\n\n .unwrap_or_else(|_| panic!(\"failed to open config file '{}'\", config_file));\n\n\n\n let mut contents = String::new();\n\n infile\n\n .read_to_string(&mut contents)\n\n 
.unwrap_or_else(|_| panic!(\"could not read config file '{}'\", config_file));\n\n\n\n let cfg = YamlLoader::load_from_str(&contents)\n\n .unwrap_or_else(|_| panic!(\"could not parse config file '{}'\", config_file));\n", "file_path": "src/config/file.rs", "rank": 52, "score": 15.129835520577984 }, { "content": " }\n\n }\n\n\n\n impl KmsProvider for AwsKms {\n\n fn encrypt_dek(&self, plaintext_dek: &PlaintextDEK) -> Result<EncryptedDEK, KmsError> {\n\n if plaintext_dek.len() != DEK_SIZE_BYTES {\n\n return Err(KmsError::InvalidKey(format!(\n\n \"provided DEK wrong length: {}\",\n\n plaintext_dek.len()\n\n )));\n\n }\n\n\n\n let mut encrypt_req: EncryptRequest = Default::default();\n\n encrypt_req.key_id = self.key_id.clone();\n\n encrypt_req.plaintext = Bytes::from(plaintext_dek.as_slice());\n\n\n\n let mut enc_context = HashMap::new();\n\n enc_context.insert(\"AD\".to_string(), AD.to_string());\n\n encrypt_req.encryption_context = Some(enc_context);\n\n\n", "file_path": "src/kms/awskms.rs", "rank": 53, "score": 14.943146799369263 }, { "content": "// Copyright 2017-2019 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n#[cfg(feature = \"awskms\")]\n\npub mod inner {\n\n extern crate bytes;\n\n\n\n use std::collections::HashMap;\n\n use std::default::Default;\n", "file_path": "src/kms/awskms.rs", "rank": 54, "score": 14.272569328345734 }, { "content": "\n\nuse crate::key::OnlineKey;\n\nuse 
crate::message::RtMessage;\n\nuse crate::sign::Signer;\n\nuse crate::tag::Tag;\n\nuse crate::CERTIFICATE_CONTEXT;\n\n\n\n///\n\n/// Represents the server's long-term identity.\n\n///\n\npub struct LongTermKey {\n\n signer: Signer,\n\n}\n\n\n\nimpl LongTermKey {\n\n pub fn new(seed: &[u8]) -> Self {\n\n LongTermKey {\n\n signer: Signer::from_seed(seed),\n\n }\n\n }\n", "file_path": "src/key/longterm.rs", "rank": 55, "score": 14.075791569614303 }, { "content": "use crate::tag::Tag;\n\n\n\n///\n\n/// A Roughtime protocol message; a map of u32 tags to arbitrary byte-strings.\n\n///\n\n#[derive(Debug, Clone)]\n\npub struct RtMessage {\n\n tags: Vec<Tag>,\n\n values: Vec<Vec<u8>>,\n\n}\n\n\n\nimpl RtMessage {\n\n /// Construct a new RtMessage\n\n ///\n\n /// ## Arguments\n\n ///\n\n /// * `num_fields` - Reserve space for this many fields.\n\n ///\n\n pub fn new(num_fields: u32) -> Self {\n\n RtMessage {\n", "file_path": "src/message.rs", "rank": 56, "score": 13.167316829422314 }, { "content": " }\n\n\n\n pub fn public_key_bytes(&self) -> &[u8] {\n\n self.key_pair.public_key_bytes()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Signer {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", hex::encode(self.public_key_bytes()))\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Signer {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"Signer({}, {:?})\",\n\n hex::encode(self.public_key_bytes()),\n\n self.buf\n", "file_path": "src/sign.rs", "rank": 57, "score": 13.149649012423389 }, { "content": "use crate::config::{DEFAULT_BATCH_SIZE, DEFAULT_STATUS_INTERVAL};\n\nuse crate::key::KmsProtection;\n\nuse crate::Error;\n\n\n\n///\n\n/// Read a Roughenough server configuration ([ServerConfig](trait.ServerConfig.html))\n\n/// from a YAML file.\n\n///\n\n/// Example minimal config:\n\n///\n\n/// ```yaml\n\n/// interface: 127.0.0.1\n\n/// port: 8686\n\n/// seed: f61075c988feb9cb700a4a6a3291bfbc9cab11b9c9eca8c802468eb38a43d7d3\n\n/// 
```\n\n///\n\npub struct FileConfig {\n\n port: u16,\n\n interface: String,\n\n seed: Vec<u8>,\n", "file_path": "src/config/file.rs", "rank": 58, "score": 12.606531219206559 }, { "content": "\n\n///\n\n/// Implementation of `ServerStats` that provides high-level aggregated client statistics. No\n\n/// per-client statistic are maintained and runtime memory use is constant.\n\n///\n\n#[allow(dead_code)]\n\npub struct AggregatedStats {\n\n valid_requests: u64,\n\n invalid_requests: u64,\n\n health_checks: u64,\n\n responses_sent: u64,\n\n bytes_sent: usize,\n\n empty_map: HashMap<IpAddr, ClientStatEntry>,\n\n}\n\n\n\nimpl Default for AggregatedStats {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n", "file_path": "src/stats/aggregated.rs", "rank": 59, "score": 12.575978351825974 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use crate::grease::Grease;\n\n use crate::RtMessage;\n\n use crate::tag::Tag;\n\n\n\n #[test]\n\n fn verify_error_probability() {\n\n const TRIALS: u64 = 100_000;\n\n const TOLERANCE: f64 = 0.75;\n\n\n\n for target in 1..50 {\n\n let mut g = Grease::new(target);\n\n let (lower, upper) = (target as f64 - TOLERANCE, target as f64 + TOLERANCE);\n\n\n\n let acc: u64 = (0..TRIALS)\n\n .map(|_| if g.should_add_error() { 1 } else { 0 })\n\n .sum();\n\n\n\n let percentage = 100.0 * (acc as f64 / TRIALS as f64);\n", "file_path": "src/grease.rs", "rank": 60, "score": 12.304936467808208 }, { "content": "#[macro_use]\n\nextern crate criterion;\n\nextern crate roughenough;\n\n\n\nuse criterion::{black_box, Criterion};\n\nuse roughenough::RtMessage;\n\nuse roughenough::Tag;\n\nuse roughenough::merkle::{MerkleTree, root_from_paths};\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 61, "score": 12.01960366724097 }, { "content": "impl EnvironmentConfig {\n\n pub fn new() -> Result<Self, Error> {\n\n let mut cfg = EnvironmentConfig {\n\n port: 0,\n\n interface: \"\".to_string(),\n\n seed: Vec::new(),\n\n batch_size: DEFAULT_BATCH_SIZE,\n\n 
status_interval: DEFAULT_STATUS_INTERVAL,\n\n kms_protection: KmsProtection::Plaintext,\n\n health_check_port: None,\n\n client_stats: false,\n\n fault_percentage: 0,\n\n };\n\n\n\n if let Ok(port) = env::var(ROUGHENOUGH_PORT) {\n\n cfg.port = port\n\n .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid port: {}\", port));\n\n };\n\n\n", "file_path": "src/config/environment.rs", "rank": 62, "score": 11.571509098146972 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use byteorder::{LittleEndian, ReadBytesExt};\n\n use crate::message::*;\n\n use std::io::{Cursor, Read};\n\n use crate::tag::Tag;\n\n\n\n #[test]\n\n fn empty_message_size() {\n\n let msg = RtMessage::new(0);\n\n\n\n assert_eq!(msg.num_fields(), 0);\n\n // Empty message is 4 bytes, a single num_tags value\n\n assert_eq!(msg.encoded_size(), 4);\n\n }\n\n\n\n #[test]\n\n fn single_field_message_size() {\n\n let mut msg = RtMessage::new(1);\n", "file_path": "src/message.rs", "rank": 63, "score": 11.530246649264603 }, { "content": " pub fn from_arn(arn: &str) -> Result<Self, KmsError> {\n\n let parts: Vec<&str> = arn.split(':').collect();\n\n\n\n if parts.len() != 6 {\n\n return Err(KmsError::InvalidConfiguration(format!(\n\n \"invalid KMS arn: too few parts {}\",\n\n parts.len()\n\n )));\n\n }\n\n\n\n let region_part = parts.get(3).expect(\"region is missing\");\n\n let region = match Region::from_str(region_part) {\n\n Ok(r) => r,\n\n Err(e) => return Err(KmsError::InvalidConfiguration(e.to_string())),\n\n };\n\n\n\n Ok(AwsKms {\n\n kms_client: KmsClient::new(region),\n\n key_id: arn.to_string(),\n\n })\n", "file_path": "src/kms/awskms.rs", "rank": 64, "score": 11.270426565162868 }, { "content": " output.write_u16::<LittleEndian>(wrapped_dek.len() as u16)?;\n\n output.write_u16::<LittleEndian>(nonce.len() as u16)?;\n\n output.write_all(&wrapped_dek)?;\n\n output.write_all(&nonce)?;\n\n output.write_all(&encrypted_seed)?;\n\n\n\n Ok(output)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use 
crate::kms::envelope::{DEK_LEN_FIELD, MIN_PAYLOAD_SIZE, NONCE_LEN_FIELD};\n\n use crate::kms::EnvelopeEncryption;\n\n use crate::kms::{KmsError, KmsProvider};\n\n\n\n struct MockKmsProvider {}\n\n\n\n // Mock provider that returns a copy of the input\n\n impl KmsProvider for MockKmsProvider {\n", "file_path": "src/kms/envelope.rs", "rank": 65, "score": 11.047492057923392 }, { "content": "\n\n/// A purely in-memory Roughenough config for testing purposes.\n\n///\n\n/// This is useful for testing or fuzzing a server without the need to create additional files.\n\npub struct MemoryConfig {\n\n pub port: u16,\n\n pub interface: String,\n\n pub seed: Vec<u8>,\n\n pub batch_size: u8,\n\n pub status_interval: Duration,\n\n pub kms_protection: KmsProtection,\n\n pub health_check_port: Option<u16>,\n\n pub client_stats: bool,\n\n pub fault_percentage: u8,\n\n}\n\n\n\nimpl MemoryConfig {\n\n pub fn new(port: u16) -> Self {\n\n MemoryConfig {\n\n port,\n", "file_path": "src/config/memory.rs", "rank": 66, "score": 11.028776701191585 }, { "content": " ctx.update(path);\n\n ctx.update(&hash);\n\n }\n\n\n\n hash = Hash::from(ctx.finish().as_ref());\n\n index >>= 1;\n\n }\n\n\n\n hash\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::merkle::*;\n\n\n\n fn test_paths_with_num(num: usize) {\n\n let mut merkle = MerkleTree::new();\n\n\n\n for i in 0..num {\n\n merkle.push_leaf(&[i as u8]);\n", "file_path": "src/merkle.rs", "rank": 67, "score": 10.89865916904014 }, { "content": "\n\n let mut result = RtMessage::new(2);\n\n result.add_field(Tag::SIG, &srep_signature).unwrap();\n\n result.add_field(Tag::SREP, &srep_bytes).unwrap();\n\n\n\n result\n\n }\n\n}\n\n\n\nimpl fmt::Display for OnlineKey {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.signer)\n\n }\n\n}\n", "file_path": "src/key/online.rs", "rank": 68, "score": 10.700893521910787 }, { "content": "//!\n\n\n\nuse rand::{FromEntropy, Rng};\n\nuse rand::distributions::Bernoulli;\n\nuse 
rand::rngs::SmallRng;\n\nuse rand::seq::SliceRandom;\n\nuse rand::seq::index::sample as index_sample;\n\n\n\nuse crate::RtMessage;\n\nuse crate::tag::Tag;\n\nuse crate::grease::Pathologies::*;\n\nuse crate::SIGNATURE_LENGTH;\n\n\n\n///\n\n/// Ways that a message can be made invalid.\n\n///\n\npub enum Pathologies {\n\n /// Randomly re-order the (tag, value) pairs in the message. This violates the protocol's\n\n /// requirement that tags must be in strictly increasing order.\n\n RandomlyOrderTags,\n", "file_path": "src/grease.rs", "rank": 69, "score": 10.288234652904958 }, { "content": " let encrypted_seed = match seal_in_place(\n\n &dek_seal_key,\n\n &nonce,\n\n AD.as_bytes(),\n\n &mut plaintext_buf,\n\n TAG_SIZE_BYTES,\n\n ) {\n\n Ok(enc_len) => plaintext_buf[..enc_len].to_vec(),\n\n Err(_) => {\n\n return Err(KmsError::OperationFailed(\n\n \"failed to encrypt plaintext seed\".to_string(),\n\n ))\n\n }\n\n };\n\n\n\n // Use the KMS to wrap the DEK\n\n let wrapped_dek = kms.encrypt_dek(&dek.to_vec())?;\n\n\n\n // And coalesce everything together\n\n let mut output = Vec::new();\n", "file_path": "src/kms/envelope.rs", "rank": 70, "score": 10.19779509063714 }, { "content": "use std::io::Write;\n\nuse std::net::{IpAddr, Shutdown, SocketAddr};\n\nuse std::process;\n\nuse std::time::{Duration, SystemTime};\n\n\n\nuse byteorder::{LittleEndian, WriteBytesExt};\n\n\n\nuse humansize::{file_size_opts as fsopts, FileSize};\n\n\n\nuse mio::net::{TcpListener, UdpSocket};\n\nuse mio::{Events, Poll, PollOpt, Ready, Token};\n\nuse mio_extras::timer::Timer;\n\n\n\nuse crate::config::ServerConfig;\n\nuse crate::grease::Grease;\n\nuse crate::key::{LongTermKey, OnlineKey};\n\nuse crate::kms;\n\nuse crate::merkle::MerkleTree;\n\nuse crate::stats::{AggregatedStats, ClientStatEntry, PerClientStats, ServerStats};\n\nuse crate::{Error, RtMessage, Tag, MIN_REQUEST_LENGTH};\n", "file_path": "src/server.rs", "rank": 71, "score": 10.177604836703466 }, { "content": "\n\n/// A multi-step 
(init-update-finish) interface for creating an Ed25519 signature\n\npub struct Signer {\n\n key_pair: Ed25519KeyPair,\n\n buf: Vec<u8>,\n\n}\n\n\n\nimpl Default for Signer {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Signer {\n\n pub fn new() -> Self {\n\n let rng = rand::SystemRandom::new();\n\n let mut seed = [0u8; 32];\n\n rng.fill(&mut seed).unwrap();\n\n\n\n Signer::from_seed(&seed)\n", "file_path": "src/sign.rs", "rank": 72, "score": 9.988682775849195 }, { "content": "impl ServerStats for PerClientStats {\n\n fn add_valid_request(&mut self, addr: &IpAddr) {\n\n if self.too_many_entries() {\n\n return;\n\n }\n\n self.clients\n\n .entry(*addr)\n\n .or_insert_with(ClientStatEntry::new)\n\n .valid_requests += 1;\n\n }\n\n\n\n fn add_invalid_request(&mut self, addr: &IpAddr) {\n\n if self.too_many_entries() {\n\n return;\n\n }\n\n self.clients\n\n .entry(*addr)\n\n .or_insert_with(ClientStatEntry::new)\n\n .invalid_requests += 1;\n\n }\n", "file_path": "src/stats/per_client.rs", "rank": 73, "score": 9.970112687853259 }, { "content": "use byteorder::{LittleEndian, WriteBytesExt};\n\n\n\nuse std::fmt;\n\nuse std::fmt::Formatter;\n\nuse std::time::{UNIX_EPOCH, SystemTime};\n\n\n\n///\n\n/// Represents the delegated Roughtime ephemeral online key.\n\n///\n\npub struct OnlineKey {\n\n signer: Signer,\n\n}\n\n\n\nimpl Default for OnlineKey {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl OnlineKey {\n", "file_path": "src/key/online.rs", "rank": 74, "score": 9.954054014833503 }, { "content": "use ring::signature;\n\nuse ring::signature::Ed25519KeyPair;\n\n\n\nuse untrusted::Input;\n\n\n\nuse std::fmt;\n\nuse std::fmt::Formatter;\n\n\n\nconst INITIAL_BUF_SIZE: usize = 1024;\n\n\n\n/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature\n\n#[derive(Debug)]\n\npub struct Verifier<'a> {\n\n pubkey: Input<'a>,\n\n buf: Vec<u8>,\n\n}\n\n\n\nimpl<'a> Verifier<'a> {\n\n pub fn new(pubkey: &'a [u8]) -> Self 
{\n\n Verifier {\n", "file_path": "src/sign.rs", "rank": 75, "score": 9.848273008826105 }, { "content": " self.signer.public_key_bytes()\n\n }\n\n}\n\n\n\nimpl fmt::Display for LongTermKey {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.signer)\n\n }\n\n}\n", "file_path": "src/key/longterm.rs", "rank": 76, "score": 9.82904762537672 }, { "content": " }\n\n }\n\n }\n\n\n\n #[cfg(feature = \"awskms\")]\n\n impl fmt::Display for AwsKms {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.key_id)\n\n }\n\n }\n\n}\n", "file_path": "src/kms/awskms.rs", "rank": 77, "score": 9.82904762537672 }, { "content": "\n\n///\n\n/// Implementation of `ServerStats` that provides granular per-client request/response counts.\n\n///\n\n/// Each unique client address is used to key a hashmap. A maximum of `MAX_CLIENTS` entries\n\n/// are kept in the map to bound memory use. Excess entries beyond `MAX_CLIENTS` are ignored\n\n/// and `num_overflows` is incremented.\n\n///\n\npub struct PerClientStats {\n\n clients: HashMap<IpAddr, ClientStatEntry>,\n\n num_overflows: u64,\n\n max_clients: usize,\n\n}\n\n\n\nimpl Default for PerClientStats {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n", "file_path": "src/stats/per_client.rs", "rank": 78, "score": 9.71110562319173 }, { "content": "\n\n /// Returns a reference to the `ServerConfig` this server was configured with\n\n pub fn get_config(&self) -> &dyn ServerConfig {\n\n self.config.as_ref()\n\n }\n\n\n\n #[cfg(fuzzing)]\n\n pub fn send_to_self(&mut self, data: &[u8]) {\n\n let res = self\n\n .fake_client_socket\n\n .send_to(data, &self.socket.local_addr().unwrap());\n\n info!(\"Sent to self: {:?}\", res);\n\n }\n\n\n\n /// The main processing function for incoming connections. 
This method should be\n\n /// called repeatedly in a loop to process requests.\n\n ///\n\n pub fn process_events(&mut self, events: &mut Events) {\n\n self.poll\n\n .poll(events, self.poll_duration)\n", "file_path": "src/server.rs", "rank": 79, "score": 9.371698250025727 }, { "content": " ///\n\n pub fn encrypt_seed(kms: &dyn KmsProvider, plaintext_seed: &[u8]) -> Result<Vec<u8>, KmsError> {\n\n // Generate random DEK and nonce\n\n let rng = SystemRandom::new();\n\n let mut dek = [0u8; DEK_SIZE_BYTES];\n\n let mut nonce = [0u8; NONCE_SIZE_BYTES];\n\n rng.fill(&mut dek)?;\n\n rng.fill(&mut nonce)?;\n\n\n\n // Ring will overwrite plaintext with ciphertext in this buffer\n\n let mut plaintext_buf = plaintext_seed.to_vec();\n\n\n\n // Reserve space for the authentication tag which will be appended after the ciphertext\n\n plaintext_buf.reserve(TAG_SIZE_BYTES);\n\n for _ in 0..TAG_SIZE_BYTES {\n\n plaintext_buf.push(0);\n\n }\n\n\n\n // Encrypt the plaintext seed using the DEK\n\n let dek_seal_key = SealingKey::new(&AES_256_GCM, &dek)?;\n", "file_path": "src/kms/envelope.rs", "rank": 80, "score": 9.206537137492486 }, { "content": " }\n\n }\n\n\n\n #[inline]\n\n fn too_many_entries(&mut self) -> bool {\n\n let too_big = self.clients.len() >= self.max_clients;\n\n\n\n if too_big {\n\n self.num_overflows += 1;\n\n }\n\n\n\n too_big\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn num_overflows(&self) -> u64 {\n\n self.num_overflows\n\n }\n\n}\n\n\n", "file_path": "src/stats/per_client.rs", "rank": 81, "score": 9.176649584081767 }, { "content": "// Copyright 2017-2019 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" 
BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::error::Error;\n\n\n\n/// An unsigned 32-bit value (key) that maps to a byte-string (value).\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Clone, Copy)]\n\npub enum Tag {\n\n // Enforcement of the \"tags in strictly increasing order\" rule is done using the\n", "file_path": "src/tag.rs", "rank": 82, "score": 9.058709051078582 }, { "content": " Ok(&buf[0x10..0x50])\n\n } else {\n\n Err(Error::InvalidRequest)\n\n }\n\n }\n\n\n\n fn send_responses(&mut self) {\n\n let merkle_root = self.merkle.compute_root();\n\n\n\n // The SREP tag is identical for each response\n\n let srep = self.online_key.make_srep(SystemTime::now(), &merkle_root);\n\n\n\n for (i, &(ref nonce, ref src_addr)) in self.requests.iter().enumerate() {\n\n let paths = self.merkle.get_paths(i);\n\n let resp_msg = {\n\n let r = self.make_response(&srep, &self.cert_bytes, &paths, i as u32);\n\n if self.grease.should_add_error() { self.grease.add_errors(&r) } else { r }\n\n };\n\n let resp_bytes = resp_msg.encode().unwrap();\n\n\n", "file_path": "src/server.rs", "rank": 83, "score": 9.022443938589358 }, { "content": " let dek_open_key = OpeningKey::new(&AES_256_GCM, &dek)?;\n\n match open_in_place(\n\n &dek_open_key,\n\n &nonce,\n\n AD.as_bytes(),\n\n IN_PREFIX_LEN,\n\n &mut encrypted_seed,\n\n ) {\n\n Ok(plaintext_seed) => Ok(plaintext_seed.to_vec()),\n\n Err(_) => Err(KmsError::OperationFailed(\n\n \"failed to decrypt plaintext seed\".to_string(),\n\n )),\n\n }\n\n }\n\n\n\n ///\n\n /// Encrypt the seed value and protect the seed's encryption key using a\n\n /// [`KmsProvider`](trait.KmsProvider.html).\n\n ///\n\n /// The returned encrypted byte blob is safe to store on unsecured media.\n", "file_path": "src/kms/envelope.rs", "rank": 84, "score": 8.762516928633207 }, { "content": " )\n\n 
}\n\n}\n\n\n\n#[rustfmt::skip] // rustfmt errors on the long signature strings\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn verify_ed25519_sig_on_empty_message() {\n\n let pubkey = hex::decode(\n\n \"d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a\",\n\n ).unwrap();\n\n\n\n let signature = hex::decode(\n\n \"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b\"\n\n ).unwrap();\n\n\n\n let v = Verifier::new(&pubkey);\n", "file_path": "src/sign.rs", "rank": 85, "score": 8.494239832052141 }, { "content": " /// Offset was not 32-bit aligned\n\n InvalidAlignment(u32),\n\n\n\n /// Offset is outside of valid message range\n\n InvalidOffsetValue(u32),\n\n\n\n /// Could not convert bytes to message because bytes were too short\n\n MessageTooShort,\n\n\n\n /// Otherwise invalid request\n\n InvalidRequest,\n\n\n\n /// Runtime configuration is invalid for the reason provided\n\n InvalidConfiguration(String),\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(err: std::io::Error) -> Self {\n\n Error::EncodingFailure(err)\n\n }\n", "file_path": "src/error.rs", "rank": 86, "score": 8.298821482993052 }, { "content": "}\n\n\n\nimpl Grease {\n\n ///\n\n /// Creates a new instance `fault_percentage` likely to corrupt a source message.\n\n ///\n\n pub fn new(fault_percentage: u8) -> Self {\n\n Grease {\n\n enabled: fault_percentage > 0,\n\n dist: Bernoulli::from_ratio(u32::from(fault_percentage), 100),\n\n prng: SmallRng::from_entropy(),\n\n }\n\n }\n\n\n\n ///\n\n /// Returns true `fault_percentage` percent of the time.\n\n ///\n\n #[inline]\n\n pub fn should_add_error(&mut self) -> bool {\n\n if self.enabled { self.prng.sample(self.dist) } else { false }\n", "file_path": "src/grease.rs", "rank": 87, "score": 8.165365395179597 }, { "content": "\n\n fn add_invalid_request(&mut self, _: &IpAddr) {\n\n self.invalid_requests += 1\n\n }\n\n\n\n fn 
add_health_check(&mut self, _: &IpAddr) {\n\n self.health_checks += 1\n\n }\n\n\n\n fn add_response(&mut self, _: &IpAddr, bytes_sent: usize) {\n\n self.bytes_sent += bytes_sent;\n\n self.responses_sent += 1;\n\n }\n\n\n\n fn total_valid_requests(&self) -> u64 {\n\n self.valid_requests\n\n }\n\n\n\n fn total_invalid_requests(&self) -> u64 {\n\n self.invalid_requests\n", "file_path": "src/stats/aggregated.rs", "rank": 88, "score": 8.153965872503282 }, { "content": " pub fn pad_to_kilobyte(&mut self) {\n\n let size = self.encoded_size();\n\n if size >= 1024 {\n\n return;\n\n }\n\n\n\n let mut padding_needed = 1024 - size;\n\n if self.tags.len() == 1 {\n\n // If we currently only have one tag, adding a padding tag will cause\n\n // a 32-bit offset value to be written\n\n padding_needed -= 4;\n\n }\n\n padding_needed -= Tag::PAD.wire_value().len();\n\n let padding = vec![0; padding_needed];\n\n\n\n self.add_field(Tag::PAD, &padding).unwrap();\n\n\n\n assert_eq!(self.encoded_size(), 1024);\n\n }\n\n}\n", "file_path": "src/message.rs", "rank": 89, "score": 8.139585985317687 }, { "content": " #[test]\n\n fn invalid_nonce_length_detected() {\n\n let kms = MockKmsProvider {};\n\n let plaintext = Vec::from(\"This is the plaintext used for this test 3\");\n\n\n\n let enc_result = EnvelopeEncryption::encrypt_seed(&kms, &plaintext);\n\n assert_eq!(enc_result.is_ok(), true);\n\n\n\n let ciphertext = enc_result.unwrap();\n\n let mut ciphertext_copy = ciphertext.clone();\n\n\n\n ciphertext_copy[2] = 1;\n\n let dec_result = EnvelopeEncryption::decrypt_seed(&kms, &ciphertext_copy);\n\n match dec_result.expect_err(\"expected an error\") {\n\n KmsError::InvalidData(msg) => assert!(msg.contains(\"nonce (1)\")),\n\n e => panic!(\"unexpected error {:?}\", e),\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "src/kms/envelope.rs", "rank": 90, "score": 8.130031573438771 }, { "content": "\n\n #[test]\n\n fn invalid_dek_length_detected() {\n\n let kms = MockKmsProvider {};\n\n let plaintext = 
Vec::from(\"This is the plaintext used for this test 2\");\n\n\n\n let enc_result = EnvelopeEncryption::encrypt_seed(&kms, &plaintext);\n\n assert_eq!(enc_result.is_ok(), true);\n\n\n\n let ciphertext = enc_result.unwrap();\n\n let mut ciphertext_copy = ciphertext.clone();\n\n\n\n ciphertext_copy[1] = 99;\n\n let dec_result = EnvelopeEncryption::decrypt_seed(&kms, &ciphertext_copy);\n\n match dec_result.expect_err(\"expected an error\") {\n\n KmsError::InvalidData(msg) => assert!(msg.contains(\"invalid DEK\")),\n\n e => panic!(\"unexpected error {:?}\", e),\n\n }\n\n }\n\n\n", "file_path": "src/kms/envelope.rs", "rank": 91, "score": 8.130031573438771 }, { "content": "## Checking response vs. request size complicates server implementation\n\n\n\nIn Draft 02: \n\n\n\n Responding to requests shorter than 1024 bytes is OPTIONAL \n\n and servers MUST NOT send responses larger than the requests \n\n they are replying to.\n\n\n\nRoughtime servers can batch multiple requests into a single response making response \n\nsize a function of server load/batching parameters plus concurrent requests. Roughtime \n\nServer implementations may gather or buffer client requests prior to constructing the \n\nresponse. \n\n\n\n\"...servers MUST NOT send responses larger than the requests...\" will require implementations\n\nto perform additional tracking of per-request sizes and then compute the resulting response\n\nsize once the response *after* batch size has been determined. \n\n\n\nThis is more complex and incurs additional processing compared to simply rejecting all \n\nrequests <1024 bytes.\n\n\n\nSuggestion: mandate requests >= 1024 bytes\n\n\n\n## The \"ROUGHTIM\" packet format is redundant\n\n\n\nThe addition of the constant \"ROUGHTIM\" plus additional length field is redundant to \n\nthe message format (which also has a length field). 
The value this additional \n\npacket format is not clear.\n\n\n\nSuggestion: use \"bare\" Roughtime messages as the packet format \n\n\n\n## Stick with SHA-512; eliminate use of truncated SHA-512/256 \n\n\n\nTruncated SHA-512/256 is performed by a) compute SHA-512, then b) truncate the result. \n\nThe resulting computational effort of SHA-512 and SHA-512/256 is equivalent. \n\n\n\nThe draft utilizes SHA-512/256 for its 32 byte output, as opposed to 64 bytes for\n\nSHA-512. The motivation for this change is unclear and it complicates implementations\n\nwhich now need two hashing primitives (SHA-512/256 initialization is different than SHA-512).\n\n\n\nSuggestion: use SHA-512 throughout and drop any use of SHA-512/256\n\n\n\n## References \n\n\n\n* [AWS-smear] https://aws.amazon.com/blogs/aws/look-before-you-leap-the-coming-leap-second-and-aws/\n\n* [CF-DDoS] https://www.cloudflare.com/learning/ddos/ntp-amplification-ddos-attack/\n\n* [CWE-190] https://cwe.mitre.org/data/definitions/190.html\n\n* [CWE-TOP25] https://cwe.mitre.org/top25/archive/2019/2019_cwe_top25.html\n\n* [draft02] https://tools.ietf.org/html/draft-ietf-ntp-roughtime-02\n\n* [Google-smear] https://developers.google.com/time/smear\n\n* [int08h] https://int08h.com/post/to-catch-a-lying-timeserver/#keeping-response-sizes-compact\n\n* [roughtime] https://roughtime.googlesource.com/roughtime\n\n\n", "file_path": "doc/rfc-commentary.md", "rank": 92, "score": 8.07303485381788 }, { "content": " interface: \"127.0.0.1\".to_string(),\n\n seed: hex::decode(\"a32049da0ffde0ded92ce10a0230d35fe615ec8461c14986baa63fe3b3bac3db\")\n\n .unwrap(),\n\n batch_size: DEFAULT_BATCH_SIZE,\n\n status_interval: DEFAULT_STATUS_INTERVAL,\n\n kms_protection: KmsProtection::Plaintext,\n\n health_check_port: None,\n\n client_stats: false,\n\n fault_percentage: 0\n\n }\n\n }\n\n}\n\n\n\nimpl ServerConfig for MemoryConfig {\n\n fn interface(&self) -> &str {\n\n self.interface.as_ref()\n\n }\n\n\n\n fn port(&self) -> u16 {\n\n 
self.port\n", "file_path": "src/config/memory.rs", "rank": 93, "score": 8.054951151602735 }, { "content": "\n\n /// Returns a slice of the values in the message\n\n pub fn values(&self) -> &[Vec<u8>] {\n\n &self.values\n\n }\n\n\n\n /// Converts the message into a `HashMap` mapping each tag to its value\n\n pub fn into_hash_map(self) -> HashMap<Tag, Vec<u8>> {\n\n self.tags.into_iter().zip(self.values.into_iter()).collect()\n\n }\n\n\n\n /// Encode this message into its on-the-wire representation.\n\n pub fn encode(&self) -> Result<Vec<u8>, Error> {\n\n let num_tags = self.tags.len();\n\n let mut out = Vec::with_capacity(self.encoded_size());\n\n\n\n // number of tags\n\n out.write_u32::<LittleEndian>(num_tags as u32)?;\n\n\n\n // offset(s) to values, IFF there are two or more tags\n", "file_path": "src/message.rs", "rank": 94, "score": 7.861144355579092 }, { "content": "## Description \n\n\n\nThe server's long-term identity can be protected by encrypting it, storing the encrypted value\n\nin the configuration, and invoking a cloud key management system to temporarily decrypt \n\n(in memory) the long-term identity at server start-up. \n\n\n\nThis way the server's long-term identity is never stored in plaintext. Instead the encrypted \n\nlong-term identity \"blob\" is safe to store on disk, on Github, in a container, etc. Ability \n\nto access the unencrypted identity is controlled \"out of band\" by the KMS system.\n\n\n\n## How to enable KMS support\n\n\n\nKMS support must be compiled-in. To enable:\n\n\n\n```bash\n\n# Build with Google Cloud KMS support\n\n$ cargo build --release --features \"gcpkms\"\n\n\n\n# Build with AWS KMS support\n\n$ cargo build --release --features \"awskms\"\n\n```\n\n\n\n## Google or Amazon: choose one and one only\n\n\n\nSadly, due to incompatibilities with dependencies of the KMS libraries, only **one** \n\nKMS system can be enabled at a time. 
Attempting `--features \"awskms,gcpkms\"` will result\n\nin a build failure.\n\n\n\n## Using `roughtime-kms` to encrypt the long-term seed\n\n\n\nUse the command line tool `roughtime-kms` to encrypt the seed value for the \n\nserver's long-term identity. To do this you will need: \n\n\n\n 1. The long-term key seed value \n\n 2. Access credentials for your cloud of choice\n\n 3. An identifier for the KMS key to be used\n\n 4. Necessary permissions to perform symmetric encrypt/decrypt operations\n\n using the selected key\n\n\n\nFor Amazon the key identifier is an ARN in the form:\n\n```\n\narn:aws:kms:SOME_AWS_REGION:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\n\n```\n\n\n\nFor Google the key identifier is a resource ID in the form:\n\n```\n\nprojects/PROJECT_NAME/locations/GCP_LOCATION/keyRings/KEYRING_NAME/cryptoKeys/KEY_NAME\n\n```\n\n\n\n### AWS Example\n\n\n", "file_path": "doc/OPTIONAL-FEATURES.md", "rank": 95, "score": 7.806351068697672 }, { "content": " config: Box<dyn ServerConfig>,\n\n online_key: OnlineKey,\n\n cert_bytes: Vec<u8>,\n\n\n\n socket: UdpSocket,\n\n health_listener: Option<TcpListener>,\n\n poll_duration: Option<Duration>,\n\n grease: Grease,\n\n timer: Timer<()>,\n\n poll: Poll,\n\n merkle: MerkleTree,\n\n requests: Vec<(Vec<u8>, SocketAddr)>,\n\n buf: [u8; 65_536],\n\n\n\n public_key: String,\n\n\n\n stats: Box<dyn ServerStats>,\n\n\n\n // Used to send requests to ourselves in fuzzing mode\n\n #[cfg(fuzzing)]\n", "file_path": "src/server.rs", "rank": 96, "score": 7.7911776097216645 }, { "content": "// Copyright 2017-2019 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" 
BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::message::RtMessage;\n\nuse crate::sign::Signer;\n\nuse crate::tag::Tag;\n\n\n\nuse crate::SIGNED_RESPONSE_CONTEXT;\n\n\n", "file_path": "src/key/online.rs", "rank": 97, "score": 7.699155369079509 }, { "content": " fn modified_ciphertext_is_detected() {\n\n let kms = MockKmsProvider {};\n\n let plaintext = Vec::from(\"This is the plaintext used for this test 4\");\n\n\n\n let enc_result = EnvelopeEncryption::encrypt_seed(&kms, &plaintext);\n\n assert_eq!(enc_result.is_ok(), true);\n\n\n\n let ciphertext = enc_result.unwrap();\n\n assert_ne!(plaintext, ciphertext);\n\n\n\n // start corruption 4 bytes in, after the DEK and NONCE length fields\n\n for i in (DEK_LEN_FIELD + NONCE_LEN_FIELD)..ciphertext.len() {\n\n let mut ciphertext_copy = ciphertext.clone();\n\n // flip some bits\n\n ciphertext_copy[i] = ciphertext[i].wrapping_add(1);\n\n\n\n let dec_result = EnvelopeEncryption::decrypt_seed(&kms, &ciphertext_copy);\n\n\n\n match dec_result.expect_err(\"Expected a KmsError error here\") {\n\n KmsError::OperationFailed(msg) => assert!(msg.contains(\"failed to decrypt\")),\n\n e => panic!(\"unexpected result {:?}\", e),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/kms/envelope.rs", "rank": 98, "score": 7.49921744896766 }, { "content": " /// * `value` - Value for the tag.\n\n ///\n\n pub fn add_field(&mut self, tag: Tag, value: &[u8]) -> Result<(), Error> {\n\n if let Some(last_tag) = self.tags.last() {\n\n if tag <= *last_tag {\n\n return Err(Error::TagNotStrictlyIncreasing(tag));\n\n }\n\n }\n\n\n\n self.tags.push(tag);\n\n self.values.push(value.to_vec());\n\n\n\n Ok(())\n\n }\n\n\n\n /// Retrieve the value associated with `tag`, if present.\n\n ///\n\n /// ## Arguments\n\n ///\n\n /// * `tag` - The [`Tag`](enum.Tag.html) to try and retrieve.\n", 
"file_path": "src/message.rs", "rank": 99, "score": 7.464598956666841 } ]
Rust
embrs/src/stm32f4/rcc/mod.rs
DCasFer/RustSimpleScheduler
25cda9d5d9601a56296589861556e7c173fbff04
use arm_m; use arm_m::reg::AtomicReg; use super::flash::FLASH; pub mod raw; pub use self::raw::{AhbPrescaler, ApbPrescaler, Cr, Cfgr, Pllcfgr}; pub use self::raw::Pllp as SysPrescaler; use self::raw::ClockDivisor; pub const BOOT_CLOCK_HZ : u32 = 16_000_000; pub struct Rcc; pub struct ClockConfig { pub crystal_hz: f32, pub crystal_divisor: u32, pub vco_multiplier: u32, pub general_divisor: SysPrescaler, pub pll48_divisor: u32, pub ahb_divisor: Option<AhbPrescaler>, pub apb1_divisor: Option<ApbPrescaler>, pub apb2_divisor: Option<ApbPrescaler>, pub flash_latency: u32, } pub struct ClockSpeeds { pub cpu: f32, pub ahb: f32, pub apb1: f32, pub apb2: f32, pub pll48: f32, } impl ClockSpeeds { pub fn get_clock_for<P: PeripheralName>(&self, p: P) -> f32 { p.get_clock(self) } } impl ClockConfig { pub fn compute_speeds(&self) -> ClockSpeeds { let vco_in_hz = self.crystal_hz / (self.crystal_divisor as f32); let vco_out_hz = vco_in_hz * (self.vco_multiplier as f32); let cpu = vco_out_hz / (self.general_divisor.to_divisor() as f32); ClockSpeeds { cpu: cpu, ahb: cpu / (self.ahb_divisor.to_divisor() as f32), apb1: cpu / (self.apb1_divisor.to_divisor() as f32), apb2: cpu / (self.apb2_divisor.to_divisor() as f32), pll48: vco_out_hz / (self.pll48_divisor as f32), } } } pub trait PeripheralName { fn enable_clock(self, rcc: &Rcc); fn get_clock(self, speeds: &ClockSpeeds) -> f32; } impl Rcc { fn reg(&self) -> &raw::Registers { unsafe { &*(raw::RCC_ADDRESS as *const raw::Registers) } } pub fn enable_clock<P: PeripheralName>(&self, p: P) { p.enable_clock(self); arm_m::data_synchronization_barrier(); } pub fn read_cr(&self) -> Cr { Cr(self.reg().cr.get()) } pub fn write_cr(&self, v: Cr) { self.reg().cr.set(v.0) } pub fn update_cr<F: FnOnce(Cr) -> Cr>(&self, f: F) { self.write_cr(f(self.read_cr())) } pub fn read_cfgr(&self) -> Cfgr { Cfgr(self.reg().cfgr.get()) } pub fn write_cfgr(&self, v: Cfgr) { self.reg().cfgr.set(v.0) } pub fn update_cfgr<F: FnOnce(Cfgr) -> Cfgr>(&self, f: F) { 
self.write_cfgr(f(self.read_cfgr())) } pub fn read_pllcfgr(&self) -> Pllcfgr { Pllcfgr(self.reg().pllcfgr.get()) } pub fn write_pllcfgr(&self, v: Pllcfgr) { self.reg().pllcfgr.set(v.0) } pub fn update_pllcfgr<F: FnOnce(Pllcfgr) -> Pllcfgr>(&self, f: F) { self.write_pllcfgr(f(self.read_pllcfgr())) } pub fn configure_clocks(&self, cfg: &ClockConfig) { self.update_cr(|v| v.with_hsion(true)); while !self.read_cr().get_hsirdy() {} self.update_cfgr(|v| v.with_sw(raw::ClockSwitch::Hsi)); while self.read_cfgr().get_sws() != Ok(raw::ClockSwitch::Hsi) {} self.update_cr(|v| v.with_pllon(false)); while self.read_cr().get_pllrdy() {} self.update_cfgr(|v| v.with_hpre(cfg.ahb_divisor) .with_ppre1(cfg.apb1_divisor) .with_ppre2(cfg.apb2_divisor)); FLASH.update_acr(|v| v.with_latency(cfg.flash_latency)); self.update_cr(|v| v.with_hseon(true)); while !self.read_cr().get_hserdy() {} self.update_pllcfgr(|v| v.with_pllm(cfg.crystal_divisor) .with_plln(cfg.vco_multiplier) .with_pllp(cfg.general_divisor) .with_pllq(cfg.pll48_divisor) .with_pllsrc(raw::PllSource::Hse)); self.update_cr(|v| v.with_pllon(true)); while !self.read_cr().get_pllrdy() {} self.update_cfgr(|v| v.with_sw(raw::ClockSwitch::Pll)); while self.read_cfgr().get_sws() != Ok(raw::ClockSwitch::Pll) {} } } #[derive(Copy, Clone)] pub enum AhbBus { Ahb1 = 0, Ahb2 = 1, Ahb3 = 2, } macro_rules! 
peripheral_enum { ( $(#[$m:meta])* pub enum $tyname:ident ($bty:ident) { $( $(#[$e_m:meta])* p $name:ident = $bus:tt | $idx:tt | $rst:tt | $clk:tt | $lp:tt, )* } ) => { $(#[$m])* #[derive(Copy, Clone, Eq, PartialEq)] #[repr(u32)] pub enum $tyname { $( $(#[$e_m])* $name = ($bty::$bus as u32) | ($idx << 8) | ($rst << 16) | ($clk << 17) | ($lp << 18), )* } impl $tyname { #[inline] pub fn get_bus(self) -> $bty { let idx = (self as u32) & 0xF; unsafe { ::core::mem::transmute(idx as u8) } } #[inline] pub fn get_bit_index(self) -> u32 { ((self as u32) >> 8) & 0x1F } #[inline] pub fn has_rst(self) -> bool { ((self as u32) & (1 << 16)) != 0 } #[inline] pub fn has_enr(self) -> bool { ((self as u32) & (1 << 17)) != 0 } #[inline] pub fn has_lpenr(self) -> bool { ((self as u32) & (1 << 18)) != 0 } } }; } peripheral_enum! { pub enum AhbPeripheral (AhbBus) { p GpioA = Ahb1 | 0 | 1 | 1 | 1, p GpioB = Ahb1 | 1 | 1 | 1 | 1, p GpioC = Ahb1 | 2 | 1 | 1 | 1, p GpioD = Ahb1 | 3 | 1 | 1 | 1, p GpioE = Ahb1 | 4 | 1 | 1 | 1, p GpioF = Ahb1 | 5 | 1 | 1 | 1, p GpioG = Ahb1 | 6 | 1 | 1 | 1, p GpioH = Ahb1 | 7 | 1 | 1 | 1, p GpioI = Ahb1 | 8 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p GpioJ = Ahb1 | 9 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p GpioK = Ahb1 | 10 | 1 | 1 | 1, p Crc = Ahb1 | 12 | 1 | 1 | 1, p FlashIface = Ahb1 | 15 | 0 | 0 | 1, p Sram1 = Ahb1 | 16 | 0 | 0 | 1, p Sram2 = Ahb1 | 17 | 0 | 0 | 1, p BackupSram = Ahb1 | 18 | 0 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Sram3 = Ahb1 | 19 | 0 | 0 | 1, p CcmDataRam = Ahb1 | 20 | 0 | 1 | 0, p Dma1 = Ahb1 | 21 | 1 | 1 | 1, p Dma2 = Ahb1 | 22 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Dma2d = Ahb1 | 23 | 1 | 1 | 1, p Ethernet = Ahb1 | 25 | 1 | 1 | 1, p EthernetTx = Ahb1 | 26 | 0 | 1 | 1, p EthernetRx = Ahb1 | 27 | 0 | 1 | 1, p EthernetPtp = Ahb1 | 28 | 0 | 1 | 1, p UsbOtgHs = Ahb1 | 29 | 1 | 1 | 1, p UsbOtgHsUlpi = Ahb1 | 30 | 0 | 1 | 1, p Dcmi = Ahb2 | 0 | 1 | 1 | 1, p Cryp = Ahb2 | 4 | 
1 | 1 | 1, p Hash = Ahb2 | 5 | 1 | 1 | 1, p Rng = Ahb2 | 6 | 1 | 1 | 1, p UsbOtgFs = Ahb2 | 7 | 1 | 1 | 1, p Fsmc = Ahb3 | 0 | 1 | 1 | 1, } } impl PeripheralName for AhbPeripheral { fn enable_clock(self, rcc: &Rcc) { if !self.has_enr() { panic!("cannot control clock for AHB{} idx {}", (self.get_bus() as u32) + 1, self.get_bit_index()) } rcc.reg() .ahb_enr[self.get_bus() as usize] .atomic_or(1 << self.get_bit_index()) } fn get_clock(self, speeds: &ClockSpeeds) -> f32 { speeds.ahb } } #[derive(Copy, Clone)] pub enum ApbBus { Apb1 = 0, Apb2 = 1, } peripheral_enum! { pub enum ApbPeripheral (ApbBus) { p Tim2 = Apb1 | 0 | 1 | 1 | 1, p Tim3 = Apb1 | 1 | 1 | 1 | 1, p Tim4 = Apb1 | 2 | 1 | 1 | 1, p Tim5 = Apb1 | 3 | 1 | 1 | 1, p Tim6 = Apb1 | 4 | 1 | 1 | 1, p Tim7 = Apb1 | 5 | 1 | 1 | 1, p Tim12 = Apb1 | 6 | 1 | 1 | 1, p Tim13 = Apb1 | 7 | 1 | 1 | 1, p Tim14 = Apb1 | 8 | 1 | 1 | 1, p Wwdg = Apb1 | 11 | 1 | 1 | 1, p Spi2 = Apb1 | 14 | 1 | 1 | 1, p Spi3 = Apb1 | 15 | 1 | 1 | 1, p Usart2 = Apb1 | 17 | 1 | 1 | 1, p Usart3 = Apb1 | 18 | 1 | 1 | 1, p Uart4 = Apb1 | 19 | 1 | 1 | 1, p Uart5 = Apb1 | 20 | 1 | 1 | 1, p I2c1 = Apb1 | 21 | 1 | 1 | 1, p I2c2 = Apb1 | 22 | 1 | 1 | 1, p I2c3 = Apb1 | 23 | 1 | 1 | 1, p Can1 = Apb1 | 25 | 1 | 1 | 1, p Can2 = Apb1 | 26 | 1 | 1 | 1, p Pwr = Apb1 | 28 | 1 | 1 | 1, p Dac = Apb1 | 29 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Uart7 = Apb1 | 30 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Uart8 = Apb1 | 31 | 1 | 1 | 1, p Tim1 = Apb2 | 0 | 1 | 1 | 1, p Tim8 = Apb2 | 1 | 1 | 1 | 1, p Usart1 = Apb2 | 4 | 1 | 1 | 1, p Usart6 = Apb2 | 5 | 1 | 1 | 1, p Adc1 = Apb2 | 8 | 1 | 1 | 1, p Adc2 = Apb2 | 9 | 0 | 1 | 1, p Adc3 = Apb2 | 10 | 0 | 1 | 1, p Sdio = Apb2 | 11 | 1 | 1 | 1, p Spi1 = Apb2 | 12 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Spi4 = Apb2 | 13 | 1 | 1 | 1, p Syscfg = Apb2 | 14 | 1 | 1 | 1, p Tim9 = Apb2 | 16 | 1 | 1 | 1, p Tim10 = Apb2 | 17 | 1 | 1 | 1, p Tim11 = Apb2 | 18 | 1 | 1 | 1, #[cfg(feature = 
"soc_family:stm32f4[23]")] p Spi5 = Apb2 | 20 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Spi6 = Apb2 | 21 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Sai1 = Apb2 | 22 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Ltdc = Apb2 | 26 | 1 | 1 | 1, } } impl PeripheralName for ApbPeripheral { fn enable_clock(self, rcc: &Rcc) { if !self.has_enr() { panic!("cannot control clock for APB{} idx {}", (self.get_bus() as u32) + 1, self.get_bit_index()) } rcc.reg() .apb_enr[self.get_bus() as usize] .atomic_or(1 << self.get_bit_index()) } fn get_clock(self, speeds: &ClockSpeeds) -> f32 { match self.get_bus() { ApbBus::Apb1 => speeds.apb1, ApbBus::Apb2 => speeds.apb2, } } } pub static RCC: Rcc = Rcc;
use arm_m; use arm_m::reg::AtomicReg; use super::flash::FLASH; pub mod raw; pub use self::raw::{AhbPrescaler, ApbPrescaler, Cr, Cfgr, Pllcfgr}; pub use self::raw::Pllp as SysPrescaler; use self::raw::ClockDivisor; pub const BOOT_CLOCK_HZ : u32 = 16_000_000; pub struct Rcc; pub struct ClockConfig { pub crystal_hz: f32, pub crystal_divisor: u32, pub vco_multiplier: u32, pub general_divisor: SysPrescaler, pub pll48_divisor: u32, pub ahb_divisor: Option<AhbPrescaler>, pub apb1_divisor: Option<ApbPrescaler>, pub apb2_divisor: Option<ApbPrescaler>, pub flash_latency: u32, } pub struct ClockSpeeds { pub cpu: f32, pub ahb: f32, pub apb1: f32, pub apb2: f32, pub pll48: f32, } impl ClockSpeeds { pub fn get_clock_for<P: PeripheralName>(&self, p: P) -> f32 { p.get_clock(self) } } impl ClockConfig { pub fn compute_speeds(&self) -> ClockSpeeds { let vco_in_hz = self.crystal_hz / (self.crystal_divisor as f32); let vco_out_hz = vco_in_hz * (self.vco_multiplier as f32); let cpu = vco_out_hz / (self.general_divisor.to_divisor() as f32); ClockSpeeds { cpu: cpu, ahb: cpu / (self.ahb_divisor.to_divisor() as f32), apb1: cpu / (self.apb1_divisor.to_divisor() as f32), apb2: cpu / (self.apb2_divisor.to_divisor() as f32), pll48: vco_out_hz / (self.pll48_divisor as f32), } } } pub trait PeripheralName { fn enable_clock(self, rcc: &Rcc); fn get_clock(self, speeds: &ClockSpeeds) -> f32; } impl Rcc { fn reg(&self) -> &raw::Registers { unsafe { &*(raw::RCC_ADDRESS as *const raw::Registers) } } pub fn enable_clock<P: PeripheralName>(&self, p: P) { p.enable_clock(self); arm_m::data_synchronization_barrier(); } pub fn read_cr(&self) -> Cr { Cr(self.reg().cr.get()) } pub fn write_cr(&self, v: Cr) { self.reg().cr.set(v.0) } pub fn update_cr<F: FnOnce(Cr) -> Cr>(&self, f: F) { self.write_cr(f(self.read_cr())) } pub fn read_cfgr(&self) -> Cfgr { Cfgr(self.reg().cfgr.get()) } pub fn write_cfgr(&self, v: Cfgr) { self.reg().cfgr.set(v.0) } pub fn update_cfgr<F: FnOnce(Cfgr) -> Cfgr>(&self, f: F) { 
self.write_cfgr(f(self.read_cfgr())) } pub fn read_pllcfgr(&self) -> Pllcfgr { Pllcfgr(self.reg().pllcfgr.get()) } pub fn write_pllcfgr(&self, v: Pllcfgr) { self.reg().pllcfgr.set(v.0) } pub fn update_pllcfgr<F: FnOnce(Pllcfgr) -> Pllcfgr>(&self, f: F) { self.write_pllcfgr(f(self.read_pllcfgr())) } pub fn configure_clocks(&self, cfg: &ClockConfig) { self.update_cr(|v| v.with_hsion(true)); while !self.read_cr().get_hsirdy() {} self.update_cfgr(|v| v.with_sw(raw::ClockSwitch::Hsi)); while self.read_cfgr().get_sws() != Ok(raw::ClockSwitch::Hsi) {} self.update_cr(|v| v.with_pllon(false)); while self.read_cr().get_pllrdy() {} self.update_cfgr(|v| v.with_hpre(cfg.ahb_divisor) .with_ppre1(cfg.apb1_divisor) .with_ppre2(cfg.apb2_divisor)); FLASH.update_acr(|v| v.with_latency(cfg.flash_latency)); self.update_cr(|v| v.with_hseon(true)); while !self.read_cr().get_hserdy() {} self.update_pllcfgr(|v| v.with_pllm(cfg.crystal_divisor) .with_plln(cfg.vco_multiplier) .with_pllp(cfg.general_divisor) .with_pllq(cfg.pll48_divisor) .with_pllsrc(raw::PllSource::Hse)); self.update_cr(|v| v.with_pllon(true)); while !self.read_cr().get_pllrdy() {} self.update_cfgr(|v| v.with_sw(raw::ClockSwitch::Pll)); while self.read_cfgr().get_sws() != Ok(raw::ClockSwitch::Pll) {} } } #[derive(Copy, Clone)] pub enum AhbBus { Ahb1 = 0, Ahb2 = 1, Ahb3 = 2, } macro_rules! 
peripheral_enum { ( $(#[$m:meta])* pub enum $tyname:ident ($bty:ident) { $( $(#[$e_m:meta])* p $name:ident = $bus:tt | $idx:tt | $rst:tt | $clk:tt | $lp:tt, )* } ) => { $(#[$m])* #[derive(Copy, Clone, Eq, PartialEq)] #[repr(u32)] pub enum $tyname { $( $(#[$e_m])* $name = ($bty::$bus as u32) | ($idx << 8) | ($rst << 16) | ($clk << 17) | ($lp << 18), )* } impl $tyname { #[inline] pub fn get_bus(self) -> $bty { let idx = (self as u32) & 0xF; unsafe { ::core::mem::transmute(idx as u8) } } #[inline] pub fn get_bit_index(self) -> u32 { ((self as u32) >> 8) & 0x1F } #[inline] pub fn has_rst(self) -> bool { ((self as u32) & (1 << 16)) != 0 } #[inline] pub fn has_enr(self) -> bool { ((self as u32) & (1 << 17)) != 0 } #[inline] pub fn has_lpenr(self) -> bool { ((self as u32) & (1 << 18)) != 0 } } }; } peripheral_enum! { pub enum AhbPeripheral (AhbBus) { p GpioA = Ahb1 | 0 | 1 | 1 | 1, p GpioB = Ahb1 | 1 | 1 | 1 | 1, p GpioC = Ahb1 | 2 | 1 | 1 | 1, p GpioD = Ahb1 | 3 | 1 | 1 | 1, p GpioE = Ahb1 | 4 | 1 | 1 | 1, p GpioF = Ahb1 | 5 | 1 | 1 | 1, p GpioG = Ahb1 | 6 | 1 | 1 | 1, p GpioH = Ahb1 | 7 | 1 | 1 | 1, p GpioI = Ahb1 | 8 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p GpioJ = Ahb1 | 9 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p GpioK = Ahb1 | 10 | 1 | 1 | 1, p Crc = Ahb1 | 12 | 1 | 1 | 1, p FlashIface = Ahb1 | 15 | 0 | 0 | 1, p Sram1 = Ahb1 | 16 | 0 | 0 | 1, p Sram2 = Ahb1 | 17 | 0 | 0 | 1, p BackupSram = Ahb1 | 18 | 0 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Sram3 = Ahb1 | 19 | 0 | 0 | 1, p CcmDataRam = Ahb1 | 20 | 0 | 1 | 0, p Dma1 = Ahb1 | 21 | 1 | 1 | 1, p Dma2 = Ahb1 | 22 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Dma2d = Ahb1 | 23 | 1 | 1 | 1, p Ethernet = Ahb1 | 25 | 1 | 1 | 1, p EthernetTx = Ahb1 | 26 | 0 | 1 | 1, p EthernetRx = Ahb1 | 27 | 0 | 1 | 1, p EthernetPtp = Ahb1 | 28 | 0 | 1 | 1, p UsbOtgHs = Ahb1 | 29 | 1 | 1 | 1, p UsbOtgHsUlpi = Ahb1 | 30 | 0 | 1 | 1, p Dcmi = Ahb2 | 0 | 1 | 1 | 1, p Cryp = Ahb2 | 4 | 
1 | 1 | 1, p Hash = Ahb2 | 5 | 1 | 1 | 1, p Rng = Ahb2 | 6 | 1 | 1 | 1, p UsbOtgFs = Ahb2 | 7 | 1 | 1 | 1, p Fsmc = Ahb3 | 0 | 1 | 1 | 1, } } impl PeripheralName for AhbPeripheral { f
fn get_clock(self, speeds: &ClockSpeeds) -> f32 { speeds.ahb } } #[derive(Copy, Clone)] pub enum ApbBus { Apb1 = 0, Apb2 = 1, } peripheral_enum! { pub enum ApbPeripheral (ApbBus) { p Tim2 = Apb1 | 0 | 1 | 1 | 1, p Tim3 = Apb1 | 1 | 1 | 1 | 1, p Tim4 = Apb1 | 2 | 1 | 1 | 1, p Tim5 = Apb1 | 3 | 1 | 1 | 1, p Tim6 = Apb1 | 4 | 1 | 1 | 1, p Tim7 = Apb1 | 5 | 1 | 1 | 1, p Tim12 = Apb1 | 6 | 1 | 1 | 1, p Tim13 = Apb1 | 7 | 1 | 1 | 1, p Tim14 = Apb1 | 8 | 1 | 1 | 1, p Wwdg = Apb1 | 11 | 1 | 1 | 1, p Spi2 = Apb1 | 14 | 1 | 1 | 1, p Spi3 = Apb1 | 15 | 1 | 1 | 1, p Usart2 = Apb1 | 17 | 1 | 1 | 1, p Usart3 = Apb1 | 18 | 1 | 1 | 1, p Uart4 = Apb1 | 19 | 1 | 1 | 1, p Uart5 = Apb1 | 20 | 1 | 1 | 1, p I2c1 = Apb1 | 21 | 1 | 1 | 1, p I2c2 = Apb1 | 22 | 1 | 1 | 1, p I2c3 = Apb1 | 23 | 1 | 1 | 1, p Can1 = Apb1 | 25 | 1 | 1 | 1, p Can2 = Apb1 | 26 | 1 | 1 | 1, p Pwr = Apb1 | 28 | 1 | 1 | 1, p Dac = Apb1 | 29 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Uart7 = Apb1 | 30 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Uart8 = Apb1 | 31 | 1 | 1 | 1, p Tim1 = Apb2 | 0 | 1 | 1 | 1, p Tim8 = Apb2 | 1 | 1 | 1 | 1, p Usart1 = Apb2 | 4 | 1 | 1 | 1, p Usart6 = Apb2 | 5 | 1 | 1 | 1, p Adc1 = Apb2 | 8 | 1 | 1 | 1, p Adc2 = Apb2 | 9 | 0 | 1 | 1, p Adc3 = Apb2 | 10 | 0 | 1 | 1, p Sdio = Apb2 | 11 | 1 | 1 | 1, p Spi1 = Apb2 | 12 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Spi4 = Apb2 | 13 | 1 | 1 | 1, p Syscfg = Apb2 | 14 | 1 | 1 | 1, p Tim9 = Apb2 | 16 | 1 | 1 | 1, p Tim10 = Apb2 | 17 | 1 | 1 | 1, p Tim11 = Apb2 | 18 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Spi5 = Apb2 | 20 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Spi6 = Apb2 | 21 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Sai1 = Apb2 | 22 | 1 | 1 | 1, #[cfg(feature = "soc_family:stm32f4[23]")] p Ltdc = Apb2 | 26 | 1 | 1 | 1, } } impl PeripheralName for ApbPeripheral { fn enable_clock(self, rcc: &Rcc) { if !self.has_enr() { panic!("cannot control clock for APB{} 
idx {}", (self.get_bus() as u32) + 1, self.get_bit_index()) } rcc.reg() .apb_enr[self.get_bus() as usize] .atomic_or(1 << self.get_bit_index()) } fn get_clock(self, speeds: &ClockSpeeds) -> f32 { match self.get_bus() { ApbBus::Apb1 => speeds.apb1, ApbBus::Apb2 => speeds.apb2, } } } pub static RCC: Rcc = Rcc;
n enable_clock(self, rcc: &Rcc) { if !self.has_enr() { panic!("cannot control clock for AHB{} idx {}", (self.get_bus() as u32) + 1, self.get_bit_index()) } rcc.reg() .ahb_enr[self.get_bus() as usize] .atomic_or(1 << self.get_bit_index()) }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn set_primask(val: bool) {\n\n unsafe {\n\n asm!(\"msr PRIMASK, $0\"\n\n :: \"r\"(val)\n\n :: \"volatile\")\n\n }\n\n}\n\n\n\n/// Generates an instruction synchronization barrier (`ISB`) instruction.\n", "file_path": "embrs/src/arm_m/mod.rs", "rank": 1, "score": 140461.3527785074 }, { "content": "pub trait ClockDivisor {\n\n fn to_divisor(self) -> u32;\n\n}\n\n\n\nimpl ClockDivisor for ApbPrescaler {\n\n fn to_divisor(self) -> u32 {\n\n match self {\n\n ApbPrescaler::Div2 => 2,\n\n ApbPrescaler::Div4 => 4,\n\n ApbPrescaler::Div8 => 8,\n\n ApbPrescaler::Div16 => 16,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: ClockDivisor> ClockDivisor for Option<T> {\n\n fn to_divisor(self) -> u32 {\n\n self.map(|v| v.to_divisor()).unwrap_or(1)\n\n }\n\n}\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 2, "score": 134543.6966103733 }, { "content": "#[inline(always)]\n\npub fn bitfield_extract(v: u32, hi: usize, lo: usize) -> u32 {\n\n let width = hi - lo + 1;\n\n let mask : u32 = if width < core::mem::size_of::<u32>() * 8 {\n\n (1 << width) - 1\n\n } else {\n\n !0\n\n };\n\n\n\n (v >> lo) & mask\n\n}\n\n\n\n/// Given a value `v`, replaces bits `hi` through `lo` (inclusive) with the\n\n/// same number of low-order bits from `new`.\n", "file_path": "embrs/src/bits.rs", "rank": 3, "score": 123527.34919210957 }, { "content": "#[inline]\n\npub fn dma1() -> &'static Dma {\n\n unsafe {\n\n &*(0x40026000 as *const Dma)\n\n }\n\n}\n\n\n\n/// Produces a shared reference to DMA2.\n", "file_path": "embrs/src/stm32f4/dma.rs", "rank": 4, "score": 118634.10979119364 }, { "content": "#[inline]\n\npub fn dma2() -> &'static Dma {\n\n unsafe {\n\n &*(0x40026400 as *const Dma)\n\n }\n\n}\n\n\n\n\n\n/*******************************************************************************\n\n * Interrupt Register(s)\n\n */\n\n\n\nbit_wrappers! 
{\n\n /// Interrupt Register type, used by both the Interrupt Status Registers and\n\n /// the Interrupt Flag Clear Registers.\n\n ///\n\n /// Interrupt registers contain an irregularly packed array of four five-bit\n\n /// fields, describing four streams. `isr[0]` and `ifcr[0]` describe\n\n /// streams 0-3, while `isr[1]` and `ifcr[1]` describe streams 4-7. Within\n\n /// a single register we refer to the four streams (whichever they may be)\n\n /// as *relative streams* 0-3.\n", "file_path": "embrs/src/stm32f4/dma.rs", "rank": 5, "score": 118634.10979119362 }, { "content": "#[inline(always)]\n\npub fn bitfield_replace(orig: u32, hi: usize, lo: usize, new: u32) -> u32 {\n\n let width = hi - lo + 1;\n\n let mask : u32 = if width < core::mem::size_of::<u32>() * 8 {\n\n (1 << width) - 1\n\n } else {\n\n !0\n\n };\n\n\n\n (orig & !(mask << lo)) | ((new & mask) << lo)\n\n}\n\n \n\n/// Declares wrapped bits types. A wrapped bits type declaration looks like a\n\n/// newtype around an integer:\n\n///\n\n/// pub struct MyType(pub u32);\n\n///\n\n/// This macro automatically derives `Copy`, `Clone`, and `BitsWrapper`.\n\nmacro_rules! 
bit_wrappers {\n\n () => {};\n\n ($(#[$m:meta])* pub struct $name:ident(pub $ty:ty); $($rest:tt)*) => {\n", "file_path": "embrs/src/bits.rs", "rank": 6, "score": 111447.42221155678 }, { "content": "#[inline]\n\npub fn wait_for_interrupt() {\n\n unsafe {\n\n asm!(\"wfi\" :::: \"volatile\")\n\n }\n\n}\n", "file_path": "embrs/src/arm_m/mod.rs", "rank": 7, "score": 109149.18940519213 }, { "content": "#[inline]\n\npub fn data_synchronization_barrier() {\n\n unsafe {\n\n asm!(\"dsb\" :::: \"volatile\")\n\n }\n\n}\n\n\n\n/// Generates a data memory barrier (`DMB`) instruction.\n", "file_path": "embrs/src/arm_m/mod.rs", "rank": 8, "score": 106225.18690855252 }, { "content": "#[inline]\n\npub fn instruction_synchronization_barrier() {\n\n unsafe {\n\n asm!(\"isb\" :::: \"volatile\")\n\n }\n\n}\n\n\n\n/// Generates a data synchronization barrier (`DSB`) instruction.\n", "file_path": "embrs/src/arm_m/mod.rs", "rank": 9, "score": 106225.18690855252 }, { "content": "#[inline]\n\npub fn data_memory_barrier() {\n\n unsafe {\n\n asm!(\"dmb\" :::: \"volatile\")\n\n }\n\n}\n\n\n", "file_path": "embrs/src/arm_m/mod.rs", "rank": 10, "score": 106225.18690855252 }, { "content": "/// Converts `self` into a small bitwise representation. For small integers and\n\n/// C-like enumerations, this is equivalent to widening casts using `as`. 
It\n\n/// should not panic.\n\npub trait IntoBits {\n\n fn into_bits(self) -> u32;\n\n}\n\n\n\nimpl IntoBits for bool {\n\n fn into_bits(self) -> u32 {\n\n match self {\n\n false => 0,\n\n true => 1,\n\n }\n\n }\n\n}\n\n\n\nimpl IntoBits for u8 {\n\n fn into_bits(self) -> u32 {\n\n self as u32\n\n }\n\n}\n\n\n\nimpl IntoBits for u16 {\n", "file_path": "embrs/src/bits.rs", "rank": 11, "score": 90084.66623887068 }, { "content": "/// Construct `Self` from a small bitwise representation, panicking if the bits\n\n/// are invalid.\n\n///\n\n/// This trait is similar to `core::convert::From` but less general.\n\n///\n\n/// This trait is similar to `FromBits` but is used when the application is\n\n/// willing to panic if it encounters an illegal bit pattern (e.g. when reading\n\n/// from a byte register into a 256-valued enum, illegal bit patterns are really\n\n/// unlikely).\n\npub trait FromBitsTotal {\n\n fn from_bits_total(bits: u32) -> Self;\n\n}\n\n\n\n/// Maps 0 to `false` and 1 to `true`.\n\nimpl FromBitsTotal for bool {\n\n fn from_bits_total(bits: u32) -> Self {\n\n match bits {\n\n 0 => false,\n\n 1 => true,\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\n/// Maps 0..255\n\nimpl FromBitsTotal for u8 {\n\n fn from_bits_total(bits: u32) -> Self {\n\n if bits > core::u8::MAX as u32 {\n\n unreachable!()\n", "file_path": "embrs/src/bits.rs", "rank": 12, "score": 87794.10268364998 }, { "content": "/// Associates a wrapped bits type (e.g. 
the typesafe contents of a packed\n\n/// register) with both its underlying `Raw` type, and a function for\n\n/// constructing from that type.\n\npub trait BitsWrapper {\n\n /// Underlying bitwise type (often `u32`).\n\n type Raw;\n\n\n\n /// Constructor from bitwise representation.\n\n fn from_raw(v: Self::Raw) -> Self;\n\n}\n\n\n\n/// Given a value `v`, extracts bits `hi` through `lo` (inclusive).\n", "file_path": "embrs/src/bits.rs", "rank": 13, "score": 87786.5934838659 }, { "content": "pub fn task_1 (){\n\n use embrs::stm32f4::usart::*;\n\n let mut counter = 0;\n\n let infinite = true;\n\n\n\n\n\n while infinite {\n\n if (counter > 0) && (counter < 150000) {\n\n USART2.send8(b'1');\n\n gpiod().set(led_pin_12());\n\n }\n\n else if (counter > 150001) && (counter < 340000) {\n\n USART2.send8(b'0');\n\n gpiod().clear(led_pin_12());\n\n }\n\n counter = (counter + 1) % 340000;\n\n }\n\n}\n\n\n\n/* Function name : task_2() */\n\n/* Description : The second task taht will be scheduled */\n", "file_path": "scheduler/src/leds.rs", "rank": 14, "score": 87094.27398108272 }, { "content": "pub fn task_2 () {\n\n use embrs::stm32f4::usart::*;\n\n let mut counter = 0;\n\n let infinite = true;\n\n\n\n while infinite {\n\n if (counter > 0) && (counter < 150000) {\n\n USART2.send8(b'1');\n\n gpiod().set(led_pin_14())\n\n }\n\n else if (counter > 150001) && (counter < 340000) {\n\n USART2.send8(b'0');\n\n gpiod().clear(led_pin_14())\n\n }\n\n counter = (counter + 1) % 340000;\n\n }\n\n\n\n}\n", "file_path": "scheduler/src/leds.rs", "rank": 15, "score": 87094.27398108272 }, { "content": "/// Extension trait for `arm_m::Nvic` adding operations that deal in\n\n/// STM32F4-specific enumerations.\n\npub trait NvicExt {\n\n /// Ensures that an interrupt is enabled by the time this function returns.\n\n ///\n\n /// If the interrupt is pending, and the current execution priority allows\n\n /// it to preempt, the handler will have run *before this function returns*.\n\n ///\n\n /// This 
is a wrapper for `enable_irq_raw` that lets us omit the runtime\n\n /// range checks.\n\n fn enable_irq(&self, irq: Interrupt);\n\n\n\n /// Ensures that an interrupt is disabled by the time this function returns.\n\n ///\n\n /// In the presence of a concurrent or pending interrupt from `irq`,\n\n /// assuming the current execution priority would allow it to preempt, its\n\n /// handler will either execute before this function returns, or will be\n\n /// deferred.\n\n ///\n\n /// Thus, code appearing after a call to `disable_irq` in program order can\n\n /// assume it will not be preempted by this interrupt (assuming that\n\n /// some other concurrent activity, such as a separate interrupt, doesn't\n", "file_path": "embrs/src/stm32f4/irq.rs", "rank": 16, "score": 85666.67089652437 }, { "content": "/// Additional features that become available when a register contains a\n\n/// hardware-supported atomic type.\n\npub trait AtomicReg {\n\n type Type;\n\n\n\n /// Clears any bits in the register that are also set in `clear`.\n\n ///\n\n /// The effect is atomic from the perspective of other threads or\n\n /// interrupts; if there is a race (e.g. an interrupt) the update sequence\n\n /// will restart. This means this function can produce many volatile loads,\n\n /// but only one store with the final result.\n\n fn atomic_nand(&self, clear: Self::Type);\n\n\n\n /// Sets any bits in the register that are also set in `set`.\n\n ///\n\n /// The effect is atomic from the perspective of other threads or\n\n /// interrupts; if there is a race (e.g. an interrupt) the update sequence\n\n /// will restart. 
This means this function can produce many volatile loads,\n\n /// but only one store with the final result.\n\n fn atomic_or(&self, set: Self::Type);\n\n\n\n /// Clears any bits in the register that are also set in `clear`, and sets\n", "file_path": "embrs/src/arm_m/reg.rs", "rank": 17, "score": 85663.2301962462 }, { "content": "/// Construct `Self` from a small bitwise representation, without assuming that\n\n/// every possible bit pattern can be represented.\n\n///\n\n/// This trait is similar to `core::convert::From`, but less general and can\n\n/// fail.\n\n///\n\n/// This trait is similar to `FromBitsTotal` but allows illegal bit patterns to\n\n/// be processed at runtime.\n\npub trait FromBits: Sized {\n\n /// Constructs `Self` from `bits`. If `bits` is not valid (e.g. is out of\n\n /// range for an enum) returns `BadBits`.\n\n fn from_bits(bits: u32) -> BitsResult<Self>;\n\n}\n\n\n\n/// Maps 0 to `false` and 1 to `true`.\n\nimpl FromBits for bool {\n\n fn from_bits(bits: u32) -> BitsResult<Self> {\n\n match bits {\n\n 0 => Ok(false),\n\n 1 => Ok(true),\n\n _ => Err(BadBits(bits)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "embrs/src/bits.rs", "rank": 18, "score": 84916.09156698886 }, { "content": "pub fn task_test (){\n\n use embrs::stm32f4::usart::*;\n\n\n\n if gpiod().get(led_pins()).is_empty() {\n\n USART2.send8(b'1');\n\n gpiod().set(led_pins())\n\n } else {\n\n USART2.send8(b'0');\n\n gpiod().clear(led_pins())\n\n }\n\n}\n\n\n\n\n\n/* Function name : task_1() */\n\n/* Description : The first task taht will be scheduled */\n", "file_path": "scheduler/src/leds.rs", "rank": 19, "score": 84812.06607935618 }, { "content": "pub fn init_SYSTICK (){\n\n let cycles_per_toggle = HZ / TOGGLE_HZ;\n\n sys_tick::SYS_TICK.write_rvr(cycles_per_toggle - 1);\n\n sys_tick::SYS_TICK.write_csr(\n\n sys_tick::SYS_TICK.read_csr()\n\n .with_enable(true)\n\n .with_tickint(true)\n\n .with_clksource(sys_tick::ClkSource::ProcessorClock));\n\n\n\n}\n\n\n\n/* Function name : init_uart() 
*/\n\n/* Description : Enables and setup the UART which will be used by the LEDs */\n", "file_path": "scheduler/src/init.rs", "rank": 20, "score": 84812.06607935618 }, { "content": "pub fn init_leds() {\n\n // Enable clock to GPIOD so we can mess with its registers.\n\n RCC.enable_clock(AhbPeripheral::GpioD);\n\n\n\n // Configure our pins for push-pull digital output.\n\n gpiod().set_mode(led_pins(), gpio::Mode::Gpio);\n\n gpiod().set_output_type(led_pins(), gpio::OutputType::PushPull);\n\n}\n\n\n\n/* Function name : task_test() */\n\n/* Description : Used for debugging purposes\n\n It flashes two LEDs*/\n", "file_path": "scheduler/src/leds.rs", "rank": 21, "score": 84812.06607935618 }, { "content": "pub fn init_uart() {\n\n use embrs::stm32f4::usart::*;\n\n\n\n // Enable clock to USART2.\n\n RCC.enable_clock(ApbPeripheral::Usart2);\n\n\n\n USART2.update_cr1(|v| v.with_ue(true));\n\n\n\n let speeds = CLOCKS.compute_speeds();\n\n\n\n let clk = speeds.get_clock_for(ApbPeripheral::Usart2);\n\n let brr = (clk / 115200_f32 + 0.5) as u32;\n\n\n\n USART2.update_brr(|v| v.with_mantissa(brr >> 4)\n\n .with_fraction(brr & 0xF));\n\n\n\n USART2.update_cr1(|v| v.with_te(true));\n\n\n\n RCC.enable_clock(AhbPeripheral::GpioA);\n\n // Configure its TX pin (PA2) as AF7\n\n gpioa().set_alternate_function(gpio::P2, gpio::Function::AF7);\n\n gpioa().set_mode(gpio::P2, gpio::Mode::Alternate);\n\n}\n", "file_path": "scheduler/src/init.rs", "rank": 22, "score": 84812.06607935618 }, { "content": "pub fn set_control () {\n\n unsafe {\n\n asm!(\"MSR control, $0\"\n\n :: \"r\"(0x3)\n\n : \"memory\")\n\n }\n\n}\n\n\n\n/* Function name : set_PSP()\n\nParam 1: Array with task PSPs\n\nParam 2: Index of PSP_array\n\n*/\n\n/* Description : Sets PSP register to the first task.\n\n The same as __set_PSP((PSP_array[curr_task] + 8 * 8));\n\n 8 * 8 because double stack alignment*/\n\npub unsafe fn set_PSP (PSP_array_: &mut [u32], curr_task_:& u32) {\n\n let _curr_task = *curr_task_ as usize;\n\n 
asm!(\"MSR psp, $0\\n\"\n\n :: \"r\"(PSP_array_[_curr_task] + 8 * 8)\n\n : \"sp\")\n", "file_path": "scheduler/src/scheduler_core.rs", "rank": 23, "score": 82703.76453989594 }, { "content": "pub fn led_pin_14() -> gpio::PinMask {\n\n gpio::P14\n\n}\n\n\n\n/* Function name : led_pins() */\n\n/* Description : Returns the P12 and _14 from the static GPIO struct to\n\n flash two LEDs together*/\n", "file_path": "scheduler/src/leds.rs", "rank": 24, "score": 74096.48446855912 }, { "content": "pub fn led_pin_12() -> gpio::PinMask {\n\n gpio::P12\n\n}\n\n\n\n/* Function name : led_pin_14() */\n\n/* Description : Returns the P14 from the static GPIO struct */\n", "file_path": "scheduler/src/leds.rs", "rank": 25, "score": 74096.48446855912 }, { "content": "pub fn led_pins () -> gpio::PinMask {\n\n led_pin_12() | led_pin_14()\n\n}\n\n\n\n/* Function name : led_leds() */\n\n/* Description : Initializes the LEDs clock and modes */\n", "file_path": "scheduler/src/leds.rs", "rank": 26, "score": 74096.48446855912 }, { "content": "//! Reset and Clock Control (RCC) raw register interface.\n\n\n\nuse arm_m::reg::Reg;\n\n\n\n/// The RCC's hardware register layout.\n\n#[repr(C, packed)]\n\npub struct Registers {\n\n pub cr: Reg<u32>,\n\n pub pllcfgr: Reg<u32>,\n\n pub cfgr: Reg<u32>,\n\n pub cir: Reg<u32>,\n\n /// AHB peripheral reset registers AHB1RSTR - AHB3RSTR.\n\n ///\n\n /// Note that they are numbered from zero in this array.\n\n pub ahb_rstr: [Reg<u32>; 3],\n\n pub _reserved_1c: Reg<u32>,\n\n /// APB peripheral reset registers APB1RSTR - APB2RSTR.\n\n ///\n\n /// Note that they are numbered from zero in this array.\n\n pub apb_rstr: [Reg<u32>; 2],\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 27, "score": 57157.83315416156 }, { "content": " pub struct Cfgr(pub u32);\n\n /// Wrapper for the PLL Configuration Register bits.\n\n pub struct Pllcfgr(pub u32);\n\n}\n\n\n\nimpl Cr {\n\n bitfield_accessors! 
{\n\n /// Ready flag for the PLLI2S.\n\n #[cfg(feature = \"soc_family:stm32f4[23]\")]\n\n pub total [27] get_plli2srdy / with_plli2srdy: bool,\n\n /// Turns the PLLI2S on/off.\n\n #[cfg(feature = \"soc_family:stm32f4[23]\")]\n\n pub total [26] get_plli2son / with_plli2son: bool,\n\n /// Ready flag for the main PLL.\n\n pub total [25] get_pllrdy / with_pllrdy: bool,\n\n /// Turns the main PLL on/off.\n\n pub total [24] get_pllon / with_pllon: bool,\n\n /// Turns the Clock Security System (CSS) on/off.\n\n pub total [19] get_csson / with_csson: bool,\n\n /// When `true`, bypasses the HSE oscillator, using the external clock\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 28, "score": 57156.2693255302 }, { "content": "\n\nimpl ClockDivisor for AhbPrescaler {\n\n fn to_divisor(self) -> u32 {\n\n match self {\n\n AhbPrescaler::Div2 => 2,\n\n AhbPrescaler::Div4 => 4,\n\n AhbPrescaler::Div8 => 8,\n\n AhbPrescaler::Div16 => 16,\n\n AhbPrescaler::Div64 => 64,\n\n AhbPrescaler::Div128 => 128,\n\n AhbPrescaler::Div256 => 256,\n\n AhbPrescaler::Div512 => 512,\n\n }\n\n }\n\n}\n\n\n\nimpl Pllcfgr {\n\n bitfield_accessors! 
{\n\n /// Prescaler for the PLL48 domain.\n\n ///\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 29, "score": 57150.85077828587 }, { "content": " pub total [26] get_mco1pre_en / with_mco1pre_en: bool,\n\n /// Raw divisor for the MCO1 prescaler; see `get_mco1` and `with_mco1`.\n\n pub total [25:24] get_mco1pre_div / with_mco1pre_div: McoPre,\n\n /// Selects the clock fed to the I2S peripheral(s).\n\n pub total [23] get_i2ssrc / with_i2ssrc: I2sSrc,\n\n /// Controls the clock output on the MCO1 pin.\n\n pub total [22:21] get_mco1 / with_mco1: Mco1,\n\n // TODO RTCPRE here\n\n /// Raw enable for the APB2 prescaler; see `get_ppre2` and `with_ppre2`.\n\n pub total [15] get_ppre2_en / with_ppre2_en: bool,\n\n /// Raw divisor for the APB2 prescaler; see `get_ppre2` and\n\n /// `with_ppre2`.\n\n pub total [14:13] get_ppre2_div / with_ppre2_div: ApbPrescaler,\n\n /// Raw enable for the APB1 prescaler; see `get_ppre1` and `with_ppre1`.\n\n pub total [12] get_ppre1_en / with_ppre1_en: bool,\n\n /// Raw divisor for the APB1 prescaler; see `get_ppre1` and\n\n /// `with_ppre1`.\n\n pub total [11:10] get_ppre1_div / with_ppre1_div: ApbPrescaler,\n\n /// Raw enable for the AHB prescaler; see `get_hpre` and `with_hpre`.\n\n pub total [ 7] get_hpre_en / with_hpre_en: bool,\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 30, "score": 57150.493422356565 }, { "content": " /// Note that they are numbered from zero in this array.\n\n pub apb_lpenr: [Reg<u32>; 2],\n\n pub _reserved_68: Reg<u32>,\n\n pub _reserved_6c: Reg<u32>,\n\n pub bdcr: Reg<u32>,\n\n pub csr: Reg<u32>,\n\n pub _reserved_78: Reg<u32>,\n\n pub _reserved_7c: Reg<u32>,\n\n pub sscgr: Reg<u32>,\n\n pub plli2scfgr: Reg<u32>,\n\n #[cfg(feature = \"soc_family:stm32f4[23]\")]\n\n pub pllsaicfgr: Reg<u32>,\n\n #[cfg(feature = \"soc_family:stm32f4[23]\")]\n\n pub dckcfgr: Reg<u32>,\n\n}\n\n\n\nbit_wrappers! 
{\n\n /// Wrapper for the Clock Control Register bits.\n\n pub struct Cr(pub u32);\n\n /// Wrapper for the Clock Configuration Register bits.\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 31, "score": 57148.41570400408 }, { "content": " /// signal directly where the HSE clock would otherwise be used.\n\n pub total [18] get_hsebyp / with_hsebyp: bool,\n\n /// Ready flag for the HSE oscillator.\n\n pub total [17] get_hserdy / with_hserdy: bool,\n\n /// Turns the HSE oscillator on/off.\n\n pub total [16] get_hseon / with_hseon: bool,\n\n /// Internal HSI calibration bits, set by hardware at startup.\n\n pub total [15:8] get_hsical / with_hsical: u8,\n\n /// HSI trim adjusts the frequency of the HSI oscillator.\n\n pub total [7:3] get_hsitrim / with_hsitrim: u32,\n\n /// Ready flag for the HSI oscillator.\n\n pub total [1] get_hsirdy / with_hsirdy: bool,\n\n /// Turns the HSI oscillator on/off.\n\n pub total [0] get_hsion / with_hsion: bool,\n\n }\n\n}\n\n\n\n/// Wraps up a pattern we use repeatedly below, where we turn an enable flag and\n\n/// a prescaler selection into an optional prescaler.\n\nmacro_rules! en_option_accessors {\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 32, "score": 57148.107623788484 }, { "content": " if let Some(wrapped) = v {\n\n self.$with_div(wrapped).$with_en(true)\n\n } else {\n\n self.$with_en(false)\n\n }\n\n }\n\n\n\n en_option_accessors!{$($rest)*}\n\n };\n\n}\n\n\n\nimpl Cfgr {\n\n bitfield_accessors! 
{\n\n /// Controls the clock output on the MCO2 pin.\n\n pub total [31:30] get_mco2 / with_mco2: Mco2,\n\n /// Raw enable for the MCO2 prescaler; see `get_mco2` and `with_mco2`.\n\n pub total [29] get_mco2pre_en / with_mco2pre_en: bool,\n\n /// Raw divisor for the MCO2 prescaler; see `get_mco2` and `with_mco2`.\n\n pub total [28:27] get_mco2pre_div / with_mco2pre_div: McoPre,\n\n /// Raw enable for the MCO1 prescaler; see `get_mco1` and `with_mco1`.\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 33, "score": 57147.43596486438 }, { "content": " /// Raw divisor for the AHB prescaler; see `get_hpre` and `with_hpre`.\n\n pub total [ 6: 4] get_hpre_div / with_hpre_div: AhbPrescaler,\n\n /// Reads as the currently selected system clock source. After writing\n\n /// `Cfgr` with a new value chosen by `with_sw`, applications can\n\n /// read `Cfgr` and check this field to find out when their setting has\n\n /// taken effect.\n\n pub [ 3: 2] get_sws / with_sws: ClockSwitch,\n\n /// Selects the system clock source. Selections written to `Cfgr` do\n\n /// not take effect immediately; monitor by re-reading and checking\n\n /// `get_sws`.\n\n pub [ 1: 0] get_sw / with_sw: ClockSwitch,\n\n }\n\n\n\n en_option_accessors!{\n\n /// Selects the (optional) prescaler used on the MCO2 output.\n\n ///\n\n /// This maps to the MCO2PRE field described in ST's documentation, but\n\n /// wraps up all the \"don't care\" patterns in `None`. 
If you need to\n\n /// write a specific \"don't care\" pattern for some reason, see the raw\n\n /// accessors `with_mco2pre_div` and `with_mco2pre_en`.\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 34, "score": 57147.16894772701 }, { "content": " pub _reserved_28: Reg<u32>,\n\n pub _reserved_2c: Reg<u32>,\n\n /// AHB clock enable registers AHB1ENR - AHB3ENR.\n\n ///\n\n /// Note that they are numbered from zero in this array.\n\n pub ahb_enr: [Reg<u32>; 3],\n\n pub _reserved_3c: Reg<u32>,\n\n /// APB clock enable registers APB1ENR - APB2ENR.\n\n ///\n\n /// Note that they are numbered from zero in this array.\n\n pub apb_enr: [Reg<u32>; 2],\n\n pub _reserved_48: Reg<u32>,\n\n pub _reserved_4c: Reg<u32>,\n\n /// AHB low power clock enable registers AHB1LPENR - AHB3LPENR.\n\n ///\n\n /// Note that they are numbered from zero in this array.\n\n pub ahb_lpenr: [Reg<u32>; 3],\n\n pub _reserved_5c: Reg<u32>,\n\n /// APB low power clock enable registers APB1LPENR - APB2LPENR.\n\n ///\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 35, "score": 57147.00498141112 }, { "content": " /// Derives the clock used for USB OTG FS, SDIO, and RNG from the VCO\n\n /// frequency.\n\n pub total [27:24] get_pllq / with_pllq: u32,\n\n /// Input clock for both the main PLL and PLLI2S.\n\n pub total [22] get_pllsrc / with_pllsrc: PllSource,\n\n /// Prescaler for the system clock domain.\n\n ///\n\n /// Derives the PLL's system clock output from the VCO frequency. 
Note\n\n /// that this determines the PLL's system clock *output*; to make this\n\n /// the actual system clock, the PLL must be selected in `Cfgr`.\n\n pub total [17:16] get_pllp / with_pllp: Pllp,\n\n /// Multiplication factor for the VCO.\n\n ///\n\n /// Determines the internal VCO frequency by multiplying the PLL input\n\n /// frequency.\n\n pub total [14: 6] get_plln / with_plln: u32,\n\n /// Prescaler for the PLL input frequency.\n\n ///\n\n /// Derives the PLL input frequency from the PLL source.\n\n pub total [ 5: 0] get_pllm / with_pllm: u32,\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 36, "score": 57146.33920726789 }, { "content": "\n\n // TODO model RTCPRE\n\n\n\n /// Prescaler options for the APB clocks (relative to the AHB clock).\n\n pub bit_enum ApbPrescaler {\n\n Div2 = 0b00,\n\n Div4 = 0b01,\n\n Div8 = 0b10,\n\n Div16 = 0b11,\n\n }\n\n\n\n /// Prescaler options for the AHB clocks (relative to the system clock).\n\n pub bit_enum AhbPrescaler {\n\n Div2 = 0b000,\n\n Div4 = 0b001,\n\n Div8 = 0b010,\n\n Div16 = 0b011,\n\n Div64 = 0b100,\n\n Div128 = 0b101,\n\n Div256 = 0b110,\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 37, "score": 57145.16954821008 }, { "content": " }\n\n}\n\n\n\nbit_enums! 
{\n\n /// Options for the PLL source clock.\n\n pub bit_enum PllSource {\n\n Hsi = 0,\n\n Hse = 1,\n\n }\n\n\n\n /// Options for deriving the system clock from the PLL's VCO.\n\n pub bit_enum Pllp {\n\n Div2 = 0b00,\n\n Div4 = 0b01,\n\n Div6 = 0b10,\n\n Div8 = 0b11,\n\n }\n\n}\n\n\n\nimpl ClockDivisor for Pllp {\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 38, "score": 57145.08391269525 }, { "content": " pub bit_enum McoPre {\n\n Div2 = 0b00,\n\n Div3 = 0b01,\n\n Div4 = 0b10,\n\n Div5 = 0b11,\n\n }\n\n\n\n /// Clocks that can be used to feed the I2S peripheral(s).\n\n pub bit_enum I2sSrc {\n\n Plli2s = 0,\n\n I2sCkin = 1,\n\n }\n\n\n\n /// Clocks that can be output on the MCO1 pin.\n\n pub bit_enum Mco1 {\n\n Hsi = 0b00,\n\n Lse = 0b01,\n\n Hse = 0b10,\n\n Pll = 0b11,\n\n }\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 39, "score": 57144.8305560391 }, { "content": " fn to_divisor(self) -> u32 {\n\n match self {\n\n Pllp::Div2 => 2,\n\n Pllp::Div4 => 4,\n\n Pllp::Div6 => 6,\n\n Pllp::Div8 => 8,\n\n }\n\n }\n\n}\n\n\n\npub const RCC_ADDRESS : usize = 0x40023800_usize;\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 40, "score": 57144.65168182608 }, { "content": " /// wraps up all the \"don't care\" patterns in `None`. If you need to\n\n /// write a specific \"don't care\" pattern for some reason, see the raw\n\n /// accessors `with_hpre_div` and `with_hpre_en`.\n\n enable get_hpre_en / with_hpre_en\n\n value get_hpre_div / with_hpre_div : AhbPrescaler\n\n as get_hpre / with_hpre;\n\n\n\n }\n\n}\n\n\n\nbit_enums! 
{\n\n /// Clocks that can be output on the MCO2 pin.\n\n pub bit_enum Mco2 {\n\n Sysclk = 0b00,\n\n Plli2s = 0b01,\n\n Hse = 0b10,\n\n Pll = 0b11,\n\n }\n\n\n\n /// Prescaler options for the MCOx pins.\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 41, "score": 57144.507165230665 }, { "content": " Div512 = 0b111,\n\n }\n\n\n\n /// Clocks that can be used as the system clock source.\n\n pub bit_enum ClockSwitch {\n\n Hsi = 0b00,\n\n Hse = 0b01,\n\n Pll = 0b10,\n\n }\n\n}\n\n\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 42, "score": 57144.45490535974 }, { "content": " /// accessors `with_ppre2_div` and `with_ppre2_en`.\n\n enable get_ppre2_en / with_ppre2_en\n\n value get_ppre2_div / with_ppre2_div : ApbPrescaler\n\n as get_ppre2 / with_ppre2;\n\n\n\n /// Selects the (optional) prescaler used to derive the APB1 clock from\n\n /// the AHB clock.\n\n ///\n\n /// This maps to the PPRE1 field described in ST's documentation, but\n\n /// wraps up all the \"don't care\" patterns in `None`. If you need to\n\n /// write a specific \"don't care\" pattern for some reason, see the raw\n\n /// accessors `with_ppre1_div` and `with_ppre1_en`.\n\n enable get_ppre1_en / with_ppre1_en\n\n value get_ppre1_div / with_ppre1_div : ApbPrescaler\n\n as get_ppre1 / with_ppre1;\n\n\n\n /// Selects the (optional) prescaler used to derive the AHB clock from\n\n /// the system clock.\n\n ///\n\n /// This maps to the HPRE field described in ST's documentation, but\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 43, "score": 57143.99268021988 }, { "content": " enable get_mco2pre_en / with_mco2pre_en\n\n value get_mco2pre_div / with_mco2pre_div : McoPre\n\n as get_mco2pre / with_mco2pre;\n\n\n\n /// Selects the (optional) prescaler used on the MCO1 output.\n\n ///\n\n /// This maps to the MCO1PRE field described in ST's documentation, but\n\n /// wraps up all the \"don't care\" patterns in `None`. 
If you need to\n\n /// write a specific \"don't care\" pattern for some reason, see the raw\n\n /// accessors `with_mco1pre_div` and `with_mco1pre_en`.\n\n enable get_mco1pre_en / with_mco1pre_en\n\n value get_mco1pre_div / with_mco1pre_div : McoPre\n\n as get_mco1pre / with_mco1pre;\n\n\n\n /// Selects the (optional) prescaler used to derive the APB2 clock from\n\n /// the AHB clock.\n\n ///\n\n /// This maps to the PPRE2 field described in ST's documentation, but\n\n /// wraps up all the \"don't care\" patterns in `None`. If you need to\n\n /// write a specific \"don't care\" pattern for some reason, see the raw\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 44, "score": 57143.80068537459 }, { "content": " () => {};\n\n (\n\n $(#[$m:meta])*\n\n enable $get_en:ident / $with_en:ident\n\n value $get_div:ident / $with_div:ident : $ty:ty\n\n as $get_opt:ident / $with_opt:ident;\n\n\n\n $($rest:tt)*\n\n ) => {\n\n $(#[$m])*\n\n pub fn $get_opt(self) -> Option<$ty> {\n\n if self.$get_en() {\n\n Some(self.$get_div())\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n $(#[$m])*\n\n pub fn $with_opt(self, v: Option<$ty>) -> Self {\n", "file_path": "embrs/src/stm32f4/rcc/raw.rs", "rank": 45, "score": 57141.2549622954 }, { "content": "fn main() {\n\n\n\n println!(\"cargo:rustc-link-search=./extraLibs\");\n\n\n\n println!(\"cargo:rustc-link-lib=static=scheduler\");\n\n\n\n println!(\"cargo:rustc-link-lib=static=memset\");\n\n\n\n println!(\"cargo:rustc-link-lib=static=contextSwitch\");\n\n}\n", "file_path": "build.rs", "rank": 68, "score": 50714.5913577078 }, { "content": "#[repr(C, packed)]\n\nstruct Registers {\n\n acr: Reg<u32>,\n\n}\n\n\n\nconst FLASH_ADDRESS : usize = 0x40023c00;\n\n\n\nbit_wrappers! {\n\n pub struct Acr(pub u32);\n\n}\n\n\n\nimpl Acr {\n\n bitfield_accessors! 
{\n\n pub total [12] get_dcrst / with_dcrst: bool,\n\n pub total [11] get_icrst / with_icrst: bool,\n\n pub total [10] get_dcen / with_dcen: bool,\n\n pub total [9] get_icen / with_icen: bool,\n\n pub total [8] get_prften / with_prften: bool,\n\n pub total [2:0] get_latency / with_latency: u32,\n\n }\n\n}\n", "file_path": "embrs/src/stm32f4/flash.rs", "rank": 69, "score": 50545.859423668124 }, { "content": "#[repr(C, packed)]\n\nstruct Registers {\n\n pub cpuid: Reg<u32>,\n\n pub icsr: Reg<u32>,\n\n pub vtor: Reg<u32>,\n\n pub aircr: Reg<u32>,\n\n pub scr: Reg<u32>,\n\n pub ccr: Reg<u32>,\n\n pub shpr: [Reg<u32>; 3],\n\n pub shcsr: Reg<u32>,\n\n pub cfsr: Reg<u32>,\n\n pub hfsr: Reg<u32>,\n\n pub dfsr: Reg<u32>,\n\n pub mmfar: Reg<u32>,\n\n pub bfar: Reg<u32>,\n\n pub afsr: Reg<u32>,\n\n\n\n _reserved: [Reg<u32>; 18],\n\n\n\n pub cpacr: Reg<u32>,\n\n}\n", "file_path": "embrs/src/arm_m/scb.rs", "rank": 70, "score": 50545.859423668124 }, { "content": "#[repr(C, packed)]\n\nstruct Registers {\n\n /// The Interrupt Set Enabled Registers have one bit for each potential\n\n /// interrupt source. Writing ones causes the corresponding interrupt(s) to\n\n /// become enabled; others remain unchanged.\n\n iser: [Reg<u32>; 16], _reserved_after_iser: [Reg<u32>; 16],\n\n\n\n /// The Interrupt Clear Enabled Registers have one bit for each potential\n\n /// interrupt source. Writing ones causes the corresponding interrupt(s) to\n\n /// become disabled; others remain unchanged.\n\n icer: [Reg<u32>; 16], _reserved_after_icer: [Reg<u32>; 16],\n\n\n\n /// The Interrupt Set Pending Registers have one bit for each potential\n\n /// interrupt source. Writing ones causes the corresponding interrupt(s) to\n\n /// become pending; others remain unchanged.\n\n ispr: [Reg<u32>; 16], _reserved_after_ispr: [Reg<u32>; 16],\n\n\n\n /// The Interrupt Clear Pending Registers have one bit for each potential\n\n /// interrupt source. 
Writing ones causes the corresponding interrupt(s) to\n\n /// become non-pending; others remain unchanged.\n\n icpr: [Reg<u32>; 16], _reserved_after_icpr: [Reg<u32>; 16],\n", "file_path": "embrs/src/arm_m/nvic.rs", "rank": 71, "score": 50545.859423668124 }, { "content": "#[cfg(feature = \"cpu:cortex-m4f\")]\n\n#[repr(C, packed)]\n\nstruct FpRegisters {\n\n pub fpccr: Reg<u32>,\n\n pub fpcar: Reg<u32>,\n\n pub fpdscr: Reg<u32>,\n\n pub mvfr: [Reg<u32>; 2],\n\n}\n\n\n\n#[cfg(feature = \"cpu:cortex-m4f\")]\n\nconst SCB_FP_ADDRESS : usize = 0xe000ef34;\n\n\n\n#[cfg(feature = \"cpu:cortex-m4f\")]\n\npub struct ScbFp;\n\n\n\n#[cfg(feature = \"cpu:cortex-m4f\")]\n\npub static SCB_FP : ScbFp = ScbFp;\n\n\n\nbit_wrappers! {\n\n pub struct Fpccr(pub u32);\n\n}\n\n\n", "file_path": "embrs/src/arm_m/scb.rs", "rank": 72, "score": 49391.964497899506 }, { "content": "#[repr(C, packed)]\n\nstruct Registers {\n\n csr: Reg<u32>,\n\n rvr: Reg<u32>,\n\n cvr: Reg<u32>,\n\n calib: Reg<u32>,\n\n}\n\n\n\nconst SYS_TICK_ADDRESS : usize = 0xe000e010;\n\n\n\npub struct SysTick;\n\n\n\nimpl SysTick {\n\n // TODO: this peripheral, unusually for something designed by ARM, contains\n\n // read-to-clear bits and R/W bits without inherent atomic updates. So this\n\n // API is probably wrong.\n\n\n\n fn reg(&self) -> &'static Registers {\n\n unsafe { &*(SYS_TICK_ADDRESS as *const Registers) }\n\n }\n\n\n", "file_path": "embrs/src/arm_m/sys_tick.rs", "rank": 73, "score": 49384.8620203007 }, { "content": "pub mod exc;\n\npub mod nvic;\n\npub mod reg;\n\npub mod scb;\n\npub mod sys_tick;\n\n\n\n#[cfg(target_os = \"none\")]\n\npub mod startup;\n\n\n\n/// Sets the processor's `PRIMASK` register to `val`.\n\n#[inline]\n", "file_path": "embrs/src/arm_m/mod.rs", "rank": 74, "score": 29867.49286518746 }, { "content": "//! 
Support for the STM32F4 series of SoCs.\n\n\n\npub mod dma;\n\npub mod flash;\n\npub mod gpio;\n\npub mod irq;\n\npub mod rcc;\n\npub mod usart;\n", "file_path": "embrs/src/stm32f4/mod.rs", "rank": 75, "score": 29865.28798671649 }, { "content": "\n\nuse embrs::arm_m::sys_tick;\n\nuse embrs::stm32f4::gpio::{self, gpioa};\n\nuse embrs::stm32f4::rcc::{self, RCC, AhbPeripheral, ApbPeripheral};\n\n\n\nconst TOGGLE_HZ : u32 = 10;\n\nconst HZ : u32 = 160_000_000;\n\n\n\n\n\n/* Function name : CLOCKS */\n\n/* Description : Constant struct that is used for UART and UART is used by the LEDs.\n\n It is also used byt he systick interrupt*/\n\npub const CLOCKS : rcc::ClockConfig = rcc::ClockConfig {\n\n crystal_hz: 8_000_000_f32,\n\n crystal_divisor: 4,\n\n vco_multiplier: 160,\n\n general_divisor: rcc::SysPrescaler::Div2,\n\n pll48_divisor: 4,\n\n\n\n ahb_divisor: None,\n\n apb1_divisor: Some(rcc::ApbPrescaler::Div4),\n\n apb2_divisor: Some(rcc::ApbPrescaler::Div2),\n\n\n\n flash_latency: 5,\n\n};\n\n\n\n\n\n/* Function name : inti_SYSTICK() */\n\n/* Description : Sets up the SYSTICK frecuency, clocks and enables it */\n", "file_path": "scheduler/src/init.rs", "rank": 76, "score": 23.567176701222728 }, { "content": "macro_rules! 
bit_enums {\n\n () => {};\n\n (\n\n $(#[$m:meta])*\n\n pub bit_enum $name:ident {\n\n $($e_name:ident = $e_val:expr,)+\n\n }\n\n $($rest:tt)*\n\n ) => {\n\n #[derive(Copy, Clone, Eq, PartialEq)]\n\n $(#[$m])*\n\n pub enum $name {\n\n $($e_name = $e_val),+\n\n }\n\n\n\n impl $crate::bits::IntoBits for $name {\n\n fn into_bits(self) -> u32 {\n\n self as u32\n\n }\n\n }\n", "file_path": "embrs/src/bits.rs", "rank": 77, "score": 21.991962820200907 }, { "content": " }\n\n }\n\n\n\n #[inline]\n\n pub fn with_clksource(self, v: ClkSource) -> Self {\n\n Csr((self.0 & !(1 << 2)) | ((v as u32) << 2))\n\n }\n\n\n\n #[inline]\n\n pub fn get_countflag(self) -> bool {\n\n (self.0 & (1 << 16)) != 0\n\n }\n\n}\n\n\n\nimpl From<u32> for Csr {\n\n fn from(v: u32) -> Csr {\n\n Csr(v)\n\n }\n\n}\n\n\n\nimpl From<Csr> for u32 {\n\n fn from(v: Csr) -> u32 {\n\n v.0\n\n }\n\n}\n\n\n\npub static SYS_TICK : SysTick = SysTick;\n", "file_path": "embrs/src/arm_m/sys_tick.rs", "rank": 78, "score": 21.79037373505812 }, { "content": " #[inline]\n\n pub fn with_enable(self, v: bool) -> Self {\n\n Csr((self.0 & !(1 << 0)) | ((v as u32) << 0))\n\n }\n\n\n\n #[inline]\n\n pub fn get_tickint(self) -> bool {\n\n (self.0 & (1 << 1)) != 0\n\n }\n\n\n\n #[inline]\n\n pub fn with_tickint(self, v: bool) -> Self {\n\n Csr((self.0 & !(1 << 1)) | ((v as u32) << 1))\n\n }\n\n\n\n #[inline]\n\n pub fn get_clksource(self) -> ClkSource {\n\n match (self.0 & (1 << 2)) != 0 {\n\n false => ClkSource::ExternalReference,\n\n true => ClkSource::ProcessorClock,\n", "file_path": "embrs/src/arm_m/sys_tick.rs", "rank": 79, "score": 21.549374165012363 }, { "content": " pub fn write_cvr(&self, v: u32) {\n\n self.reg().cvr.set(v)\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Csr(u32);\n\n\n\n#[derive(Copy, Clone)]\n\npub enum ClkSource {\n\n ExternalReference = 0,\n\n ProcessorClock = 1,\n\n}\n\n\n\nimpl Csr {\n\n #[inline]\n\n pub fn get_enable(self) -> bool {\n\n (self.0 & (1 << 0)) != 0\n\n }\n\n\n", "file_path": 
"embrs/src/arm_m/sys_tick.rs", "rank": 80, "score": 20.72442216739911 }, { "content": " #[derive(Copy, Clone)]\n\n #[repr(C, packed)]\n\n $(#[$m])*\n\n pub struct $name(pub $ty);\n\n\n\n impl $crate::bits::BitsWrapper for $name {\n\n type Raw = $ty;\n\n\n\n fn from_raw(v: Self::Raw) -> Self {\n\n $name(v)\n\n }\n\n }\n\n\n\n impl Default for $name {\n\n fn default() -> Self {\n\n <Self as $crate::bits::BitsWrapper>::from_raw(\n\n Default::default())\n\n }\n\n }\n\n\n", "file_path": "embrs/src/bits.rs", "rank": 81, "score": 20.590501395137608 }, { "content": " fn from_bits(bits: u32) -> bits::BitsResult<Self> {\n\n InterruptFlags::from_bits(bits).ok_or(bits::BadBits(bits))\n\n }\n\n}\n\n\n\nimpl bits::IntoBits for InterruptFlags {\n\n fn into_bits(self) -> u32 {\n\n self.bits()\n\n }\n\n}\n\n\n\n/// Names of relative streams within an interrupt register (`Ir`).\n\n///\n\n/// This is mostly used under the hood, but in case you need it: here it is.\n\n#[derive(Eq, PartialEq, Copy, Clone)]\n\npub enum RelativeStreamIndex {\n\n RS0, RS1, RS2, RS3,\n\n}\n\n\n\nimpl Ir {\n", "file_path": "embrs/src/stm32f4/dma.rs", "rank": 82, "score": 18.7179605523467 }, { "content": "}\n\n\n\n/// Names of DMA streams.\n\n#[derive(Eq, PartialEq, Copy, Clone)]\n\npub enum StreamIndex {\n\n S0, S1, S2, S3, S4, S5, S6, S7\n\n}\n\n\n\nimpl StreamIndex {\n\n /// Converts a stream index into the corresponding index into the interrupt\n\n /// register arrays `isr` and `ifcr`.\n\n pub fn get_ir_index(self) -> usize {\n\n (self as usize) / 4\n\n }\n\n\n\n /// Converts a stream index into the corresponding relative stream index\n\n /// within an interrupt register (`isr[x]` or `ifcr[x]`).\n\n pub fn get_rs_index(self) -> RelativeStreamIndex {\n\n unsafe {\n\n mem::transmute((self as u8) % 4)\n\n }\n\n }\n\n}\n", "file_path": "embrs/src/stm32f4/dma.rs", "rank": 83, "score": 17.902085446607565 }, { "content": "\n\nuse embrs::stm32f4::gpio::{self, gpiod};\n\nuse embrs::stm32f4::rcc::{ RCC, 
AhbPeripheral};\n\n\n\n\n\n\n\n/* Function name : led_pin_12() */\n\n/* Description : Returns the P12 from the static GPIO struct */\n", "file_path": "scheduler/src/leds.rs", "rank": 84, "score": 16.10738792121064 }, { "content": " pub total [ 1] get_iren / with_iren: bool,\n\n pub total [ 0] get_eie / with_eie: bool,\n\n }\n\n}\n\n\n\nimpl Gtpr {\n\n bitfield_accessors! {\n\n pub total [15:8] get_gt / with_gt: u8,\n\n pub total [ 7:0] get_psc / with_psc: u8,\n\n }\n\n}\n\n\n\nbit_enums! {\n\n pub bit_enum WordLength {\n\n EightBits = 0,\n\n NineBits = 1,\n\n }\n\n\n\n pub bit_enum WakeupMethod {\n\n IdleLine = 0,\n", "file_path": "embrs/src/stm32f4/usart.rs", "rank": 85, "score": 15.52180878441668 }, { "content": "\n\n impl $crate::bits::FromBits for $name {\n\n fn from_bits(bits: u32) -> $crate::bits::BitsResult<Self> {\n\n match bits {\n\n $($e_val => Ok($name::$e_name),)+\n\n _ => Err($crate::bits::BadBits(bits)),\n\n }\n\n }\n\n }\n\n\n\n impl $crate::bits::FromBitsTotal for $name {\n\n fn from_bits_total(bits: u32) -> Self {\n\n match bits {\n\n $($e_val => $name::$e_name,)+\n\n _ => unreachable!(),\n\n }\n\n }\n\n }\n\n\n\n bit_enums!{$($rest)*}\n\n };\n\n}\n", "file_path": "embrs/src/bits.rs", "rank": 86, "score": 15.51036595776841 }, { "content": " otg_hs: None,\n\n dcmi: None,\n\n cryp: None,\n\n hash_rng: None,\n\n fpu: None,\n\n\n\n uart7: None,\n\n uart8: None,\n\n spi4: None,\n\n spi5: None,\n\n spi6: None,\n\n sai1: None,\n\n ltdc: None,\n\n ltdc_er: None,\n\n dma2d: None,\n\n};\n\n\n\n/// Enumeration of the STM32F4 interrupts. This can be used to name an\n\n/// interrupt vector, like an integer, but without the risk of receiving\n\n/// out-of-range values.\n", "file_path": "embrs/src/stm32f4/irq.rs", "rank": 87, "score": 15.485037720615038 }, { "content": "///\n\n/// The special modifier `total` says that every bit pattern that may appear in\n\n/// that field is valid for the Rust type. 
Without `total`, packed field values\n\n/// are returned as `BitsResult<T>` so that invalid values can be handled. With\n\n/// `total`, they are returned as simply `T`, and invalid values will panic.\n\n/// (Under the hood, `total` uses a `FromBitsTotal` impl, while otherwise\n\n/// `FromBits` is used.)\n\n///\n\n/// The declarations above will produce the following methods:\n\n///\n\n/// pub fn get_sign(self) -> bool { ... }\n\n/// pub fn with_sign(self, v: bool) -> Self { ... }\n\n///\n\n/// pub fn get_value(self) -> u32 { ... }\n\n/// pub fn with_value(self, v: u32) -> Self { ... }\n\n///\n\n/// pub fn get_mode(self) -> BitsResult<Mode> { ... }\n\n/// pub fn with_value(self, v: Mode) -> Self { ... }\n\nmacro_rules! bitfield_accessors {\n\n // Terminal.\n", "file_path": "embrs/src/bits.rs", "rank": 88, "score": 15.344040259501188 }, { "content": " for i in 0..16 {\n\n places |= (pins & (1 << i)) << i;\n\n }\n\n\n\n (0b11 * places, val * places)\n\n };\n\n\n\n reg.atomic_nand_and_or(mask, setting)\n\n }\n\n}\n\n\n\nmacro_rules! 
static_gpio {\n\n ($name:ident, $addr:expr) => {\n\n #[inline]\n\n pub fn $name() -> &'static GpioPort {\n\n unsafe {\n\n &*($addr as *const GpioPort)\n\n }\n\n }\n\n };\n\n}\n\n\n\nstatic_gpio!(gpioa, 0x40020000);\n\nstatic_gpio!(gpiod, 0x40020c00);\n", "file_path": "embrs/src/stm32f4/gpio.rs", "rank": 89, "score": 15.01903328835639 }, { "content": " ///\n\n /// This operation is atomic with respect to `set_priority_raw`, but makes\n\n /// no particular guarantees about interaction with preempting interrupt\n\n /// handlers.\n\n #[inline] // into the SoC layer\n\n pub fn get_priority_raw(&self, irq: u32) -> u8 {\n\n atomic::fence(atomic::Ordering::Acquire);\n\n\n\n unsafe {\n\n self.reg().ipr[irq as usize].get()\n\n }\n\n }\n\n\n\n unsafe fn reg(&self) -> &'static Registers {\n\n &*(NVIC_ADDRESS as *const Registers)\n\n }\n\n\n\n #[inline]\n\n fn write_barriers() {\n\n // Data fence to ensure the write is not buffered (emits DMB).\n", "file_path": "embrs/src/arm_m/nvic.rs", "rank": 90, "score": 14.939253047369 }, { "content": "impl Dr {\n\n bitfield_accessors! {\n\n pub total [7:0] get_data / with_data: u8,\n\n }\n\n}\n\n\n\nimpl Brr {\n\n bitfield_accessors! {\n\n pub total [15:4] get_mantissa / with_mantissa: u32,\n\n pub total [3:0] get_fraction / with_fraction: u32,\n\n }\n\n}\n\n\n\nimpl Cr1 {\n\n bitfield_accessors! {\n\n pub total [15] get_over8 / with_over8: bool,\n\n pub total [13] get_ue / with_ue: bool,\n\n pub total [12] get_m / with_m: WordLength,\n\n pub total [11] get_wake / with_wake: WakeupMethod,\n\n pub total [10] get_pce / with_pce: bool,\n", "file_path": "embrs/src/stm32f4/usart.rs", "rank": 91, "score": 14.843747975552473 }, { "content": " reg: *const Registers,\n\n}\n\n\n\nmacro_rules! 
reg_accessors {\n\n ($name:ident, $ty:ident, $read:ident, $write:ident, $update:ident) => {\n\n pub fn $write(&self, v: $ty) {\n\n self.reg().$name.set(v.0)\n\n }\n\n\n\n pub fn $read(&self) -> $ty {\n\n $ty(self.reg().$name.get())\n\n }\n\n\n\n pub fn $update<F: FnOnce($ty) -> $ty>(&self, f: F) {\n\n self.$write(f(self.$read()))\n\n }\n\n };\n\n}\n\n\n\nimpl Usart {\n", "file_path": "embrs/src/stm32f4/usart.rs", "rank": 92, "score": 14.6803320193779 }, { "content": " const P13 = 1 << 13,\n\n const P14 = 1 << 14,\n\n const P15 = 1 << 15,\n\n }\n\n}\n\n\n\nimpl GpioPort {\n\n /// Changes the mode of the pins selected by `pins` to `mode`.\n\n pub fn set_mode(&self, pins: PinMask, mode: Mode) {\n\n Self::update_2(pins, mode as u32, &self.moder)\n\n }\n\n\n\n /// Changes the output type of the pins selected by `pins` to `ot`.\n\n pub fn set_output_type(&self, pins: PinMask, ot: OutputType) {\n\n Self::update_1(pins, ot as u32, &self.otyper)\n\n }\n\n\n\n /// Changes the output speed of the pins selected by `pins` to `speed`.\n\n pub fn set_speed(&self, pins: PinMask, speed: Speed) {\n\n Self::update_2(pins, speed as u32, &self.ospeedr)\n", "file_path": "embrs/src/stm32f4/gpio.rs", "rank": 93, "score": 14.67591873906509 }, { "content": " ///\n\n /// Note that changing the priority of an interrupt *while that interrupt's\n\n /// handler is executing or preempted* does not necessarily affect the\n\n /// current execution priority: the hardware ensures that doing so never\n\n /// produces a priority inversion between the current execution priority and\n\n /// any previously preempted handlers.\n\n ///\n\n /// You probably don't want to call this function. 
The SoC layer's\n\n /// `NvicExt` trait provides a `set_priority` method that is both more\n\n /// ergonomic (taking an enum instead of a `u32`) and *more performant*\n\n /// (because the enum lets us eliminate some range checks).\n\n #[inline] // into the SoC layer\n\n pub fn set_priority_raw(&self, irq: u32, priority: u8) {\n\n unsafe {\n\n self.reg().ipr[irq as usize].set(priority);\n\n }\n\n Self::write_barriers()\n\n }\n\n\n\n /// Reads the priority of an interrupt.\n", "file_path": "embrs/src/arm_m/nvic.rs", "rank": 94, "score": 14.416891924382744 }, { "content": " fn reg(&self) -> &Registers {\n\n unsafe {\n\n &*self.reg\n\n }\n\n }\n\n\n\n reg_accessors!(cr1, Cr1, read_cr1, write_cr1, update_cr1);\n\n reg_accessors!(brr, Brr, read_brr, write_brr, update_brr);\n\n\n\n pub fn send8(&self, v: u8) {\n\n self.reg().dr.set(v as u32)\n\n }\n\n}\n\n\n\nunsafe impl Sync for Usart {}\n\n\n\nmacro_rules! static_usart {\n\n ($name:ident, $addr:expr) => {\n\n pub static $name: Usart = Usart {\n\n reg: $addr as *const Registers,\n\n };\n\n };\n\n}\n\n\n\nstatic_usart!(USART2, 0x40004400);\n\n\n\n\n\n\n", "file_path": "embrs/src/stm32f4/usart.rs", "rank": 95, "score": 14.174201489840067 }, { "content": " /// Reads the contents of the register using a volatile load.\n\n pub fn get(&self) -> T {\n\n unsafe { ptr::read_volatile(self.value.get()) }\n\n }\n\n\n\n /// Replaces the contents of the register using a volatile store.\n\n pub fn set(&self, value: T) {\n\n unsafe {\n\n ptr::write_volatile(self.value.get(), value)\n\n }\n\n }\n\n\n\n pub fn update<F: FnOnce(T) -> T>(&self, f: F) {\n\n self.set(f(self.get()))\n\n }\n\n}\n\n\n\n/// Additional features that become available when a register contains a\n\n/// hardware-supported atomic type.\n", "file_path": "embrs/src/arm_m/reg.rs", "rank": 96, "score": 14.077180602785429 }, { "content": "//! Support for converting between Rust types and bitwise representations,\n\n//! 
including registers with packed bitfields.\n\n//!\n\n//! This module is an attempt to reduce the boilerplate in interacting with\n\n//! packed registers, without going all the way to a compiler plugin.\n\n\n\n#![macro_use]\n\n\n\nuse core;\n\n\n\n/// Error type indicating that some bits read from the hardware weren't valid\n\n/// for the expected type. This usually indicates a driver bug, but can also\n\n/// indicate misbehaving hardware.\n\n#[derive(Copy, Clone, Eq, PartialEq)]\n\npub struct BadBits(pub u32);\n\n\n\n/// Result type for `BadBits`.\n\npub type BitsResult<T> = Result<T, BadBits>;\n\n\n\n/// Construct `Self` from a small bitwise representation, without assuming that\n\n/// every possible bit pattern can be represented.\n\n///\n\n/// This trait is similar to `core::convert::From`, but less general and can\n\n/// fail.\n\n///\n\n/// This trait is similar to `FromBitsTotal` but allows illegal bit patterns to\n\n/// be processed at runtime.\n", "file_path": "embrs/src/bits.rs", "rank": 97, "score": 13.805497403499908 }, { "content": "\n\npub struct Flash;\n\n\n\nimpl Flash {\n\n fn reg(&self) -> &Registers {\n\n unsafe {\n\n &*(FLASH_ADDRESS as *const Registers)\n\n }\n\n }\n\n\n\n pub fn read_acr(&self) -> Acr {\n\n Acr(self.reg().acr.get())\n\n }\n\n\n\n pub fn write_acr(&self, v: Acr) {\n\n self.reg().acr.set(v.0)\n\n }\n\n\n\n pub fn update_acr<F: FnOnce(Acr) -> Acr>(&self, f: F) {\n\n self.write_acr(f(self.read_acr()))\n\n }\n\n}\n\n\n\npub static FLASH : Flash = Flash;\n", "file_path": "embrs/src/stm32f4/flash.rs", "rank": 98, "score": 13.582888616662505 }, { "content": " bit_wrappers!{$($rest)*}\n\n };\n\n}\n\n\n\n/// Declares accessors for packed bitfields. 
This macro should be used within\n\n/// an `impl` block for a `BitsWrapper` type (possibly declared using the\n\n/// `bit_wrappers` macro).\n\n///\n\n/// Packed bitfield accessor declarations look like this:\n\n///\n\n/// pub total [31] get_sign / with_sign: bool,\n\n/// pub total [30:15] get_value / with_value: u32,\n\n/// pub [3:0] get_mode / with_mode: MyMode,\n\n///\n\n/// From left to right:\n\n/// - Access modifier(s) (`pub` is currently required).\n\n/// - Bit range, given as either a single bit index, or high and low indices\n\n/// (inclusive).\n\n/// - Getter name and builder name, separated by a slash.\n\n/// - Rust type.\n", "file_path": "embrs/src/bits.rs", "rank": 99, "score": 13.530813606480434 } ]
Rust
src/lib.rs
jsgf/eventfd-rust
b2db38af64f731ccdd84e10968948ab1ab8d123e
#![cfg(target_os = "linux")] extern crate nix; pub use nix::sys::eventfd::{EventFdFlag, EFD_CLOEXEC, EFD_NONBLOCK, EFD_SEMAPHORE}; use nix::sys::eventfd::eventfd; use nix::unistd::{dup, close, write, read}; use std::io; use std::os::unix::io::{AsRawFd,RawFd}; use std::thread; use std::sync::mpsc; use std::mem; pub struct EventFD { fd: RawFd, flags: EventFdFlag, } unsafe impl Send for EventFD {} unsafe impl Sync for EventFD {} impl EventFD { pub fn new(initval: usize, flags: EventFdFlag) -> io::Result<EventFD> { Ok(EventFD { fd: try!(eventfd(initval, flags)), flags: flags }) } pub fn read(&self) -> io::Result<u64> { let mut buf = [0u8; 8]; let _ = try!(read(self.fd, &mut buf)); let val = unsafe { mem::transmute(buf) }; Ok(val) } pub fn write(&self, val: u64) -> io::Result<()> { let buf: [u8; 8] = unsafe { mem::transmute(val) }; try!(write(self.fd, &buf)); Ok(()) } pub fn events(&self) -> mpsc::Receiver<u64> { let (tx, rx) = mpsc::sync_channel(1); let c = self.clone(); thread::spawn(move || { loop { match c.read() { Ok(v) => match tx.send(v) { Ok(_) => (), Err(_) => break, }, Err(e) => panic!("read failed: {}", e), } } }); rx } } impl AsRawFd for EventFD { fn as_raw_fd(&self) -> RawFd { self.fd as RawFd } } impl Drop for EventFD { fn drop(&mut self) { let _ = close(self.fd); } } impl Clone for EventFD { fn clone(&self) -> EventFD { EventFD { fd: dup(self.fd).unwrap(), flags: self.flags } } } #[cfg(test)] mod test { extern crate std; use super::{EventFdFlag, EventFD, EFD_SEMAPHORE, EFD_NONBLOCK}; use std::thread; #[test] fn test_basic() { let (tx,rx) = std::sync::mpsc::channel(); let efd = match EventFD::new(10, EventFdFlag::empty()) { Err(e) => panic!("new failed {}", e), Ok(fd) => fd, }; let cefd = efd.clone(); assert_eq!(efd.read().unwrap(), 10); thread::spawn(move || { assert_eq!(cefd.read().unwrap(), 7); assert_eq!(cefd.write(1).unwrap(), ()); assert_eq!(cefd.write(2).unwrap(), ()); assert!(tx.send(()).is_ok()); }); assert_eq!(efd.write(7).unwrap(), ()); let _ = 
rx.recv(); assert_eq!(efd.read().unwrap(), 3); } #[test] fn test_sema() { let efd = match EventFD::new(0, EFD_SEMAPHORE | EFD_NONBLOCK) { Err(e) => panic!("new failed {}", e), Ok(fd) => fd, }; match efd.read() { Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => (), Err(e) => panic!("unexpected error {}", e), Ok(v) => panic!("unexpected success {}", v), } assert_eq!(efd.write(5).unwrap(), ()); assert_eq!(efd.read().unwrap(), 1); assert_eq!(efd.read().unwrap(), 1); assert_eq!(efd.read().unwrap(), 1); assert_eq!(efd.read().unwrap(), 1); assert_eq!(efd.read().unwrap(), 1); match efd.read() { Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => (), Err(e) => panic!("unexpected error {}", e), Ok(v) => panic!("unexpected success {}", v), } } #[test] fn test_stream() { let efd = match EventFD::new(11, EFD_SEMAPHORE) { Err(e) => panic!("new failed {}", e), Ok(fd) => fd, }; let mut count = 0; for v in efd.events().iter().take(10) { assert_eq!(v, 1); count += v; } assert_eq!(count, 10) } #[test] fn test_chan() { let (tx,rx) = std::sync::mpsc::channel(); let efd = match EventFD::new(10, EventFdFlag::empty()) { Err(e) => panic!("new failed {}", e), Ok(fd) => fd, }; assert_eq!(efd.write(1).unwrap(), ()); assert!(tx.send(efd).is_ok()); let t = thread::spawn(move || { let efd = rx.recv().unwrap(); assert_eq!(efd.read().unwrap(), 11) }).join(); match t { Ok(_) => println!("ok"), Err(_) => panic!("failed"), } } }
#![cfg(target_os = "linux")] extern crate nix; pub use nix::sys::eventfd::{EventFdFlag, EFD_CLOEXEC, EFD_NONBLOCK, EFD_SEMAPHORE}; use nix::sys::eventfd::eventfd; use nix::unistd::{dup, close, write, read}; use std::io; use std::os::unix::io::{AsRawFd,RawFd}; use std::thread; use std::sync::mpsc; use std::mem; pub struct EventFD { fd: RawFd, flags: EventFdFlag, } unsafe impl Send for EventFD {} unsafe impl Sync for EventFD {} impl EventFD { pub fn new(initval: usize, flags: EventFdFlag) -> io::Result<EventFD> { Ok(EventFD { fd: try!(eventfd(initval, flags)), flags: flags }) } pub fn read(&self) -> io::Result<u64> { let mut
pub fn write(&self, val: u64) -> io::Result<()> { let buf: [u8; 8] = unsafe { mem::transmute(val) }; try!(write(self.fd, &buf)); Ok(()) } pub fn events(&self) -> mpsc::Receiver<u64> { let (tx, rx) = mpsc::sync_channel(1); let c = self.clone(); thread::spawn(move || { loop { match c.read() { Ok(v) => match tx.send(v) { Ok(_) => (), Err(_) => break, }, Err(e) => panic!("read failed: {}", e), } } }); rx } } impl AsRawFd for EventFD { fn as_raw_fd(&self) -> RawFd { self.fd as RawFd } } impl Drop for EventFD { fn drop(&mut self) { let _ = close(self.fd); } } impl Clone for EventFD { fn clone(&self) -> EventFD { EventFD { fd: dup(self.fd).unwrap(), flags: self.flags } } } #[cfg(test)] mod test { extern crate std; use super::{EventFdFlag, EventFD, EFD_SEMAPHORE, EFD_NONBLOCK}; use std::thread; #[test] fn test_basic() { let (tx,rx) = std::sync::mpsc::channel(); let efd = match EventFD::new(10, EventFdFlag::empty()) { Err(e) => panic!("new failed {}", e), Ok(fd) => fd, }; let cefd = efd.clone(); assert_eq!(efd.read().unwrap(), 10); thread::spawn(move || { assert_eq!(cefd.read().unwrap(), 7); assert_eq!(cefd.write(1).unwrap(), ()); assert_eq!(cefd.write(2).unwrap(), ()); assert!(tx.send(()).is_ok()); }); assert_eq!(efd.write(7).unwrap(), ()); let _ = rx.recv(); assert_eq!(efd.read().unwrap(), 3); } #[test] fn test_sema() { let efd = match EventFD::new(0, EFD_SEMAPHORE | EFD_NONBLOCK) { Err(e) => panic!("new failed {}", e), Ok(fd) => fd, }; match efd.read() { Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => (), Err(e) => panic!("unexpected error {}", e), Ok(v) => panic!("unexpected success {}", v), } assert_eq!(efd.write(5).unwrap(), ()); assert_eq!(efd.read().unwrap(), 1); assert_eq!(efd.read().unwrap(), 1); assert_eq!(efd.read().unwrap(), 1); assert_eq!(efd.read().unwrap(), 1); assert_eq!(efd.read().unwrap(), 1); match efd.read() { Err(ref e) if e.kind() == std::io::ErrorKind::WouldBlock => (), Err(e) => panic!("unexpected error {}", e), Ok(v) => 
panic!("unexpected success {}", v), } } #[test] fn test_stream() { let efd = match EventFD::new(11, EFD_SEMAPHORE) { Err(e) => panic!("new failed {}", e), Ok(fd) => fd, }; let mut count = 0; for v in efd.events().iter().take(10) { assert_eq!(v, 1); count += v; } assert_eq!(count, 10) } #[test] fn test_chan() { let (tx,rx) = std::sync::mpsc::channel(); let efd = match EventFD::new(10, EventFdFlag::empty()) { Err(e) => panic!("new failed {}", e), Ok(fd) => fd, }; assert_eq!(efd.write(1).unwrap(), ()); assert!(tx.send(efd).is_ok()); let t = thread::spawn(move || { let efd = rx.recv().unwrap(); assert_eq!(efd.read().unwrap(), 11) }).join(); match t { Ok(_) => println!("ok"), Err(_) => panic!("failed"), } } }
buf = [0u8; 8]; let _ = try!(read(self.fd, &mut buf)); let val = unsafe { mem::transmute(buf) }; Ok(val) }
function_block-function_prefixed
[ { "content": "Eventfd Binding\n\n===============\n\n\n\n[![Build Status](https://travis-ci.org/jsgf/eventfd-rust.svg?branch=master)](https://travis-ci.org/jsgf/eventfd-rust)\n\n\n\nThis crate implements a binding for eventfd. This isn't especially\n\nuseful on its own; the primary use case is as part of the API for\n\nother Linux syscalls and subsystems.\n\n\n\nJeremy Fitzhardinge <jeremy@goop.org>\n", "file_path": "README.md", "rank": 0, "score": 6070.140952804858 } ]
Rust
src/client/tokens.rs
lann/bindle
ec1513554a4b088d5c9f7b25020be3d05cc58a62
use std::{ path::{Path, PathBuf}, sync::Arc, }; use oauth2::reqwest::async_http_client; use oauth2::{ basic::*, devicecode::DeviceAuthorizationResponse, AuthUrl, Client as Oauth2Client, ClientId, RefreshToken, StandardRevocableToken, StandardTokenResponse, TokenResponse, TokenUrl, }; use reqwest::{ header::{HeaderValue, AUTHORIZATION}, Client as HttpClient, RequestBuilder, }; use time::{serde::timestamp, OffsetDateTime}; use tokio::fs::OpenOptions; use tokio::io::AsyncWriteExt; use tokio::sync::RwLock; use super::{ClientError, Result}; #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] struct OidcTokenExtraFields { pub id_token: String, #[serde(default)] pub issuer: String, #[serde(default)] pub client_id: String, #[serde(default)] pub token_url: String, } impl oauth2::ExtraTokenFields for OidcTokenExtraFields {} #[derive(serde::Deserialize, Debug)] struct Claims { pub iss: String, #[serde(with = "timestamp")] pub exp: OffsetDateTime, } #[async_trait::async_trait] pub trait TokenManager { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder>; } #[derive(Clone, Default)] pub struct NoToken; #[async_trait::async_trait] impl TokenManager for NoToken { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder> { Ok(builder) } } #[derive(Clone)] pub struct LongLivedToken { token: String, } impl LongLivedToken { pub fn new(token: &str) -> Self { LongLivedToken { token: token.to_owned(), } } } #[async_trait::async_trait] impl TokenManager for LongLivedToken { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder> { let mut header_val = HeaderValue::from_str(&format!("Bearer {}", self.token)) .map_err(|e| ClientError::Other(e.to_string()))?; header_val.set_sensitive(true); Ok(builder.header(AUTHORIZATION, header_val)) } } #[derive(Clone)] pub struct HttpBasic { username: String, password: String, } impl HttpBasic { pub fn new(username: &str, password: &str) -> Self { HttpBasic 
{ username: username.to_owned(), password: password.to_owned(), } } } #[async_trait::async_trait] impl TokenManager for HttpBasic { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder> { let data = base64::encode(format!("{}:{}", self.username, self.password)); let mut header_val = HeaderValue::from_str(&format!("Basic {}", data)) .map_err(|e| ClientError::Other(e.to_string()))?; header_val.set_sensitive(true); Ok(builder.header(AUTHORIZATION, header_val)) } } type LockData<T> = Arc<RwLock<T>>; #[derive(Clone)] pub struct OidcToken { id_token: LockData<String>, refresh_token: LockData<RefreshToken>, expiry_time: LockData<OffsetDateTime>, #[allow(dead_code)] issuer: String, #[allow(dead_code)] scopes: Vec<String>, client_id: String, token_url: String, token_file: Option<PathBuf>, } impl OidcToken { pub async fn new_from_parts( id_token: &str, refresh_token: &str, client_id: &str, token_url: &str, scopes: Vec<String>, ) -> Result<Self> { let (expiry_time, issuer) = data_from_token(id_token)?; let me = OidcToken { id_token: Arc::new(RwLock::new(id_token.to_owned())), refresh_token: Arc::new(RwLock::new(RefreshToken::new(refresh_token.to_owned()))), expiry_time: Arc::new(RwLock::new(expiry_time)), issuer, scopes, client_id: client_id.to_owned(), token_url: token_url.to_owned(), token_file: None, }; me.ensure_token().await?; Ok(me) } pub async fn new_from_file(token_file: impl AsRef<Path>) -> Result<Self> { let path = token_file.as_ref().to_owned(); let raw = tokio::fs::read(&path).await?; let token_res: StandardTokenResponse<OidcTokenExtraFields, BasicTokenType> = toml::from_slice(&raw)?; let mut me = Self::new_from_parts( &token_res.extra_fields().id_token, token_res .refresh_token() .ok_or_else(|| { ClientError::TokenError( "Token response does not contain a refresh token".into(), ) })? 
.secret(), &token_res.extra_fields().client_id, &token_res.extra_fields().token_url, token_res .scopes() .map(|s| s.iter().map(|s| s.to_string()).collect()) .unwrap_or_default(), ) .await?; me.token_file = Some(path); Ok(me) } pub async fn login(bindle_base_url: &str, token_file: impl AsRef<Path>) -> Result<Self> { let (base_url, headers) = super::base_url_and_headers(bindle_base_url)?; let login_resp = HttpClient::builder() .build()? .get(base_url.join(super::LOGIN_ENDPOINT).unwrap()) .query(&crate::LoginParams { provider: "nothing".into(), }) .headers(headers) .send() .await?; let login_resp = super::unwrap_status(login_resp, super::Endpoint::Login, super::Operation::Login) .await?; let device_code_details: DeviceAuthorizationResponse< crate::DeviceAuthorizationExtraFields, > = toml::from_slice(&login_resp.bytes().await?)?; println!( "Open this URL in your browser:\n{}\nand then enter the code when prompted: {}", **device_code_details.verification_uri(), device_code_details.user_code().secret() ); let oauth_client: Oauth2Client< BasicErrorResponse, StandardTokenResponse<OidcTokenExtraFields, BasicTokenType>, BasicTokenType, BasicTokenIntrospectionResponse, StandardRevocableToken, BasicRevocationErrorResponse, > = Oauth2Client::new( ClientId::new(device_code_details.extra_fields().client_id.clone()), None, AuthUrl::new("https://not.needed.com".into()).unwrap(), Some(TokenUrl::new(device_code_details.extra_fields().token_url.clone()).unwrap()), ) .set_auth_type(oauth2::AuthType::RequestBody); let token_res = match oauth_client .exchange_device_access_token(&device_code_details) .request_async(async_http_client, tokio::time::sleep, None) .await { Ok(t) => t, Err(e) => { return Err(ClientError::Other(format!("{:?}", e))); } }; let (expiry_time, issuer) = data_from_token(&token_res.extra_fields().id_token)?; let me = OidcToken { id_token: Arc::new(RwLock::new(token_res.extra_fields().id_token.to_owned())), refresh_token: Arc::new(RwLock::new(RefreshToken::new( 
token_res .refresh_token() .ok_or_else(|| { ClientError::TokenError( "Token response does not contain a refresh token".into(), ) })? .secret() .to_owned(), ))), expiry_time: Arc::new(RwLock::new(expiry_time)), issuer, scopes: token_res .scopes() .map(|s| s.iter().map(|s| s.to_string()).collect()) .unwrap_or_default(), client_id: device_code_details.extra_fields().client_id.clone(), token_url: device_code_details.extra_fields().token_url.clone(), token_file: Some(token_file.as_ref().to_owned()), }; me.write_token_file(token_res).await?; Ok(me) } async fn ensure_token(&self) -> Result<()> { let is_expired = OffsetDateTime::now_utc() - time::Duration::minutes(1) >= *self.expiry_time.read().await; if is_expired { tracing::debug!("Token has expired, attempting to refresh token"); let oauth_client: Oauth2Client< BasicErrorResponse, StandardTokenResponse<OidcTokenExtraFields, BasicTokenType>, BasicTokenType, BasicTokenIntrospectionResponse, StandardRevocableToken, BasicRevocationErrorResponse, > = Oauth2Client::new( ClientId::new(self.client_id.clone()), None, AuthUrl::new("https://not.needed.com".into()).unwrap(), Some(TokenUrl::new(self.token_url.clone()).map_err(|e| { ClientError::TokenError(format!("Invalid token url: {}", e)) })?), ) .set_auth_type(oauth2::AuthType::RequestBody); let token_res = { let mut refresh_token = self.refresh_token.write().await; let token_res = match oauth_client .exchange_refresh_token(&refresh_token) .request_async(async_http_client) .await { Ok(t) => t, Err(e) => { return Err(ClientError::TokenError(format!( "Unable to refresh token {:?}", e ))); } }; let (expiry, _) = data_from_token(&token_res.extra_fields().id_token)?; let mut expiry_time = self.expiry_time.write().await; let mut id_token = self.id_token.write().await; *expiry_time = expiry; *id_token = token_res.extra_fields().id_token.clone(); *refresh_token = RefreshToken::new( token_res .refresh_token() .ok_or_else(|| { ClientError::TokenError( "Token response does not contain a 
refresh token".into(), ) })? .secret() .to_owned(), ); token_res }; if let Some(p) = self.token_file.as_ref() { tracing::trace!(path = %p.display(), "Token refreshed and token file is set. Updating with token data"); self.write_token_file(token_res).await?; } } Ok(()) } async fn write_token_file( &self, mut token_res: StandardTokenResponse<OidcTokenExtraFields, BasicTokenType>, ) -> Result<()> { let token_file = match self.token_file.as_ref() { Some(p) => p, None => return Ok(()), }; let mut extra = token_res.extra_fields().to_owned(); let (_, issuer) = data_from_token(&token_res.extra_fields().id_token)?; extra.issuer = issuer.clone(); extra.client_id = self.client_id.clone(); extra.token_url = self.token_url.clone(); token_res.set_extra_fields(extra); tracing::info!(path = %token_file.display(), "Writing access token to file"); #[cfg(not(target_family = "windows"))] let mut file = OpenOptions::new() .create(true) .write(true) .mode(0o600) .truncate(true) .open(token_file) .await?; #[cfg(target_family = "windows")] let mut file = OpenOptions::new() .create(true) .write(true) .truncate(true) .open(token_file) .await?; file.write_all(&toml::to_vec(&token_res)?).await?; file.flush().await?; Ok(()) } } #[async_trait::async_trait] impl TokenManager for OidcToken { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder> { self.ensure_token().await?; let mut header_val = HeaderValue::from_str(&format!("Bearer {}", (*self.id_token.read().await).clone())) .map_err(|e| ClientError::Other(e.to_string()))?; header_val.set_sensitive(true); Ok(builder.header(AUTHORIZATION, header_val)) } } fn data_from_token(token: &str) -> Result<(OffsetDateTime, String)> { let mut validation = jsonwebtoken::Validation::default(); validation.validate_exp = false; validation.insecure_disable_signature_validation(); let fake_key = jsonwebtoken::DecodingKey::from_secret(b"fake"); let parsed_token = jsonwebtoken::decode::<Claims>(token, &fake_key, &validation) 
.map_err(|e| ClientError::TokenError(format!("Invalid token data: {}", e)))?; Ok((parsed_token.claims.exp, parsed_token.claims.iss)) }
use std::{ path::{Path, PathBuf}, sync::Arc, }; use oauth2::reqwest::async_http_client; use oauth2::{ basic::*, devicecode::DeviceAuthorizationResponse, AuthUrl, Client as Oauth2Client, ClientId, RefreshToken, StandardRevocableToken, StandardTokenResponse, TokenResponse, TokenUrl, }; use reqwest::{ header::{HeaderValue, AUTHORIZATION}, Client as HttpClient, RequestBuilder, }; use time::{serde::timestamp, OffsetDateTime}; use tokio::fs::OpenOptions; use tokio::io::AsyncWriteExt; use tokio::sync::RwLock; use super::{ClientError, Result}; #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] struct OidcTokenExtraFields { pub id_token: String, #[serde(default)] pub issuer: String, #[serde(default)] pub client_id: String, #[serde(default)] pub token_url: String, } impl oauth2::ExtraTokenFields for OidcTokenExtraFields {} #[derive(serde::Deserialize, Debug)] struct Claims { pub iss: String, #[serde(with = "timestamp")] pub exp: OffsetDateTime, } #[async_trait::async_trait] pub trait TokenManager { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder>; } #[derive(Clone, Default)] pub struct NoToken; #[async_trait::async_trait] impl TokenManager for NoToken { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder> { Ok(builder) } } #[derive(Clone)] pub struct LongLivedToken { token: String, } impl LongLivedToken { pub fn new(token: &str) -> Self { LongLivedToken { token: token.to_owned(), } } } #[async_trait::async_trait] impl TokenManager for LongLivedToken { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder> { let mut header_val = HeaderValue::from_str(&format!("Bearer {}", self.token)) .map_err(|e| ClientError::Other(e.to_string()))?; header_val.set_sensitive(true); Ok(builder.header(AUTHORIZATION, header_val)) } } #[derive(Clone)] pub struct HttpBasic { username: String, password: String, } impl HttpBasic { pub fn new(username: &str, password: &str) -> Self { HttpBasic 
{ username: username.to_owned(), password: password.to_owned(), } } } #[async_trait::async_trait] impl TokenManager for HttpBasic { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder> { let data = base64::encode(format!("{}:{}", self.username, self.password)); let mut header_val = HeaderValue::from_str(&format!("Basic {}", data)) .map_err(|e| ClientError::Other(e.to_string()))?; header_val.set_sensitive(true); Ok(builder.header(AUTHORIZATION, header_val)) } } type LockData<T> = Arc<RwLock<T>>; #[derive(Clone)] pub struct OidcToken { id_token: LockData<String>, refresh_token: LockData<RefreshToken>, expiry_time: LockData<OffsetDateTime>, #[allow(dead_code)] issuer: String, #[allow(dead_code)] scopes: Vec<String>, client_id: String, token_url: String, token_file: Option<PathBuf>, } impl OidcToken { pub async fn new_from_parts( id_token: &str, refresh_token: &str, client_id: &str, token_url: &str, scopes: Vec<String>, ) -> Result<Self> { let (expiry_time, issuer) = data_from_token(id_token)?; let me = OidcToken { id_token: Arc::new(RwLock::new(id_token.to_owned())), refresh_token: Arc::new(RwLock::new(RefreshToken::new(refresh_token.to_owned()))), expiry_time: Arc::new(RwLock::new(expiry_time)), issuer, scopes, client_id: client_id.to_owned(), token_url: token_url.to_owned(), token_file: None, }; me.ensure_token().await?; Ok(me) } pub async fn new_from_file(token_file: impl AsRef<Path>) -> Result<Self> { let path = token_file.as_ref().to_owned(); let raw = tokio::fs::read(&path).await?; let token_res: StandardTokenResponse<OidcTokenExtraFields, BasicTokenType> = toml::from_slice(&raw)?; let mut me = Self::new_from_parts( &token_res.extra_fields().id_token, token_res .refresh_token() .ok_or_else(|| { ClientError::TokenError( "Token response does not contain a refresh token".into(), ) })? 
.secret(), &token_res.extra_fields().client_id, &token_res.extra_fields().token_url, token_res .scopes() .map(|s| s.iter().map(|s| s.to_string()).collect()) .unwrap_or_default(), ) .await?; me.token_file = Some(path); Ok(me) } pub async fn login(bindle_base_url: &str, token_file: impl AsRef<Path>) -> Result<Self> { let (base_url, headers) = super::base_url_and_headers(bindle_base_url)?; let login_resp = HttpClient::builder() .build()? .get(base_url.join(super::LOGIN_ENDPOINT).unwrap()) .query(&crate::LoginParams { provider: "nothing".into(), }) .headers(headers) .send() .await?; let login_resp = super::unwrap_status(login_resp, super::Endpoint::Login, super::Operation::Login) .await?; let device_code_details: DeviceAuthorizationResponse< crate::DeviceAuthorizationExtraFields, > = toml::from_slice(&login_resp.bytes().await?)?; println!( "Open this URL in your browser:\n{}\nand then enter the code when prompted: {}", **device_code_details.verification_uri(), device_code_details.user_code().secret() ); let oauth_client: Oauth2Client< BasicErrorResponse, StandardTokenResponse<OidcTokenExtraFields, BasicTokenType>, BasicTokenType, BasicTokenIntrospectionResponse, StandardRevocableToken, BasicRevocationErrorResponse, > = Oauth2Client::new( ClientId::new(device_code_details.extra_fields().client_id.clone()), None, AuthUrl::new("https://not.needed.com".into()).unwrap(), Some(TokenUrl::new(device_code_details.extra_fields().token_url.clone()).unwrap()), ) .set_auth_type(oauth2::AuthType::RequestBody); let token_res = match oauth_client .exchange_device_access_token(&device_code_details) .request_async(async_http_client, tokio::time::sleep, None) .await { Ok(t) => t, Err(e) => { return Err(ClientError::Other(format!("{:?}", e))); } }; let (expiry_time, issuer) = data_from_token(&token_res.extra_fields().id_token)?; let me = OidcToken { id_token: Arc::new(RwLock::new(token_res.extra_fields().id_token.to_owned())), refresh_token: Arc::new(RwLock::new(RefreshToken::new( 
token_res .refresh_token() .ok_or_else(|| { ClientError::TokenError( "Token response does not contain a refresh token".into(), ) })? .secret() .to_owned(), ))), expiry_time: Arc::new(RwLock::new(expiry_time)), issuer, scopes: token_res .scopes() .map(|s| s.iter().map(|s| s.to_string()).collect()) .unwrap_or_default(), client_id: device_code_details.extra_fields().client_id.clone(), token_url: device_code_details.extra_fields().token_url.clone(), token_file: Some(token_file.as_ref().to_owned()), }; me.write_token_file(token_res).await?; Ok(me) } async fn ensure_token(&self) -> Result<()> { let is_expired = OffsetDateTime::now_utc() - time::Duration::minutes(1) >= *self.expiry_time.read().await; if is_expired { tracing::debug!("Token has expired, attempting to refresh token"); let oauth_client: Oauth2Client< BasicErrorResponse, StandardTokenResponse<OidcTokenExtraFields, BasicTokenType>, BasicTokenType, BasicTokenIntrospectionResponse, StandardRevocableToken, BasicRevocationErrorResponse, > = Oauth2Client::new( ClientId::new(self.client_id.clone()), None, AuthUrl::new("https://not.needed.com".into()).unwrap(),
, ) .set_auth_type(oauth2::AuthType::RequestBody); let token_res = { let mut refresh_token = self.refresh_token.write().await; let token_res = match oauth_client .exchange_refresh_token(&refresh_token) .request_async(async_http_client) .await { Ok(t) => t, Err(e) => { return Err(ClientError::TokenError(format!( "Unable to refresh token {:?}", e ))); } }; let (expiry, _) = data_from_token(&token_res.extra_fields().id_token)?; let mut expiry_time = self.expiry_time.write().await; let mut id_token = self.id_token.write().await; *expiry_time = expiry; *id_token = token_res.extra_fields().id_token.clone(); *refresh_token = RefreshToken::new( token_res .refresh_token() .ok_or_else(|| { ClientError::TokenError( "Token response does not contain a refresh token".into(), ) })? .secret() .to_owned(), ); token_res }; if let Some(p) = self.token_file.as_ref() { tracing::trace!(path = %p.display(), "Token refreshed and token file is set. Updating with token data"); self.write_token_file(token_res).await?; } } Ok(()) } async fn write_token_file( &self, mut token_res: StandardTokenResponse<OidcTokenExtraFields, BasicTokenType>, ) -> Result<()> { let token_file = match self.token_file.as_ref() { Some(p) => p, None => return Ok(()), }; let mut extra = token_res.extra_fields().to_owned(); let (_, issuer) = data_from_token(&token_res.extra_fields().id_token)?; extra.issuer = issuer.clone(); extra.client_id = self.client_id.clone(); extra.token_url = self.token_url.clone(); token_res.set_extra_fields(extra); tracing::info!(path = %token_file.display(), "Writing access token to file"); #[cfg(not(target_family = "windows"))] let mut file = OpenOptions::new() .create(true) .write(true) .mode(0o600) .truncate(true) .open(token_file) .await?; #[cfg(target_family = "windows")] let mut file = OpenOptions::new() .create(true) .write(true) .truncate(true) .open(token_file) .await?; file.write_all(&toml::to_vec(&token_res)?).await?; file.flush().await?; Ok(()) } } #[async_trait::async_trait] 
impl TokenManager for OidcToken { async fn apply_auth_header(&self, builder: RequestBuilder) -> Result<RequestBuilder> { self.ensure_token().await?; let mut header_val = HeaderValue::from_str(&format!("Bearer {}", (*self.id_token.read().await).clone())) .map_err(|e| ClientError::Other(e.to_string()))?; header_val.set_sensitive(true); Ok(builder.header(AUTHORIZATION, header_val)) } } fn data_from_token(token: &str) -> Result<(OffsetDateTime, String)> { let mut validation = jsonwebtoken::Validation::default(); validation.validate_exp = false; validation.insecure_disable_signature_validation(); let fake_key = jsonwebtoken::DecodingKey::from_secret(b"fake"); let parsed_token = jsonwebtoken::decode::<Claims>(token, &fake_key, &validation) .map_err(|e| ClientError::TokenError(format!("Invalid token data: {}", e)))?; Ok((parsed_token.claims.exp, parsed_token.claims.iss)) }
Some(TokenUrl::new(self.token_url.clone()).map_err(|e| { ClientError::TokenError(format!("Invalid token url: {}", e)) })?)
call_expression
[ { "content": "fn parse_basic(auth_data: &str) -> anyhow::Result<(String, String)> {\n\n match auth_data.strip_prefix(HTTP_BASIC_PREFIX) {\n\n None => anyhow::bail!(\"Wrong auth type. Only Basic auth is supported\"),\n\n Some(suffix) => {\n\n // suffix should be base64 string\n\n let decoded = String::from_utf8(base64::decode(suffix)?)?;\n\n let pair: Vec<&str> = decoded.splitn(2, ':').collect();\n\n if pair.len() != 2 {\n\n anyhow::bail!(\"Malformed Basic header\")\n\n } else {\n\n Ok((pair[0].to_owned(), pair[1].to_owned()))\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// A representation of a user authenticated by HTTP basic auth. This user contains no groups and\n\n/// will match the username given in the basic auth header\n\npub struct HttpUser {\n\n username: String,\n", "file_path": "src/authn/http_basic.rs", "rank": 1, "score": 274438.4436585719 }, { "content": "#[instrument(level = \"trace\")]\n\nfn handle_tail(tail: &str) -> Result<(String, Option<String>), Rejection> {\n\n let mut split: Vec<String> = tail\n\n .split(PARCEL_ID_SEPARATOR)\n\n .map(|s| s.to_owned())\n\n .collect();\n\n\n\n // The unwraps here are safe because we are checking length\n\n match split.len() {\n\n 1 => {\n\n trace!(bindle_id = %split[0], \"Matched only bindle ID\");\n\n Ok((split.pop().unwrap(), None))\n\n }\n\n 2 => {\n\n trace!(\n\n bindle_id = %split[0],\n\n sha = %split[1],\n\n \"Matched bindle ID and sha\"\n\n );\n\n let parcel = split.pop().unwrap();\n\n let inv = split.pop().unwrap();\n\n Ok((inv, Some(parcel)))\n\n }\n\n _ => Err(custom(InvalidRequestPath)),\n\n }\n\n}\n\n\n", "file_path": "src/server/filters.rs", "rank": 2, "score": 203932.52752369395 }, { "content": "/// A warp filter that returns the invoice ID and parcel ID as a tuple if the path is for a parcel\n\n/// and rejects it otherwise\n\npub fn parcel() -> impl Filter<Extract = ((String, String),), Error = Rejection> + Copy {\n\n warp::path(\"_i\")\n\n .and(warp::path::tail())\n\n .and_then(|tail: warp::path::Tail| 
{\n\n async move {\n\n let (inv, parcel) = match handle_tail(tail.as_str()) {\n\n Ok(i) => i,\n\n // The try operator doesn't work because I can't implement `From` for the sealed\n\n // CombinedRejection type\n\n Err(e) => return Err(e),\n\n };\n\n let parcel = match parcel {\n\n None => return Err(custom(InvalidRequestPath)),\n\n Some(p) => p,\n\n };\n\n Ok((inv, parcel))\n\n }\n\n .instrument(tracing::debug_span!(\"parcel_filter\"))\n\n })\n\n}\n\n\n", "file_path": "src/server/filters.rs", "rank": 4, "score": 193493.9031841574 }, { "content": "fn map_io_error(e: std::io::Error) -> ProviderError {\n\n if matches!(e.kind(), std::io::ErrorKind::NotFound) {\n\n return ProviderError::NotFound;\n\n }\n\n ProviderError::from(e)\n\n}\n\n\n", "file_path": "src/provider/embedded.rs", "rank": 5, "score": 189379.14342678248 }, { "content": "fn map_io_error(e: std::io::Error) -> ProviderError {\n\n if matches!(e.kind(), std::io::ErrorKind::NotFound) {\n\n return ProviderError::NotFound;\n\n }\n\n ProviderError::from(e)\n\n}\n\n\n\n/// An internal wrapper to implement `AsyncWrite` on Sha256\n\npub(crate) struct AsyncSha256 {\n\n inner: Mutex<Sha256>,\n\n}\n\n\n\nimpl AsyncSha256 {\n\n /// Equivalent to the `Sha256::new()` function\n\n pub(crate) fn new() -> Self {\n\n AsyncSha256 {\n\n inner: Mutex::new(Sha256::new()),\n\n }\n\n }\n\n\n", "file_path": "src/provider/file/mod.rs", "rank": 6, "score": 185858.09740547853 }, { "content": "/// A warp filter that returns the invoice ID if the path is for an invoice and rejects it otherwise\n\npub fn invoice() -> impl Filter<Extract = (String,), Error = Rejection> + Copy {\n\n warp::path(\"_i\")\n\n .and(warp::path::tail())\n\n .and_then(|tail: warp::path::Tail| {\n\n async move {\n\n let (inv, parcel) = match handle_tail(tail.as_str()) {\n\n Ok(i) => i,\n\n // The try operator doesn't work because I can't implement `From` for the sealed\n\n // CombinedRejection type\n\n Err(e) => return Err(e),\n\n };\n\n if parcel.is_some() {\n\n 
return Err(custom(InvalidRequestPath));\n\n }\n\n Ok(inv)\n\n }\n\n .instrument(tracing::debug_span!(\"invoice_filter\"))\n\n })\n\n}\n\n\n", "file_path": "src/server/filters.rs", "rank": 8, "score": 181568.57793420015 }, { "content": "type Result<T> = std::result::Result<T, ParseError>;\n\n\n\nconst PATH_SEPARATOR: char = '/';\n\n\n\n/// A parsed representation of an ID string for a bindle. This is currently defined as an arbitrary\n\n/// path with a version string at the end.\n\n///\n\n/// Examples of valid ID strings include:\n\n///\n\n/// - `foo/0.1.0`\n\n/// - `example.com/foo/1.2.3`\n\n/// - `example.com/a/longer/path/foo/1.10.0-rc.1`\n\n///\n\n/// An `Id` can be parsed from any string using the `.parse()` method:\n\n/// ```\n\n/// use bindle::Id;\n\n///\n\n/// let id: Id = \"example.com/foo/1.2.3\".parse().expect(\"should parse\");\n\n/// println!(\"{}\", id);\n\n/// ```\n", "file_path": "src/id.rs", "rank": 9, "score": 177178.01962794375 }, { "content": "/// Use an accept header to determine how to serialize content.\n\n///\n\n/// This will examine the Accept header, looking for the best match, and then it will\n\n/// use the appropriate serializer to serialize the data.\n\n///\n\n/// The current implementation ignores `q=` annotations, assigning preference based on\n\n/// the first MIME type to match.\n\n///\n\n/// For example, `Accept: text/json, application/toml;q=0.9` will cause encoding to be in JSON.\n\n/// If no suitable content type is found, this will encode in application/toml, as that\n\n/// is the behavior described in the spec.\n\npub fn serialized_data<T>(val: &T, accept: String) -> SerializedData\n\nwhere\n\n T: Serialize,\n\n{\n\n let best_fit = accept_best_fit(accept.as_str());\n\n let inner = match best_fit {\n\n JSON_MIME_TYPE => serde_json::to_vec(val).map_err(|e| {\n\n tracing::log::error!(\"Error while serializing TOML: {:?}\", e);\n\n }),\n\n // TOML is default\n\n _ => toml::to_vec(val).map_err(|e| {\n\n tracing::log::error!(\"Error 
while serializing TOML: {:?}\", e);\n\n }),\n\n };\n\n SerializedData {\n\n inner,\n\n mime: best_fit.to_owned(),\n\n }\n\n}\n\n\n", "file_path": "src/server/reply.rs", "rank": 10, "score": 175820.5258333481 }, { "content": "#[async_trait::async_trait]\n\npub trait Provider {\n\n /// This takes an invoice and creates it in storage. Returns the newly created invoice and a\n\n /// list of missing parcels\n\n ///\n\n /// It must verify that each referenced parcel is present in storage. Any parcel that is not\n\n /// present must be returned in the list of labels.\n\n async fn create_invoice<I>(&self, inv: I) -> Result<(crate::Invoice, Vec<super::Label>)>\n\n where\n\n I: Signed + Verified + Send + Sync;\n\n\n\n /// Load an invoice and return it\n\n ///\n\n /// This will return an invoice if the bindle exists and is not yanked. The default\n\n /// implementation of this method is sufficient for most use cases, but can be overridden if\n\n /// needed\n\n async fn get_invoice<I>(&self, id: I) -> Result<super::Invoice>\n\n where\n\n I: TryInto<Id> + Send,\n\n I::Error: Into<ProviderError>,\n\n {\n", "file_path": "src/provider/mod.rs", "rank": 11, "score": 168764.4099010137 }, { "content": "/// A warp filter that parses the body of a request from TOML to the specified type\n\n// Lovingly borrowed from https://docs.rs/warp/0.2.5/src/warp/filters/body.rs.html\n\npub fn toml<T: DeserializeOwned + Send>() -> impl Filter<Extract = (T,), Error = Rejection> + Copy {\n\n // We can't use the http type constant here because clippy is warning about it having internal\n\n // mutability.\n\n warp::filters::header::header::<String>(\"Content-Type\")\n\n .and(warp::body::aggregate())\n\n .and_then(parse_toml)\n\n}\n\n\n\nasync fn parse_toml<T: DeserializeOwned + Send>(\n\n raw_header: String,\n\n buf: impl warp::Buf,\n\n) -> Result<T, Rejection> {\n\n let mime: mime::Mime = raw_header\n\n .parse()\n\n .map_err(|err: mime::FromStrError| custom(BodyDeserializeError { cause: err.into() 
}))?;\n\n // As far as I can tell from the code, essence_str is lowercased, so we shouldn't need to\n\n // do it here\n\n if mime.essence_str() != TOML_MIME_TYPE {\n\n return Err(custom(BodyDeserializeError {\n\n cause: \"content-type is not TOML\".into(),\n", "file_path": "src/server/filters.rs", "rank": 12, "score": 161595.1486469263 }, { "content": "/// A trait for any system that can authorize any [`Authorizable`](Authorizable) type\n\n// TODO: Will this need to be async?\n\npub trait Authorizer {\n\n /// Checks whether or not the given item is authorized to access provided path and method,\n\n /// returning a failure reason in the case where the item is not authorized\n\n // TODO: We might want to have a custom error enum down the line\n\n fn authorize<A: Authorizable>(\n\n &self,\n\n item: A,\n\n path: &str,\n\n method: &warp::http::Method,\n\n ) -> anyhow::Result<()>;\n\n}\n", "file_path": "src/authz/mod.rs", "rank": 13, "score": 159348.67329751723 }, { "content": "/// A warp filter for adding an authenticator\n\nfn authenticate<Authn: Authenticator + Clone + Send + Sync>(\n\n authn: Authn,\n\n) -> impl Filter<Extract = (Either<Anonymous, Authn::Item>,), Error = Rejection> + Clone {\n\n // We get the header optionally as anonymous auth could be enabled\n\n warp::any()\n\n .map(move || authn.clone())\n\n .and(warp::header::optional::<String>(\"Authorization\"))\n\n .and_then(_authenticate)\n\n}\n\n\n\n#[instrument(level = \"trace\", skip(authn, auth_data), name = \"authentication\")]\n\nasync fn _authenticate<A: Authenticator + Clone + Send>(\n\n authn: A,\n\n auth_data: Option<String>,\n\n) -> Result<Either<Anonymous, A::Item>, Rejection> {\n\n let data = match auth_data {\n\n Some(s) => s,\n\n // If we had no auth data, that means this is anonymous\n\n None => return Ok(Either::Left(Anonymous)),\n\n };\n\n match authn.authenticate(&data).await {\n\n Ok(a) => Ok(Either::Right(a)),\n\n Err(e) => {\n\n debug!(error = %e, \"Authentication error\");\n\n 
Err(warp::reject::custom(AuthnFail))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/server/filters.rs", "rank": 14, "score": 153768.80269242314 }, { "content": "fn parse_roles(r: Option<&&str>) -> Result<Vec<SignatureRole>, &'static str> {\n\n let raw = r.ok_or(\"Multiple attestation strategy is missing roles\")?;\n\n if !raw.ends_with(']') {\n\n return Err(\"Missing closing ']' on roles\");\n\n }\n\n raw.trim_end_matches(']')\n\n .split(',')\n\n .map(|role| role.parse::<SignatureRole>())\n\n .collect::<Result<Vec<_>, _>>()\n\n}\n\n\n\n/// A strategy for verifying an invoice.\n\nimpl VerificationStrategy {\n\n fn verify_signature(&self, sig: &Signature, cleartext: &[u8]) -> Result<(), SignatureError> {\n\n let pk = base64::decode(sig.key.as_bytes())\n\n .map_err(|_| SignatureError::CorruptKey(sig.key.clone()))?;\n\n let sig_block = base64::decode(sig.signature.as_bytes())\n\n .map_err(|_| SignatureError::CorruptSignature(sig.key.clone()))?;\n\n\n\n let pubkey =\n", "file_path": "src/invoice/verification.rs", "rank": 15, "score": 152112.7009510154 }, { "content": "/// Storage for secret keys\n\n///\n\n/// Any possible number of key storage systems may be used for key storage, but\n\n/// all of them must provide a way for the system to fetch a key matching the\n\n/// desired role.\n\npub trait SecretKeyStorage {\n\n /// Get a key appropriate for signing with the given role.\n\n ///\n\n /// If no key is found, this will return a None.\n\n /// In general, if multiple keys match, the implementation chooses the \"best fit\"\n\n /// and returns that key.\n\n fn get_first_matching(&self, role: &SignatureRole) -> Option<&SecretKeyEntry>;\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct SecretKeyFile {\n\n pub version: String,\n\n pub key: Vec<SecretKeyEntry>,\n\n}\n\n\n\nimpl Default for SecretKeyFile {\n\n fn default() -> Self {\n\n Self {\n\n version: KEY_RING_VERSION.to_owned(),\n", "file_path": 
"src/invoice/signature.rs", "rank": 16, "score": 151685.33474453294 }, { "content": "/// A marker trait that indicates this is a caching implementation (as opposed to just a provider)\n\npub trait Cache: Provider {}\n\n\n\n/// A custom result type representing a possible cache miss. As all underlying caches implement\n\n/// `Storage`, this contains a storage error that is guaranteed not to be a cache miss (e.g.\n\n/// NotFound). The Option indicates whether a value was returned. This value is obtained by\n\n/// coverting a normal storage result using `into_cache_result`\n\npub(crate) type CacheResult<T> = Result<Option<T>, crate::provider::ProviderError>;\n\n\n\n/// Converts a storage result into a `CacheResult`\n\npub(crate) fn into_cache_result<T>(res: crate::provider::Result<T>) -> CacheResult<T> {\n\n match res {\n\n Ok(val) => Ok(Some(val)),\n\n Err(e) if matches!(e, ProviderError::NotFound) => Ok(None),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n", "file_path": "src/cache/mod.rs", "rank": 17, "score": 151588.74336214166 }, { "content": "fn parse_accept(header: &str) -> Vec<mime::Mime> {\n\n header\n\n .split(',')\n\n .filter_map(|h| match h.trim().parse::<mime::Mime>() {\n\n Ok(m) => Some(m),\n\n Err(e) => {\n\n tracing::warn!(\n\n header,\n\n %e,\n\n \"Accept header contains unparsable media type. 
Ignoring.\"\n\n );\n\n None\n\n }\n\n })\n\n .collect()\n\n}\n\n\n\n/// A serialized body.\n\n///\n\n/// Currently, this may be JSON or TOML.\n", "file_path": "src/server/reply.rs", "rank": 18, "score": 150279.06320379846 }, { "content": "fn default_scaffold_dir() -> PathBuf {\n\n let root = std::env::var(\"CARGO_MANIFEST_DIR\").expect(\"Unable to get project directory\");\n\n let mut path = PathBuf::from(root);\n\n path.push(SCAFFOLD_DIR);\n\n path\n\n}\n\n\n", "file_path": "src/testing/mod.rs", "rank": 19, "score": 148648.5966847503 }, { "content": "/// A helper function for converting a [`ProviderError`](crate::provider::ProviderError) into a Warp\n\n/// `Reply` with the proper status code. It will return a TOML body that looks like:\n\n/// ```toml\n\n/// error = \"bindle is yanked\"\n\n/// ```\n\npub fn into_reply(error: ProviderError) -> warp::reply::WithStatus<SerializedData> {\n\n let status_code = match &error {\n\n ProviderError::CreateYanked => StatusCode::UNPROCESSABLE_ENTITY,\n\n ProviderError::NotFound => StatusCode::NOT_FOUND,\n\n ProviderError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => {\n\n // Remap the error in the case this is a not found error\n\n return reply_from_error(ProviderError::NotFound, StatusCode::NOT_FOUND);\n\n }\n\n ProviderError::Exists | ProviderError::WriteInProgress => StatusCode::CONFLICT,\n\n ProviderError::Malformed(_)\n\n | ProviderError::Unserializable(_)\n\n | ProviderError::DigestMismatch\n\n | ProviderError::InvalidId(_)\n\n | ProviderError::SizeMismatch => StatusCode::BAD_REQUEST,\n\n ProviderError::Yanked => StatusCode::FORBIDDEN,\n\n #[cfg(feature = \"client\")]\n\n ProviderError::ProxyError(e) => {\n\n // Unwrap the inner error so as to provide better details to the client\n\n return reply_from_error(e, StatusCode::INTERNAL_SERVER_ERROR);\n\n }\n\n ProviderError::Other(_) | ProviderError::Io(_) => StatusCode::INTERNAL_SERVER_ERROR,\n\n ProviderError::FailedSigning(e) => {\n\n // Unwrap the inner error so as 
to provide better details to the client\n\n return reply_from_error(e, StatusCode::BAD_REQUEST);\n\n }\n\n };\n\n\n\n reply_from_error(error, status_code)\n\n}\n\n\n", "file_path": "src/server/reply.rs", "rank": 21, "score": 147483.84132876352 }, { "content": "fn assert_status(output: std::process::Output, message: &str) {\n\n assert!(\n\n output.status.success(),\n\n \"{}:\\nStdout:\\n {}\\nStderr:\\n{}\",\n\n message,\n\n String::from_utf8_lossy(&output.stdout),\n\n String::from_utf8_lossy(&output.stderr)\n\n );\n\n}\n", "file_path": "tests/cli.rs", "rank": 22, "score": 147110.04604227285 }, { "content": "fn map_sled_error(e: SledError) -> ProviderError {\n\n match &e {\n\n // This is a panicable error because if the collection is somehow gone, we can't keep\n\n // continuing\n\n SledError::CollectionNotFound(e) => panic!(\n\n \"The collection {} was not found, something is wrong with the database\",\n\n String::from_utf8_lossy(e)\n\n ),\n\n SledError::Io(i) => {\n\n error!(error = ?e, \"IO error occurred while accessingata store\");\n\n // Add some more decoration as to _where_ the IO error came from\n\n ProviderError::Io(std::io::Error::new(\n\n i.kind(),\n\n format!(\"Error accessing local data store: {}\", i),\n\n ))\n\n }\n\n SledError::Unsupported(_) | SledError::ReportableBug(_) => {\n\n error!(error = ?e, \"Error while attempting to access embedded data store\");\n\n ProviderError::Other(String::from(\n\n \"Internal system error while performing data storage lookup\",\n", "file_path": "src/provider/embedded.rs", "rank": 23, "score": 146705.06406434212 }, { "content": "trait ConditionalBuilder {\n\n fn and_if(self, condition: bool, build_method: impl Fn(Self) -> Self) -> Self\n\n where\n\n Self: Sized,\n\n {\n\n if condition {\n\n build_method(self)\n\n } else {\n\n self\n\n }\n\n }\n\n}\n\n\n\nimpl ConditionalBuilder for reqwest::ClientBuilder {}\n", "file_path": "src/client/mod.rs", "rank": 25, "score": 138499.44059927008 }, { "content": "/// Helper 
function for \"verifying\" and \"signing\" with noop operations\n\nfn noop_verify_and_sign(inv: crate::Invoice) -> impl Signed + Verified + Send + Sync {\n\n NoopSigned(NoopVerified(inv))\n\n}\n", "file_path": "src/cache/mod.rs", "rank": 26, "score": 132034.40650746477 }, { "content": "/// A trait that can be implemented on any type (such as a custom `User` or `Token` type) so that it\n\n/// can be authorized by an [`Authorizer`](Authorizer)\n\npub trait Authorizable {\n\n /// Returns the identity or username of the authenticated user\n\n fn principal(&self) -> String;\n\n\n\n /// Returns the groups the authenticated user is a member of, generally embedded on something\n\n /// like a JWT or fetched from an upstream server\n\n fn groups(&self) -> Vec<String>;\n\n}\n\n\n", "file_path": "src/authz/mod.rs", "rank": 27, "score": 121256.50128767446 }, { "content": "#[async_trait::async_trait]\n\npub trait Authenticator {\n\n /// The authorizable item type that is returned from the `authenticate` method\n\n type Item: Authorizable + Send + 'static;\n\n\n\n /// Authenticate the request given the arbitrary `auth_data`, returning an arbitrary error in\n\n /// case of a failure. This data will likely be the value of the Authorization header. Anonymous\n\n /// auth will be indicated by an empty auth_data string\n\n async fn authenticate(&self, auth_data: &str) -> anyhow::Result<Self::Item>;\n\n\n\n // TODO(thomastaylor312): Perhaps we should create a single method that returns another trait\n\n // implementing type for actually authenticating with a service. That way we can encapsulate all\n\n // the data we need rather than dangling all these methods we need here\n\n\n\n /// The client_id to use for this authentication. Defaults to an empty string if not implemented\n\n fn client_id(&self) -> &str {\n\n \"\"\n\n }\n\n\n\n /// The device code authorization url to use for this authentication. 
Defaults to an empty\n\n /// string if not implemented\n\n fn auth_url(&self) -> &str {\n\n \"\"\n\n }\n\n\n\n /// The token url to use for this authentication. Defaults to an empty string if not implemented\n\n fn token_url(&self) -> &str {\n\n \"\"\n\n }\n\n}\n", "file_path": "src/authn/mod.rs", "rank": 28, "score": 121247.14960050455 }, { "content": "#[async_trait::async_trait]\n\npub trait Search {\n\n /// A high-level function that can take raw search strings (queries and filters) and options.\n\n ///\n\n /// This will parse the terms and filters according to its internal rules, and return\n\n /// a set of matches.\n\n ///\n\n /// An error is returned if either there is something incorrect in the terms/filters,\n\n /// or if the search engine itself fails to process the query.\n\n async fn query(\n\n &self,\n\n term: &str,\n\n filter: &str,\n\n options: SearchOptions,\n\n ) -> anyhow::Result<Matches>;\n\n\n\n /// Given an invoice, extract information from it that will be useful for searching.\n\n ///\n\n /// This high-level feature does not provide any guarantees about how it will\n\n /// process the invoice. But it may implement Strict and/or Standard modes\n\n /// described in the protocol specification.\n\n ///\n\n /// If the index function is given an invoice it has already indexed, it treats\n\n /// the call as an update. Otherwise, it adds a new entry to the index.\n\n ///\n\n /// As a special note, if an invoice is yanked, the index function will mark it\n\n /// as such, following the protocol specification's requirements for yanked\n\n /// invoices.\n\n async fn index(&self, document: &crate::Invoice) -> anyhow::Result<()>;\n\n}\n", "file_path": "src/search/mod.rs", "rank": 29, "score": 121247.14960050455 }, { "content": "/// A Marker trait that should not be exposed publicly. 
It is used to mark a trait as \"sealed\",\n\n/// which means that the trait cannot be implemented outside of this crate\n\npub trait Sealed {}\n", "file_path": "src/invoice/sealed.rs", "rank": 30, "score": 121246.36315244272 }, { "content": "/// Parse an Accept header and return the best possible handler.\n\n///\n\n/// This will always return one of the supported serializers, defaulting to\n\n/// application/toml.\n\nfn accept_best_fit(accept_value: &str) -> &str {\n\n let accept_items = parse_accept(accept_value);\n\n debug!(\n\n %accept_value,\n\n ?accept_items,\n\n \"Parsed accept header into list\",\n\n );\n\n\n\n // Basically, we're working around the issue that there are multiple MIME types\n\n // for JSON (application/json and text/json, as well as application/json+STUFF)\n\n let best_fit = accept_items\n\n .iter()\n\n .find_map(|m| match m.subtype().as_str() {\n\n \"toml\" => Some(TOML_MIME_TYPE),\n\n \"json\" => Some(JSON_MIME_TYPE),\n\n _ => None,\n\n })\n\n .unwrap_or(TOML_MIME_TYPE);\n\n\n\n debug!(%best_fit, \"Selected a best-fit MIME\");\n\n best_fit\n\n}\n\n\n", "file_path": "src/server/reply.rs", "rank": 31, "score": 121236.13140531402 }, { "content": "// A more generic wrapper that takes any ToString implementation (which includes Errors) and builds\n\n// a TOML error body with the given status code\n\npub fn reply_from_error(\n\n error: impl std::string::ToString,\n\n status_code: warp::http::StatusCode,\n\n) -> warp::reply::WithStatus<SerializedData> {\n\n warp::reply::with_status(\n\n serialized_data(\n\n &crate::ErrorResponse {\n\n error: error.to_string(),\n\n },\n\n TOML_MIME_TYPE.to_owned(),\n\n ),\n\n status_code,\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[test]\n\n fn test_parse_accept() {\n", "file_path": "src/server/reply.rs", "rank": 32, "score": 116623.5462742862 }, { "content": "fn scaffold_dir() -> PathBuf {\n\n std::env::var(SCAFFOLD_DIR_ENV)\n\n .ok()\n\n .map(PathBuf::from)\n\n 
.unwrap_or_else(default_scaffold_dir)\n\n}\n\n\n\n/// A struct containing the SHA of the data and the data as bytes\n\n#[derive(Clone, Debug)]\n\npub struct ParcelInfo {\n\n pub sha: String,\n\n pub data: Vec<u8>,\n\n}\n\n\n\n/// A scaffold loaded from disk, containing the raw bytes for all files in the bindle.\n\n#[derive(Clone, Debug)]\n\npub struct RawScaffold {\n\n pub invoice: Vec<u8>,\n\n pub parcel_files: HashMap<String, ParcelInfo>,\n\n pub keys: SecretKeyFile,\n", "file_path": "src/testing/mod.rs", "rank": 33, "score": 116572.92883821003 }, { "content": "/// Sign the parcels in the invoice using the given list of roles and keys. This is a list of tuples\n\n/// containing a [`SignatureRole`] and [`SecretKeyEntry`] in that order. Returns a [`SignedInvoice`]\n\n///\n\n/// Note that this signature will be invalidated if any parcels are added after this signature.\n\n///\n\n/// In the current version of the spec, a signature is generated by combining the signer's ID, the\n\n/// invoice version, and a list of parcels, and then performing a cryptographic signature on those\n\n/// fields. The result is then stored in a `[[signature]]` block on the invoice. 
Multiple signatures\n\n/// can be attached to any invoice.\n\npub fn sign<I>(\n\n mut invoice: I,\n\n sign_with: Vec<(SignatureRole, &SecretKeyEntry)>,\n\n) -> Result<SignedInvoice<I>, SignatureError>\n\nwhere\n\n I: BorrowMut<Invoice> + Into<crate::Invoice>,\n\n{\n\n let inv = invoice.borrow_mut();\n\n for (role, key) in sign_with {\n\n sign_one(inv, role, key)?;\n\n }\n\n\n\n Ok(SignedInvoice(invoice))\n\n}\n\n\n", "file_path": "src/invoice/mod.rs", "rank": 34, "score": 116252.79495938675 }, { "content": "#[async_trait::async_trait]\n\npub trait KeyRingSaver {\n\n /// Save the keyring to the given source\n\n async fn save(&self, keyring: &KeyRing) -> anyhow::Result<()>;\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl<T: AsRef<Path> + Sync> KeyRingLoader for T {\n\n async fn load(&self) -> anyhow::Result<KeyRing> {\n\n let raw_data = tokio::fs::read(self).await.map_err(|e| {\n\n anyhow::anyhow!(\n\n \"failed to read TOML file {}: {}\",\n\n self.as_ref().display(),\n\n e\n\n )\n\n })?;\n\n let res: KeyRing = toml::from_slice(&raw_data)?;\n\n Ok(res)\n\n }\n\n}\n\n\n", "file_path": "src/invoice/signature.rs", "rank": 35, "score": 116144.93133312765 }, { "content": "#[async_trait::async_trait]\n\npub trait KeyRingLoader {\n\n /// Load the keyring from source, returning the KeyRing\n\n async fn load(&self) -> anyhow::Result<KeyRing>;\n\n}\n\n\n\n/// Keyrings could be saved to any number of sources. This trait allows implementors to create\n\n/// custom saving helpers for keyrings\n", "file_path": "src/invoice/signature.rs", "rank": 36, "score": 116144.93133312765 }, { "content": "/// A sealed trait used to mark that an invoice has been signed. This trait cannot be implemented by\n\n/// consumers of the bindle crate\n\npub trait Signed: sealed::Sealed {\n\n /// Consumes the object, returning the signed invoice\n\n fn signed(self) -> Invoice;\n\n}\n\n\n\n/// The main structure for a Bindle invoice.\n\n///\n\n/// The invoice describes a specific version of a bindle. 
For example, the bindle\n\n/// `foo/bar/1.0.0` would be represented as an Invoice with the `BindleSpec` name\n\n/// set to `foo/bar` and version set to `1.0.0`.\n\n///\n\n/// Most fields on this struct are singular to best represent the specification. There,\n\n/// fields like `group` and `parcel` are singular due to the conventions of TOML.\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\n#[serde(deny_unknown_fields, rename_all = \"camelCase\")]\n\npub struct Invoice {\n\n pub bindle_version: String,\n\n pub yanked: Option<bool>,\n\n pub yanked_signature: Option<Vec<Signature>>,\n\n pub bindle: BindleSpec,\n", "file_path": "src/invoice/mod.rs", "rank": 37, "score": 110266.94160548321 }, { "content": "/// A marker trait indicating that an invoice has been verified\n\npub trait Verified: super::sealed::Sealed {}\n\n\n\n/// This enumerates the verifications strategies described in the signing spec.\n\n#[derive(Debug, Clone)]\n\npub enum VerificationStrategy {\n\n /// CreativeIntegrity verifies that (a) the key that signs as Creator is a known key,\n\n /// and that the signature is valid.\n\n CreativeIntegrity,\n\n /// AuthoritativeIntegrity verifies that at least one of the Creator or Approver keys\n\n /// is known and the signature is valid.\n\n AuthoritativeIntegrity,\n\n /// Verify that the Creator key is known and that all signatures are valid.\n\n ///\n\n /// This is subject to a DOS attack if a signer can generate intentionally bad signatures.\n\n GreedyVerification,\n\n /// Verify that every key on the invoice is known, and that every signature is valid.\n\n ExhaustiveVerification,\n\n /// Verifies that all signatures of the given roles are valid and signed by known keys.\n\n MultipleAttestation(Vec<SignatureRole>),\n\n /// Verifies that all signatures of the given roles are valid and signed by known keys. 
Will\n", "file_path": "src/invoice/verification.rs", "rank": 38, "score": 105557.97317717122 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Claims {\n\n preferred_username: Option<String>,\n\n email: Option<String>,\n\n email_verified: Option<bool>,\n\n sub: String,\n\n iss: String,\n\n groups: Option<Vec<String>>,\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Authenticator for OidcAuthenticator {\n\n type Item = OidcUser;\n\n\n\n async fn authenticate(&self, auth_data: &str) -> anyhow::Result<Self::Item> {\n\n // This is the raw auth data, so we need to chop off the \"Bearer\" part of the header data\n\n // with any starting whitespace. I am not using to_lowercase to avoid an extra string\n\n // allocation\n\n let raw_token = auth_data\n\n .trim_start_matches(\"Bearer\")\n\n .trim_start_matches(\"bearer\")\n", "file_path": "src/authn/oidc.rs", "rank": 39, "score": 103911.77140216672 }, { "content": "#[instrument(level = \"trace\", skip(inv, parcels), fields(invoice_id = %inv.bindle.id))]\n\nfn validate_shas<'a, T: Iterator<Item = &'a String>>(\n\n inv: &crate::Invoice,\n\n parcels: T,\n\n) -> Result<()> {\n\n let zero_vec = Vec::with_capacity(0);\n\n let offending_shas: Vec<String> = parcels\n\n .filter(|s| {\n\n !inv.parcel\n\n .as_ref()\n\n .unwrap_or(&zero_vec)\n\n .iter()\n\n .any(|p| &p.label.sha256 == *s)\n\n })\n\n .cloned()\n\n .collect();\n\n if !offending_shas.is_empty() {\n\n Err(ClientError::Other(format!(\n\n \"Got collection of parcels containing parcels that do not exist in the invoice: {}\",\n\n offending_shas.join(\", \")\n\n )))\n", "file_path": "src/standalone/mod.rs", "rank": 40, "score": 99286.94331495573 }, { "content": "/// Check whether the given version is within the legal range.\n\n///\n\n/// An empty range matches anything.\n\n///\n\n/// A range that fails to parse matches nothing.\n\n///\n\n/// An empty version matches nothing (unless the requirement is empty)\n\n///\n\n/// A version that fails to parse matches nothing 
(unless the requirement is empty).\n\n///\n\n/// In all other cases, if the version satisfies the requirement, this returns true.\n\n/// And if it fails to satisfy the requirement, this returns false.\n\nfn version_compare(version: &Version, requirement: &str) -> bool {\n\n if requirement.is_empty() {\n\n return true;\n\n }\n\n\n\n // For compatibility with npm (https://www.npmjs.com/package/semver),\n\n // check if the requirement is just a version; if so, treat it as equality (`=`) rather\n\n // than Rust's default (`^`).\n\n if let Ok(v) = Version::parse(requirement) {\n\n return *version == v;\n\n }\n\n\n\n match VersionReq::parse(requirement) {\n\n Ok(req) => req.matches(version),\n\n Err(e) => {\n\n tracing::log::error!(\"SemVer range could not parse: {}\", e);\n\n false\n\n }\n\n }\n\n}\n", "file_path": "src/invoice/mod.rs", "rank": 41, "score": 99069.42089378384 }, { "content": "#[derive(Debug)]\n\nstruct InvalidRequestPath;\n\n\n\nimpl Reject for InvalidRequestPath {}\n", "file_path": "src/server/filters.rs", "rank": 42, "score": 98416.34629236568 }, { "content": "/// A helper function that aggregates all routes into a complete API filter. 
If you only wish to\n\n/// serve specific endpoints or versions, you can assemble them with the individual submodules\n\npub fn api<P, I, Authn, Authz, S>(\n\n store: P,\n\n index: I,\n\n authn: Authn,\n\n authz: Authz,\n\n secret_store: S,\n\n verification_strategy: crate::VerificationStrategy,\n\n keyring: KeyRing,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone\n\nwhere\n\n P: crate::provider::Provider + Clone + Send + Sync + 'static,\n\n I: crate::search::Search + Clone + Send + Sync + 'static,\n\n S: crate::invoice::signature::SecretKeyStorage + Clone + Send + Sync + 'static,\n\n Authn: crate::authn::Authenticator + Clone + Send + Sync + 'static,\n\n Authz: crate::authz::Authorizer + Clone + Send + Sync + 'static,\n\n{\n\n let health = warp::path(\"healthz\").map(|| \"OK\");\n\n\n\n // Use an Arc to avoid a possibly expensive clone of the keyring on every API call\n\n let wrapped_keyring = Arc::new(keyring);\n", "file_path": "src/server/routes.rs", "rank": 43, "score": 97910.2259028777 }, { "content": "/// A helper struct for a part file that will clean up the file on drop if it still exists. Also\n\n/// contains functionality for writing to the file and finalizing it (i.e moving it to the correct\n\n/// location)\n\nstruct PartFile {\n\n path: PathBuf,\n\n final_location: PathBuf,\n\n file: File,\n\n}\n\n\n\nimpl PartFile {\n\n /// Creates a new PartFile that will eventually be located at the given `final_location`. 
This\n\n /// will attempt to create a new part file and return an error if one already exists\n\n async fn new(final_location: PathBuf) -> Result<Self> {\n\n let extension = match final_location.extension() {\n\n Some(s) => {\n\n let mut ext = s.to_owned();\n\n ext.push(\".\");\n\n ext.push(PART_EXTENSION);\n\n ext\n\n }\n\n None => OsString::from(PART_EXTENSION),\n\n };\n\n let part = final_location.with_extension(extension);\n", "file_path": "src/provider/file/mod.rs", "rank": 44, "score": 97792.11158827925 }, { "content": "#[derive(Deserialize)]\n\nstruct Package {\n\n name: String,\n\n version: String,\n\n author: Option<String>,\n\n description: Option<String>,\n\n}\n", "file_path": "examples/as2bindle.rs", "rank": 63, "score": 65928.17248780956 }, { "content": "#[derive(Deserialize)]\n\nstruct Package {\n\n name: String,\n\n version: String,\n\n authors: Option<Vec<String>>,\n\n description: Option<String>,\n\n}\n", "file_path": "examples/cargo2bindle.rs", "rank": 64, "score": 65928.17248780956 }, { "content": "#[derive(Deserialize)]\n\nstruct Cargo {\n\n package: Package,\n\n}\n\n\n", "file_path": "examples/cargo2bindle.rs", "rank": 65, "score": 65928.17248780956 }, { "content": "// A helper struct for HEAD responses that takes the raw headers from a GET request and puts them\n\n// onto an empty body\n\nstruct HeadResponse {\n\n headers: warp::http::HeaderMap,\n\n}\n\n\n\nimpl Reply for HeadResponse {\n\n fn into_response(self) -> warp::reply::Response {\n\n let mut resp = warp::http::Response::new(warp::hyper::Body::empty());\n\n let headers = resp.headers_mut();\n\n *headers = self.headers;\n\n resp\n\n }\n\n}\n", "file_path": "src/server/handlers.rs", "rank": 66, "score": 62992.67998465702 }, { "content": "#[derive(Debug)]\n\nstruct AuthnFail;\n\n\n\nimpl warp::reject::Reject for AuthnFail {}\n\n\n\n#[instrument(level = \"trace\", skip(err))]\n\npub(crate) async fn handle_authn_rejection(\n\n err: warp::Rejection,\n\n) -> Result<impl warp::Reply, 
warp::Rejection> {\n\n if err.find::<AuthnFail>().is_some() {\n\n debug!(\"Handling rejection as authn rejection\");\n\n Ok(crate::server::reply::reply_from_error(\n\n \"unauthorized\",\n\n warp::http::StatusCode::UNAUTHORIZED,\n\n ))\n\n } else {\n\n Err(err)\n\n }\n\n}\n\n\n\n/// A warp filter for adding an authorizer\n", "file_path": "src/server/filters.rs", "rank": 67, "score": 62983.71875532958 }, { "content": "#[derive(Debug)]\n\nstruct AuthzFail;\n\n\n\nimpl warp::reject::Reject for AuthzFail {}\n\n\n\n#[instrument(level = \"trace\", skip(err))]\n\npub(crate) async fn handle_authz_rejection(\n\n err: warp::Rejection,\n\n) -> std::result::Result<impl warp::Reply, warp::Rejection> {\n\n if err.find::<AuthzFail>().is_some() {\n\n debug!(\"Handling rejection as authz rejection\");\n\n Ok(crate::server::reply::reply_from_error(\n\n \"access denied\",\n\n warp::http::StatusCode::FORBIDDEN,\n\n ))\n\n } else {\n\n Err(err)\n\n }\n\n}\n\n\n", "file_path": "src/server/filters.rs", "rank": 68, "score": 62983.71875532958 }, { "content": "#[derive(Clone)]\n\nstruct FeatureReference {\n\n group: String,\n\n name: String,\n\n value: String,\n\n}\n\n\n\n/// BindleFilter walks an invoice and resolves a list of parcels.\n\n///\n\n/// A bindle may define many parcels, some of which are to be included by default, and\n\n/// others which are members of groups that are only conditionally included. Parcels also\n\n/// have features attached to them. A feature can be turned on or off, and this will impact\n\n/// which list of parcels are considered the correct ones to pass on to the runtime.\n\n///\n\n/// The filter can be used to retrieve the list of parcels that satisfies a set of\n\n/// requirements. For example, use this to activate or deactivate features. 
You can also\n\n/// include or exclude groups.\n\npub struct BindleFilter<'a> {\n\n // The invoice that we operate on.\n\n invoice: &'a Invoice,\n\n groups: HashSet<String>,\n", "file_path": "src/filters/mod.rs", "rank": 69, "score": 62983.65264657361 }, { "content": "struct StrategyVisitor;\n\n\n\nimpl<'de> serde::de::Visitor<'de> for StrategyVisitor {\n\n type Value = VerificationStrategy;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a valid verification strategy value\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n match v.parse::<VerificationStrategy>() {\n\n Ok(s) => Ok(s),\n\n Err(e) => Err(E::custom(e)),\n\n }\n\n }\n\n\n\n fn visit_string<E>(self, v: String) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n self.visit_str(&v)\n\n }\n\n}\n\n\n", "file_path": "src/invoice/verification.rs", "rank": 70, "score": 62978.8382541445 }, { "content": "fn create_key(\n\n keyring_file: impl AsRef<OsStr>,\n\n secrets_file: &Path,\n\n label: &str,\n\n skip_keyring: bool,\n\n) {\n\n let mut args = vec![\n\n \"run\",\n\n \"--features\",\n\n \"cli\",\n\n \"--bin\",\n\n \"bindle\",\n\n \"--\",\n\n \"keys\",\n\n \"create\",\n\n \"--secrets-file\",\n\n secrets_file.to_str().unwrap(),\n\n \"--roles\",\n\n \"creator,approver\",\n\n ];\n", "file_path": "tests/cli.rs", "rank": 71, "score": 62252.57315548218 }, { "content": "#[derive(Debug)]\n\nstruct BodyDeserializeError {\n\n cause: Box<dyn Error + Send + Sync>,\n\n}\n\n\n\nimpl fmt::Display for BodyDeserializeError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Request body toml deserialize error: {}\", self.cause)\n\n }\n\n}\n\n\n\nimpl Error for BodyDeserializeError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n Some(self.cause.as_ref())\n\n }\n\n}\n\n\n\nimpl Reject for BodyDeserializeError {}\n\n\n\n#[instrument(level = 
\"trace\", skip(err))]\n\npub(crate) async fn handle_invalid_request_path(\n", "file_path": "src/server/filters.rs", "rank": 72, "score": 61662.862072936216 }, { "content": "fn sign_one(\n\n inv: &mut Invoice,\n\n signer_role: SignatureRole,\n\n keyfile: &SecretKeyEntry,\n\n) -> Result<(), SignatureError> {\n\n let signer_name = keyfile.label.clone();\n\n let key = keyfile.key()?;\n\n // The spec says it is illegal for the a single key to sign the same invoice\n\n // more than once.\n\n let encoded_key = base64::encode(key.public.to_bytes());\n\n if let Some(sigs) = inv.signature.as_ref() {\n\n for s in sigs {\n\n if s.key == encoded_key {\n\n return Err(SignatureError::DuplicateSignature);\n\n }\n\n }\n\n }\n\n\n\n let cleartext = inv.cleartext(&signer_name, &signer_role);\n\n let signature: EdSignature = key.sign(cleartext.as_bytes());\n", "file_path": "src/invoice/mod.rs", "rank": 73, "score": 60836.832530421954 }, { "content": "fn get_random_port() -> u16 {\n\n TcpListener::bind(SocketAddrV4::new(Ipv4Addr::LOCALHOST, 0))\n\n .expect(\"Unable to bind to check for port\")\n\n .local_addr()\n\n .unwrap()\n\n .port()\n\n}\n", "file_path": "tests/test_util.rs", "rank": 74, "score": 56580.651556111974 }, { "content": "// Type alias for shorthanding a locked cache\n\ntype LockedCache<K, V> = Arc<Mutex<Lru<K, V>>>;\n\n\n\n/// A least recently used cache implementation that stores cached invoices in memory and cached\n\n/// parcels on disk. Any mutating operations (like creating or yanking) will pass through to the\n\n/// configured remote provider.\n\n///\n\n/// The cache will store invoices in memory and parcels on disk. 
Parcels will be automatically\n\n/// cleaned up from disk when they are ejected from the cache\n\n#[derive(Clone)]\n\npub struct LruCache<Remote: Provider + Clone> {\n\n invoices: LockedCache<Id, Invoice>,\n\n parcels: LockedCache<String, NamedTempFile>,\n\n remote: Remote,\n\n}\n\n\n\nimpl<Remote: Provider + Clone> LruCache<Remote> {\n\n /// Return a new LruCache with the given cache size and remote provider for fetching items that\n\n /// don't exist in the cache. The given cache size will be used to configure the cache size for\n\n /// both invoices and parcels\n\n pub fn new(cache_size: usize, remote: Remote) -> Self {\n", "file_path": "src/cache/lru.rs", "rank": 75, "score": 47253.42092688478 }, { "content": "use bindle::client;\n\nuse tempfile::tempdir;\n\nuse tokio::io::AsyncWriteExt;\n\nuse tokio_stream::StreamExt;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let url = std::env::var(\"BINDLE_URL\")?;\n\n let root = std::env::var(\"CARGO_MANIFEST_DIR\")?;\n\n let root_path = std::path::PathBuf::from(root);\n\n\n\n let bindle_client = client::Client::new(\n\n &url,\n\n client::tokens::NoToken,\n\n std::sync::Arc::new(bindle::signature::KeyRing::default()),\n\n )?;\n\n\n\n // Load an invoice manually and send it to the server\n\n println!(\"Creating invoice 1\");\n\n let inv = toml::from_slice(\n", "file_path": "examples/client.rs", "rank": 76, "score": 42004.39857964123 }, { "content": " query: Some(\"enterprise.com/warpcore\".to_string()),\n\n version: Some(\"1.0.0\".to_string()),\n\n ..Default::default()\n\n })\n\n .await?;\n\n println!(\"{:?}\", matches);\n\n\n\n // Upload a parcel using a stream instead of loading into memory\n\n println!(\"Creating parcel 2\");\n\n bindle_client\n\n .create_parcel_from_file(\n\n \"enterprise.com/warpcore/2.0.0\",\n\n &second_sha,\n\n root_path.join(\"tests/scaffolds/valid_v2/parcels/parcel.dat\"),\n\n )\n\n .await?;\n\n\n\n // Get a parcel and load its bytes into memory\n\n 
println!(\"Loading parcel 1\");\n\n let data = bindle_client\n", "file_path": "examples/client.rs", "rank": 77, "score": 41997.078845797536 }, { "content": " Ok(_) => panic!(\"getting a yanked invoice should have errored\"),\n\n Err(e) => {\n\n if !matches!(e, bindle::client::ClientError::InvoiceNotFound) {\n\n panic!(\"Expected an invoice not found error, got: {:?}\", e)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_charset() {\n\n let controller = TestController::new(BINARY_NAME).await;\n\n\n\n let scaffold = testing::RawScaffold::load(\"valid_v1\").await;\n\n\n\n // Manually assemble a request\n\n let client = reqwest::Client::builder()\n\n .http2_prior_knowledge()\n\n .build()\n\n .unwrap();\n", "file_path": "tests/client.rs", "rank": 78, "score": 41995.88557742327 }, { "content": "\n\n let url = format!(\"{}/_i\", controller.base_url);\n\n client\n\n .post(&url)\n\n .header(\"Content-Type\", \"application/toml; charset=utf-8\")\n\n .body(scaffold.parcel_files.get(\"parcel\").unwrap().data.clone())\n\n .send()\n\n .await\n\n .expect(\"Content-Type with charset shouldn't fail\");\n\n}\n", "file_path": "tests/client.rs", "rank": 79, "score": 41995.01741859646 }, { "content": " .get_parcel(\"enterprise.com/warpcore/2.0.0\", &first_sha)\n\n .await?;\n\n println!(\"{}\", data.len());\n\n\n\n // Get a parcel as a stream, and write it into a file somewhere\n\n println!(\"Loading parcel 2 as stream\");\n\n let temp = tempdir()?;\n\n let mut stream = bindle_client\n\n .get_parcel_stream(\"enterprise.com/warpcore/2.0.0\", &second_sha)\n\n .await?;\n\n\n\n let file_path = temp.path().join(\"foo\");\n\n let mut file = tokio::fs::OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .open(&file_path)\n\n .await?;\n\n\n\n while let Some(data) = stream.next().await {\n\n let data = data?;\n", "file_path": "examples/client.rs", "rank": 80, "score": 41992.74036820374 }, { "content": "async fn test_streaming_successful() {\n\n let controller = 
TestController::new(BINARY_NAME).await;\n\n\n\n // Use raw paths instead of scaffolds so we can test the stream\n\n let root = std::env::var(\"CARGO_MANIFEST_DIR\").expect(\"Unable to get project directory\");\n\n let base = std::path::PathBuf::from(root).join(\"tests/scaffolds/valid_v1\");\n\n\n\n let inv = controller\n\n .client\n\n .create_invoice_from_file(base.join(\"invoice.toml\"))\n\n .await\n\n .expect(\"unable to create invoice\")\n\n .invoice;\n\n\n\n controller\n\n .client\n\n .get_invoice(&inv.bindle.id)\n\n .await\n\n .expect(\"Should be able to fetch newly created invoice\");\n\n\n", "file_path": "tests/client.rs", "rank": 81, "score": 41988.54012249897 }, { "content": " &tokio::fs::read(root_path.join(\"tests/scaffolds/valid_v1/invoice.toml\")).await?,\n\n )?;\n\n let inv = bindle_client.create_invoice(inv).await?;\n\n println!(\"{:?}\", inv);\n\n\n\n // Upload a parcel by loading the file into memory\n\n println!(\"Creating parcel 1\");\n\n let first_sha = inv.invoice.parcel.expect(\"Parcel list shouldn't be empty\")[0]\n\n .label\n\n .sha256\n\n .clone();\n\n let data =\n\n tokio::fs::read(root_path.join(\"tests/scaffolds/valid_v1/parcels/parcel.dat\")).await?;\n\n bindle_client\n\n .create_parcel(&inv.invoice.bindle.id, &first_sha, data)\n\n .await?;\n\n\n\n // Load an invoice from file and stream it to the API\n\n println!(\"Creating invoice 2\");\n\n let inv = bindle_client\n", "file_path": "examples/client.rs", "rank": 82, "score": 41986.55481267244 }, { "content": " .create_invoice_from_file(root_path.join(\"tests/scaffolds/valid_v2/invoice.toml\"))\n\n .await?;\n\n println!(\"{:?}\", inv);\n\n\n\n // Get the missing sha from the response\n\n let second_sha = inv.missing.expect(\"Should have missing parcels\")[0]\n\n .sha256\n\n .clone();\n\n\n\n // Get one of the created invoices\n\n println!(\"Getting invoice 2\");\n\n let inv = bindle_client\n\n .get_invoice(\"enterprise.com/warpcore/2.0.0\")\n\n .await?;\n\n println!(\"{:?}\", 
inv);\n\n\n\n // Query the API for a specific version\n\n println!(\"Querying for invoice 1\");\n\n let matches = bindle_client\n\n .query_invoices(bindle::QueryOptions {\n", "file_path": "examples/client.rs", "rank": 83, "score": 41985.07052883054 }, { "content": "//! Tests for the client. These tests are not intended to walk through all the API possibilites (as\n\n//! that is taken care of in the API tests), but instead focus on entire user workflows\n\n\n\nmod test_util;\n\nuse test_util::TestController;\n\n\n\nuse std::convert::TryInto;\n\n\n\nuse bindle::{signature::SecretKeyStorage, testing, SignatureRole};\n\n\n\nuse tokio_stream::StreamExt;\n\n\n\n#[cfg(not(target_family = \"windows\"))]\n\nconst BINARY_NAME: &str = \"bindle-server\";\n\n#[cfg(target_family = \"windows\")]\n\nconst BINARY_NAME: &str = \"bindle-server.exe\";\n\n\n\n#[tokio::test]\n\nasync fn test_successful() {\n\n // This first creates some invoices/parcels and then tries fetching them to see that they work.\n", "file_path": "tests/client.rs", "rank": 84, "score": 41982.91493537687 }, { "content": " .expect(\"Unable to create parcel\");\n\n }\n\n\n\n // Make sure we can create an invoice where all parcels already exist\n\n let mut other_inv = scaffold.invoice.clone();\n\n other_inv.bindle.id = \"another.com/bindle/1.0.0\".try_into().unwrap();\n\n other_inv.signature = None;\n\n other_inv\n\n .sign(\n\n SignatureRole::Creator,\n\n scaffold\n\n .keys\n\n .get_first_matching(&SignatureRole::Creator)\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n controller\n\n .client\n\n .create_invoice(other_inv)\n\n .await\n", "file_path": "tests/client.rs", "rank": 85, "score": 41982.589596223435 }, { "content": " // Load the label from disk\n\n let parcel_path = base.join(\"parcels/parcel.dat\");\n\n let parcel_sha = inv.parcel.expect(\"Should have parcels in invoice\")[0]\n\n .label\n\n .sha256\n\n .to_owned();\n\n controller\n\n .client\n\n .create_parcel_from_file(&inv.bindle.id, &parcel_sha, 
&parcel_path)\n\n .await\n\n .expect(\"Unable to create parcel\");\n\n\n\n // Now check that we can get the parcel and read data from the stream\n\n let mut stream = controller\n\n .client\n\n .get_parcel_stream(&inv.bindle.id, &parcel_sha)\n\n .await\n\n .expect(\"unable to get parcel\");\n\n\n\n let mut data = Vec::new();\n", "file_path": "tests/client.rs", "rank": 86, "score": 41982.388317165474 }, { "content": " );\n\n }\n\n\n\n controller\n\n .client\n\n .yank_invoice(&inv.bindle.id)\n\n .await\n\n .expect(\"unable to yank invoice\");\n\n\n\n match controller.client.get_invoice(inv.bindle.id).await {\n\n Ok(_) => panic!(\"getting a yanked invoice should have errored\"),\n\n Err(e) => {\n\n if !matches!(e, bindle::client::ClientError::InvoiceNotFound) {\n\n panic!(\"Expected an invoice not found error, got: {:?}\", e)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[tokio::test]\n", "file_path": "tests/client.rs", "rank": 87, "score": 41979.16344656771 }, { "content": " let controller = TestController::new(BINARY_NAME).await;\n\n\n\n let scaffold = testing::Scaffold::load(\"valid_v2\").await;\n\n\n\n controller\n\n .client\n\n .create_invoice(scaffold.invoice.clone())\n\n .await\n\n .expect(\"Invoice creation should not error\");\n\n\n\n // Upload parcels for this bindle\n\n for parcel in scaffold.parcel_files.values() {\n\n controller\n\n .client\n\n .create_parcel(\n\n &scaffold.invoice.bindle.id,\n\n &parcel.sha,\n\n parcel.data.clone(),\n\n )\n\n .await\n", "file_path": "tests/client.rs", "rank": 88, "score": 41978.785655125226 }, { "content": " .client\n\n .create_parcel(&inv.bindle.id, &parcel.sha, parcel.data.clone())\n\n .await\n\n .expect(\"Unable to create parcel\");\n\n }\n\n\n\n // Now check that we can get all the parcels\n\n for parcel in scaffold.parcel_files.values() {\n\n let data = controller\n\n .client\n\n .get_parcel(&inv.bindle.id, &parcel.sha)\n\n .await\n\n .expect(\"unable to get parcel\");\n\n let expected_len = parcel.data.len();\n\n assert_eq!(\n\n 
data.len(),\n\n expected_len,\n\n \"Expected file to be {} bytes, got {} bytes\",\n\n expected_len,\n\n data.len()\n", "file_path": "tests/client.rs", "rank": 89, "score": 41978.25222394061 }, { "content": " while let Some(res) = stream.next().await {\n\n let bytes = res.expect(\"Shouldn't get an error in stream\");\n\n data.extend(bytes);\n\n }\n\n\n\n let on_disk_len = tokio::fs::metadata(parcel_path)\n\n .await\n\n .expect(\"Unable to get file info\")\n\n .len() as usize;\n\n assert_eq!(\n\n data.len(),\n\n on_disk_len,\n\n \"Expected file to be {} bytes, got {} bytes\",\n\n on_disk_len,\n\n data.len()\n\n );\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_already_created() {\n", "file_path": "tests/client.rs", "rank": 90, "score": 41977.310049257154 }, { "content": " file.write_all(&data).await?;\n\n }\n\n file.flush().await?;\n\n\n\n // Read the whole file and make sure we got it\n\n assert_eq!(tokio::fs::read(file_path).await?, b\"a green one\");\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/client.rs", "rank": 91, "score": 41974.279230188375 }, { "content": " .expect(\"invoice creation should not error\");\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_missing() {\n\n let controller = TestController::new(BINARY_NAME).await;\n\n\n\n // Create a bindle with missing invoices\n\n let scaffold = testing::Scaffold::load(\"lotsa_parcels\").await;\n\n\n\n let inv = controller\n\n .client\n\n .create_invoice(scaffold.invoice)\n\n .await\n\n .expect(\"unable to create invoice\")\n\n .invoice;\n\n\n\n // Check we get the right amount of missing parcels\n\n let missing = controller\n\n .client\n", "file_path": "tests/client.rs", "rank": 92, "score": 41973.801272287405 }, { "content": " .get_missing_parcels(&inv.bindle.id)\n\n .await\n\n .expect(\"Should be able to fetch list of missing parcels\");\n\n assert_eq!(\n\n missing.len(),\n\n scaffold.parcel_files.len(),\n\n \"Expected {} missing parcels, found {}\",\n\n scaffold.parcel_files.len(),\n\n missing.len()\n\n );\n\n\n\n 
// Yank the invoice\n\n controller\n\n .client\n\n .yank_invoice(&inv.bindle.id)\n\n .await\n\n .expect(\"unable to yank invoice\");\n\n\n\n // Make sure we can't get missing\n\n match controller.client.get_missing_parcels(&inv.bindle.id).await {\n", "file_path": "tests/client.rs", "rank": 93, "score": 41973.39025686631 }, { "content": " // Once we confirm that works, we test yank\n\n let controller = TestController::new(BINARY_NAME).await;\n\n\n\n let scaffold = testing::Scaffold::load(\"valid_v1\").await;\n\n\n\n let inv = controller\n\n .client\n\n .create_invoice(scaffold.invoice)\n\n .await\n\n .expect(\"unable to create invoice\")\n\n .invoice;\n\n\n\n controller\n\n .client\n\n .get_invoice(&inv.bindle.id)\n\n .await\n\n .expect(\"Should be able to fetch newly created invoice\");\n\n\n\n for parcel in scaffold.parcel_files.values() {\n\n controller\n", "file_path": "tests/client.rs", "rank": 94, "score": 41970.44121298192 }, { "content": " buf: &[u8],\n\n ) -> Poll<std::result::Result<usize, std::io::Error>> {\n\n // Because the hasher is all in memory, we only need to make sure only one caller at a time\n\n // can write using the mutex\n\n let mut inner = match self.inner.try_lock() {\n\n Ok(l) => l,\n\n Err(_) => return Poll::Pending,\n\n };\n\n\n\n Poll::Ready(inner.write(buf))\n\n }\n\n\n\n fn poll_flush(\n\n self: Pin<&mut Self>,\n\n _cx: &mut Context<'_>,\n\n ) -> Poll<std::result::Result<(), std::io::Error>> {\n\n let mut inner = match self.inner.try_lock() {\n\n Ok(l) => l,\n\n Err(_) => return Poll::Pending,\n\n };\n", "file_path": "src/async_util.rs", "rank": 95, "score": 41056.5070971808 }, { "content": "\n\nimpl AsyncSha256 {\n\n /// Equivalent to the `Sha256::new()` function\n\n pub fn new() -> Self {\n\n AsyncSha256 {\n\n inner: Mutex::new(Sha256::new()),\n\n }\n\n }\n\n\n\n /// Consumes self and returns the bare Sha256. This should only be called once you are done\n\n /// writing. 
This will only return an error if for some reason the underlying mutex was poisoned\n\n pub fn into_inner(self) -> std::sync::LockResult<Sha256> {\n\n self.inner.into_inner()\n\n }\n\n}\n\n\n\nimpl tokio::io::AsyncWrite for AsyncSha256 {\n\n fn poll_write(\n\n self: Pin<&mut Self>,\n\n _cx: &mut Context<'_>,\n", "file_path": "src/async_util.rs", "rank": 96, "score": 41055.5896565427 }, { "content": "//! A collection of various utilities for asyncifying things, publicly exposed for convenience of\n\n//! those consuming Bindle as a Rust SDK\n\n\n\nuse std::io::Write;\n\nuse std::pin::Pin;\n\nuse std::sync::Mutex;\n\nuse std::task::{Context, Poll};\n\n\n\nuse sha2::{Digest, Sha256};\n\n\n\n/// A wrapper to implement `AsyncWrite` on Sha256\n\npub struct AsyncSha256 {\n\n inner: Mutex<Sha256>,\n\n}\n\n\n\nimpl Default for AsyncSha256 {\n\n fn default() -> Self {\n\n AsyncSha256::new()\n\n }\n\n}\n", "file_path": "src/async_util.rs", "rank": 97, "score": 41055.25067970136 }, { "content": "\n\n Poll::Ready(inner.flush())\n\n }\n\n\n\n fn poll_shutdown(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n ) -> Poll<std::result::Result<(), std::io::Error>> {\n\n // There are no actual shutdown tasks to perform, so just flush things as defined in the\n\n // trait documentation\n\n self.poll_flush(cx)\n\n }\n\n}\n", "file_path": "src/async_util.rs", "rank": 98, "score": 41047.73522942266 }, { "content": " /// the client.\n\n // TODO: Right now this is mainly used if you want to HEAD any of the endpoints, as a HEAD\n\n // requests is used to get the headers and status code, which is pretty much a raw response\n\n // anyway. 
But should we make those their own methods instead?\n\n #[instrument(level = \"trace\", skip(self, body))]\n\n pub async fn raw(\n\n &self,\n\n method: reqwest::Method,\n\n path: &str,\n\n body: Option<impl Into<reqwest::Body>>,\n\n ) -> anyhow::Result<reqwest::Response> {\n\n let req = self.client.request(method, self.base_url.join(path)?);\n\n let req = self.token_manager.apply_auth_header(req).await?;\n\n let req = match body {\n\n Some(b) => req.body(b),\n\n None => req,\n\n };\n\n req.send().await.map_err(|e| e.into())\n\n }\n\n\n", "file_path": "src/client/mod.rs", "rank": 99, "score": 40404.83061718501 } ]
Rust
primitives/mmr/src/mmr/utils.rs
redmaner/core-rs-albatross
9721dd99e8fef949e7e89a8047f95eaaa8ec9fd7
use crate::error::Error; use crate::hash::Merge; const USIZE_BITS: u32 = 0usize.count_zeros(); #[inline] pub(crate) fn bit_length(v: usize) -> u32 { USIZE_BITS - v.leading_zeros() } pub(crate) fn bagging<H: Merge, I: Iterator<Item = Result<(H, usize), Error>>>( peaks_rev: I, ) -> Result<H, Error> { let mut bagging_info = None; for item in peaks_rev { let (peak_hash, peak_leaves) = item?; bagging_info = match bagging_info { None => Some((peak_hash, peak_leaves)), Some((root_hash, root_leaves)) => { let sum_leaves = root_leaves + peak_leaves; Some((peak_hash.merge(&root_hash, sum_leaves as u64), sum_leaves)) } }; } let (root, _) = bagging_info.ok_or(Error::ProveInvalidLeaves)?; Ok(root) } #[cfg(test)] pub(crate) mod test_utils { use crate::hash::{Hash, Merge}; use super::*; pub(crate) fn hash_perfect_tree<H: Merge, T: Hash<H>>(values: &[T]) -> Option<H> { let len = values.len(); if len == 0 { return Some(H::empty(0)); } if len.count_ones() != 1 { return None; } if len == 1 { return Some(values[0].hash(1)); } let mid = len >> 1; Some(H::merge( &hash_perfect_tree(&values[..mid])?, &hash_perfect_tree(&values[mid..])?, len as u64, )) } pub(crate) fn hash_mmr<H: Merge, T: Hash<H>>(values: &[T]) -> H { let mut peaks = vec![]; let mut i = 0; while i < values.len() { let max_height = bit_length(values.len() - i) as usize - 1; let max_leaves = 1 << max_height; let root = hash_perfect_tree(&values[i..i + max_leaves]).unwrap(); peaks.push(Ok((root, max_leaves))); i += max_leaves; } bagging(peaks.into_iter().rev()).unwrap() } #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) struct TestHash(pub(crate) usize); impl Merge for TestHash { fn empty(prefix: u64) -> Self { TestHash(prefix as usize) } fn merge(&self, other: &Self, prefix: u64) -> Self { TestHash(self.0 * 2 + other.0 * 3 + prefix as usize) } } impl Hash<TestHash> for usize { fn hash(&self, prefix: u64) -> TestHash { TestHash(self * 2 + prefix as usize) } } #[test] fn test_utils_hash_correctly() { let values = vec![1, 3, 
5, 7]; assert_eq!(hash_perfect_tree(&values[..0]), Some(TestHash(0))); assert_eq!(hash_perfect_tree(&values[..3]), None); assert_eq!(hash_perfect_tree(&values[..1]), Some(TestHash(3))); assert_eq!(hash_perfect_tree(&values[1..2]), Some(TestHash(7))); assert_eq!(hash_perfect_tree(&values[2..3]), Some(TestHash(11))); assert_eq!(hash_perfect_tree(&values[3..]), Some(TestHash(15))); assert_eq!(hash_perfect_tree(&values[..2]), Some(TestHash(29))); assert_eq!(hash_perfect_tree(&values[2..]), Some(TestHash(69))); assert_eq!(hash_perfect_tree(&values), Some(TestHash(269))); assert_eq!(hash_mmr(&values[..1]), TestHash(3)); assert_eq!(hash_mmr(&values[1..2]), TestHash(7)); assert_eq!(hash_mmr(&values[2..3]), TestHash(11)); assert_eq!(hash_mmr(&values[3..]), TestHash(15)); assert_eq!(hash_mmr(&values[..2]), TestHash(29)); assert_eq!(hash_mmr(&values[2..]), TestHash(69)); assert_eq!(hash_mmr(&values), TestHash(269)); assert_eq!(hash_mmr(&values[..3]), TestHash(94)); } } #[cfg(test)] mod tests { use crate::mmr::utils::test_utils::TestHash; use super::*; #[test] fn it_correctly_compute_bit_length() { assert_eq!(bit_length(0), 0); assert_eq!(bit_length(1), 1); assert_eq!(bit_length(2), 2); assert_eq!(bit_length(3), 2); assert_eq!(bit_length(255), 8); assert_eq!(bit_length(256), 9); } #[test] fn it_correctly_performs_bagging() { assert!(bagging::<TestHash, _>(vec![].into_iter()).is_err()); let mut positions = vec![Ok((TestHash(2), 2))]; assert_eq!( bagging(positions.clone().into_iter().rev()), Ok(TestHash(2)) ); assert_eq!(bagging(positions.clone().into_iter()), Ok(TestHash(2))); positions.push(Ok((TestHash(1), 1))); assert_eq!( bagging(positions.clone().into_iter().rev()), Ok(TestHash(10)) ); assert_eq!(bagging(positions.clone().into_iter()), Ok(TestHash(11))); positions.push(Ok((TestHash(2), 2))); assert_eq!( bagging(positions.clone().into_iter().rev()), Ok(TestHash(42)) ); assert_eq!(bagging(positions.into_iter()), Ok(TestHash(42))); } }
use crate::error::Error; use crate::hash::Merge; const USIZE_BITS: u32 = 0usize.count_zeros(); #[inline] pub(crate) fn bit_length(v: usize) -> u32 { USIZE_BITS - v.leading_zeros() } pub(crate) fn bagging<H: Merge, I: Iterator<Item = Result<(H, usize), Error>>>( peaks_rev: I, ) -> Result<H, Error> { let mut bagging_info = None; for item in peaks_rev { let (peak_hash, peak_leaves) = item?; bagging_info = match bagging_info { None => Some((peak_hash, peak_leaves)), Some((root_hash, root_leaves)) => { let sum_leaves = root_leaves + peak_leaves; Some((peak_hash.merge(&root_hash, sum_leaves as u64), sum_leaves)) } }; } let (root, _) = bagging_info.ok_or(Error::ProveInvalidLeaves)?; Ok(root) } #[cfg(test)] pub(crate) mod test_utils { use crate::hash::{Hash, Merge}; use super::*; pub(crate) fn hash_perfect_tree<H: Merge, T: Hash<H>>(values: &[T]) -> Option<H> { let len = values.len(); if len == 0 { return Some(H::empty(0)); } if len.count_ones() != 1 { return None; } if len == 1 { return Some(values[0].hash(1)); } let mid = len >> 1;
} pub(crate) fn hash_mmr<H: Merge, T: Hash<H>>(values: &[T]) -> H { let mut peaks = vec![]; let mut i = 0; while i < values.len() { let max_height = bit_length(values.len() - i) as usize - 1; let max_leaves = 1 << max_height; let root = hash_perfect_tree(&values[i..i + max_leaves]).unwrap(); peaks.push(Ok((root, max_leaves))); i += max_leaves; } bagging(peaks.into_iter().rev()).unwrap() } #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) struct TestHash(pub(crate) usize); impl Merge for TestHash { fn empty(prefix: u64) -> Self { TestHash(prefix as usize) } fn merge(&self, other: &Self, prefix: u64) -> Self { TestHash(self.0 * 2 + other.0 * 3 + prefix as usize) } } impl Hash<TestHash> for usize { fn hash(&self, prefix: u64) -> TestHash { TestHash(self * 2 + prefix as usize) } } #[test] fn test_utils_hash_correctly() { let values = vec![1, 3, 5, 7]; assert_eq!(hash_perfect_tree(&values[..0]), Some(TestHash(0))); assert_eq!(hash_perfect_tree(&values[..3]), None); assert_eq!(hash_perfect_tree(&values[..1]), Some(TestHash(3))); assert_eq!(hash_perfect_tree(&values[1..2]), Some(TestHash(7))); assert_eq!(hash_perfect_tree(&values[2..3]), Some(TestHash(11))); assert_eq!(hash_perfect_tree(&values[3..]), Some(TestHash(15))); assert_eq!(hash_perfect_tree(&values[..2]), Some(TestHash(29))); assert_eq!(hash_perfect_tree(&values[2..]), Some(TestHash(69))); assert_eq!(hash_perfect_tree(&values), Some(TestHash(269))); assert_eq!(hash_mmr(&values[..1]), TestHash(3)); assert_eq!(hash_mmr(&values[1..2]), TestHash(7)); assert_eq!(hash_mmr(&values[2..3]), TestHash(11)); assert_eq!(hash_mmr(&values[3..]), TestHash(15)); assert_eq!(hash_mmr(&values[..2]), TestHash(29)); assert_eq!(hash_mmr(&values[2..]), TestHash(69)); assert_eq!(hash_mmr(&values), TestHash(269)); assert_eq!(hash_mmr(&values[..3]), TestHash(94)); } } #[cfg(test)] mod tests { use crate::mmr::utils::test_utils::TestHash; use super::*; #[test] fn it_correctly_compute_bit_length() { assert_eq!(bit_length(0), 0); 
assert_eq!(bit_length(1), 1); assert_eq!(bit_length(2), 2); assert_eq!(bit_length(3), 2); assert_eq!(bit_length(255), 8); assert_eq!(bit_length(256), 9); } #[test] fn it_correctly_performs_bagging() { assert!(bagging::<TestHash, _>(vec![].into_iter()).is_err()); let mut positions = vec![Ok((TestHash(2), 2))]; assert_eq!( bagging(positions.clone().into_iter().rev()), Ok(TestHash(2)) ); assert_eq!(bagging(positions.clone().into_iter()), Ok(TestHash(2))); positions.push(Ok((TestHash(1), 1))); assert_eq!( bagging(positions.clone().into_iter().rev()), Ok(TestHash(10)) ); assert_eq!(bagging(positions.clone().into_iter()), Ok(TestHash(11))); positions.push(Ok((TestHash(2), 2))); assert_eq!( bagging(positions.clone().into_iter().rev()), Ok(TestHash(42)) ); assert_eq!(bagging(positions.into_iter()), Ok(TestHash(42))); } }
Some(H::merge( &hash_perfect_tree(&values[..mid])?, &hash_perfect_tree(&values[mid..])?, len as u64, ))
call_expression
[]
Rust
vendor/wayland-client/src/globals.rs
nwtnni/icfp-2020
0eeb7851cd70bdec9343d6a4257d7286275fa79f
use std::sync::{Arc, Mutex}; use crate::protocol::wl_display; use crate::protocol::wl_registry; use crate::{Attached, DispatchData, Interface, Main, Proxy}; struct Inner { list: Vec<(u32, String, u32)>, } #[derive(Clone)] pub struct GlobalManager { inner: Arc<Mutex<Inner>>, registry: Main<wl_registry::WlRegistry>, } #[derive(Debug, PartialEq)] pub enum GlobalError { Missing, VersionTooLow(u32), } impl ::std::error::Error for GlobalError { fn description(&self) -> &str { match *self { GlobalError::Missing => "The requested global was missing.", GlobalError::VersionTooLow(_) => "The requested global's version is too low.", } } } impl ::std::fmt::Display for GlobalError { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { f.write_str(::std::error::Error::description(self)) } } pub enum GlobalEvent { New { id: u32, interface: String, version: u32, }, Removed { id: u32, interface: String, }, } impl GlobalManager { pub fn new(display: &Attached<wl_display::WlDisplay>) -> GlobalManager { let inner = Arc::new(Mutex::new(Inner { list: Vec::new() })); let inner_clone = inner.clone(); let registry = display .as_ref() .send::<wl_registry::WlRegistry>(wl_display::Request::GetRegistry {}, None) .unwrap(); registry.quick_assign(move |_proxy, msg, _data| { let mut inner = inner.lock().unwrap(); match msg { wl_registry::Event::Global { name, interface, version, } => { inner.list.push((name, interface, version)); } wl_registry::Event::GlobalRemove { name } => { inner.list.retain(|&(n, _, _)| n != name); } } }); GlobalManager { inner: inner_clone, registry, } } pub fn new_with_cb<F>(display: &Attached<wl_display::WlDisplay>, mut callback: F) -> GlobalManager where F: FnMut(GlobalEvent, Attached<wl_registry::WlRegistry>, DispatchData) + 'static, { let inner = Arc::new(Mutex::new(Inner { list: Vec::new() })); let inner_clone = inner.clone(); let registry = display .as_ref() .send::<wl_registry::WlRegistry>(wl_display::Request::GetRegistry {}, None) 
.unwrap(); registry.quick_assign(move |proxy, msg, data| { let mut inner = inner.lock().unwrap(); let inner = &mut *inner; match msg { wl_registry::Event::Global { name, interface, version, } => { inner.list.push((name, interface.clone(), version)); callback( GlobalEvent::New { id: name, interface, version, }, (*proxy).clone(), data, ); } wl_registry::Event::GlobalRemove { name } => { if let Some((i, _)) = inner.list.iter().enumerate().find(|&(_, &(n, _, _))| n == name) { let (id, interface, _) = inner.list.swap_remove(i); callback(GlobalEvent::Removed { id, interface }, (*proxy).clone(), data); } else { panic!( "Wayland protocol error: the server removed non-existing global \"{}\".", name ); } } } }); GlobalManager { inner: inner_clone, registry, } } pub fn instantiate_exact<I>(&self, version: u32) -> Result<Main<I>, GlobalError> where I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>, { let inner = self.inner.lock().unwrap(); for &(id, ref interface, server_version) in &inner.list { if interface == I::NAME { if version > server_version { return Err(GlobalError::VersionTooLow(server_version)); } else { return Ok(self.registry.bind::<I>(version, id)); } } } Err(GlobalError::Missing) } pub fn instantiate_range<I>(&self, min_version: u32, max_version: u32) -> Result<Main<I>, GlobalError> where I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>, { let inner = self.inner.lock().unwrap(); for &(id, ref interface, version) in &inner.list { if interface == I::NAME { if version >= min_version { let version = ::std::cmp::min(version, max_version); return Ok(self.registry.bind::<I>(version, id)); } else { return Err(GlobalError::VersionTooLow(version)); } } } Err(GlobalError::Missing) } pub fn list(&self) -> Vec<(u32, String, u32)> { self.inner.lock().unwrap().list.clone() } } pub trait GlobalImplementor<I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>> { fn new_global(&mut self, global: Main<I>, data: DispatchData); fn error(&mut self, _version: u32, _data: DispatchData) {} } 
impl<F, I: Interface> GlobalImplementor<I> for F where I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>, F: FnMut(Main<I>, DispatchData), { fn new_global(&mut self, global: Main<I>, data: DispatchData) { (*self)(global, data) } } #[macro_export] macro_rules! global_filter { ($([$interface:ty, $version:expr, $callback:expr]),*) => { { use $crate::protocol::wl_registry; use $crate::{GlobalEvent, Interface, Attached, GlobalImplementor, DispatchData}; type Callback = Box<dyn FnMut(u32, u32, Attached<wl_registry::WlRegistry>, DispatchData<'_>)>; let mut callbacks: Vec<(&'static str, Callback)> = Vec::new(); $({ let mut cb = { $callback }; callbacks.push(( <$interface as Interface>::NAME, Box::new(move |id, version, registry: Attached<wl_registry::WlRegistry>, ddata: DispatchData| { if version < $version { GlobalImplementor::<$interface>::error(&mut cb, version, ddata); } else { let proxy = registry.bind::<$interface>(version, id); GlobalImplementor::<$interface>::new_global(&mut cb, proxy, ddata); } }) as Box<_> )); })* move |event: GlobalEvent, registry: Attached<wl_registry::WlRegistry>, ddata| { if let GlobalEvent::New { id, interface, version } = event { for &mut (iface, ref mut cb) in &mut callbacks { if iface == interface { cb(id, version, registry, ddata); break; } } } } } } }
use std::sync::{Arc, Mutex}; use crate::protocol::wl_display; use crate::protocol::wl_registry; use crate::{Attached, DispatchData, Interface, Main, Proxy}; struct Inner { list: Vec<(u32, String, u32)>, } #[derive(Clone)] pub struct GlobalManager { inner: Arc<Mutex<Inner>>, registry: Main<wl_registry::WlRegistry>, } #[derive(Debug, PartialEq)] pub enum GlobalError { Missing, VersionTooLow(u32), } impl ::std::error::Error for GlobalError { fn description(&self) -> &str { match *self { GlobalError::Missing => "The requested global was missing.", GlobalError::VersionTooLow(_) => "The requested global's version is too low.", } } } impl ::std::fmt::Display for GlobalError { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { f.write_str(::std::error::Error::description(self)) } } pub enum GlobalEvent { New { id: u32, interface: String, version: u32, }, Removed { id: u32, interface: String, }, } impl GlobalManager { pub fn new(display: &Attached<wl_display::WlDisplay>) -> GlobalManager { let inner = Arc::new(Mutex::new(Inner { list: Vec::new() })); let inner_clone = inner.clone(); let registry = display .as_ref() .send::<wl_registry::WlRegistry>(wl_display::Request::GetRegistry {}, None) .unwrap(); registry.quick_assign(move |_proxy, msg, _data| { let mut inner = inner.lock().unwrap();
}); GlobalManager { inner: inner_clone, registry, } } pub fn new_with_cb<F>(display: &Attached<wl_display::WlDisplay>, mut callback: F) -> GlobalManager where F: FnMut(GlobalEvent, Attached<wl_registry::WlRegistry>, DispatchData) + 'static, { let inner = Arc::new(Mutex::new(Inner { list: Vec::new() })); let inner_clone = inner.clone(); let registry = display .as_ref() .send::<wl_registry::WlRegistry>(wl_display::Request::GetRegistry {}, None) .unwrap(); registry.quick_assign(move |proxy, msg, data| { let mut inner = inner.lock().unwrap(); let inner = &mut *inner; match msg { wl_registry::Event::Global { name, interface, version, } => { inner.list.push((name, interface.clone(), version)); callback( GlobalEvent::New { id: name, interface, version, }, (*proxy).clone(), data, ); } wl_registry::Event::GlobalRemove { name } => { if let Some((i, _)) = inner.list.iter().enumerate().find(|&(_, &(n, _, _))| n == name) { let (id, interface, _) = inner.list.swap_remove(i); callback(GlobalEvent::Removed { id, interface }, (*proxy).clone(), data); } else { panic!( "Wayland protocol error: the server removed non-existing global \"{}\".", name ); } } } }); GlobalManager { inner: inner_clone, registry, } } pub fn instantiate_exact<I>(&self, version: u32) -> Result<Main<I>, GlobalError> where I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>, { let inner = self.inner.lock().unwrap(); for &(id, ref interface, server_version) in &inner.list { if interface == I::NAME { if version > server_version { return Err(GlobalError::VersionTooLow(server_version)); } else { return Ok(self.registry.bind::<I>(version, id)); } } } Err(GlobalError::Missing) } pub fn instantiate_range<I>(&self, min_version: u32, max_version: u32) -> Result<Main<I>, GlobalError> where I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>, { let inner = self.inner.lock().unwrap(); for &(id, ref interface, version) in &inner.list { if interface == I::NAME { if version >= min_version { let version = ::std::cmp::min(version, 
max_version); return Ok(self.registry.bind::<I>(version, id)); } else { return Err(GlobalError::VersionTooLow(version)); } } } Err(GlobalError::Missing) } pub fn list(&self) -> Vec<(u32, String, u32)> { self.inner.lock().unwrap().list.clone() } } pub trait GlobalImplementor<I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>> { fn new_global(&mut self, global: Main<I>, data: DispatchData); fn error(&mut self, _version: u32, _data: DispatchData) {} } impl<F, I: Interface> GlobalImplementor<I> for F where I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>, F: FnMut(Main<I>, DispatchData), { fn new_global(&mut self, global: Main<I>, data: DispatchData) { (*self)(global, data) } } #[macro_export] macro_rules! global_filter { ($([$interface:ty, $version:expr, $callback:expr]),*) => { { use $crate::protocol::wl_registry; use $crate::{GlobalEvent, Interface, Attached, GlobalImplementor, DispatchData}; type Callback = Box<dyn FnMut(u32, u32, Attached<wl_registry::WlRegistry>, DispatchData<'_>)>; let mut callbacks: Vec<(&'static str, Callback)> = Vec::new(); $({ let mut cb = { $callback }; callbacks.push(( <$interface as Interface>::NAME, Box::new(move |id, version, registry: Attached<wl_registry::WlRegistry>, ddata: DispatchData| { if version < $version { GlobalImplementor::<$interface>::error(&mut cb, version, ddata); } else { let proxy = registry.bind::<$interface>(version, id); GlobalImplementor::<$interface>::new_global(&mut cb, proxy, ddata); } }) as Box<_> )); })* move |event: GlobalEvent, registry: Attached<wl_registry::WlRegistry>, ddata| { if let GlobalEvent::New { id, interface, version } = event { for &mut (iface, ref mut cb) in &mut callbacks { if iface == interface { cb(id, version, registry, ddata); break; } } } } } } }
match msg { wl_registry::Event::Global { name, interface, version, } => { inner.list.push((name, interface, version)); } wl_registry::Event::GlobalRemove { name } => { inner.list.retain(|&(n, _, _)| n != name); } }
if_condition
[ { "content": "pub fn demodulate(list: &str, cache: &mut AtomCache) -> Rc<Exp> {\n\n let (ret, _) = demodulate_list(list, cache);\n\n ret.set_cached(Rc::clone(&ret));\n\n ret\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 0, "score": 154818.572826611 }, { "content": "pub fn modulate(list: &Exp) -> String {\n\n let mut buffer = String::new();\n\n modulate_mut(list, &mut buffer);\n\n buffer\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 1, "score": 148736.74652800857 }, { "content": "fn modulate_mut(list: &Exp, buffer: &mut String) {\n\n if let Exp::Atom(Atom::Nil) = list {\n\n buffer.push_str(\"00\");\n\n return;\n\n }\n\n\n\n if let Exp::Atom(Atom::Int(int)) = list {\n\n modulate_int_mut(*int, buffer);\n\n return;\n\n }\n\n\n\n let (head, tail) = list.to_cons();\n\n buffer.push_str(\"11\");\n\n modulate_mut(&head, buffer);\n\n modulate_mut(&tail, buffer);\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 2, "score": 147036.39000017056 }, { "content": "pub fn demodulate_list<'v>(value: &'v str, cache: &mut AtomCache) -> (Rc<Exp>, &'v str) {\n\n match &value[0..2] {\n\n | \"00\" => (cache.get(Atom::Nil), &value[2..]),\n\n | \"11\" => {\n\n let (head, rest) = demodulate_list(&value[2..], cache);\n\n let (tail, rest) = demodulate_list(&rest, cache);\n\n (Exp::app(Exp::app(cache.get(Atom::Cons), head), tail), rest)\n\n }\n\n | _ => demodulate_int(value, cache),\n\n }\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 3, "score": 140620.44311275185 }, { "content": "/// https://message-from-space.readthedocs.io/en/latest/message13.html\n\npub fn modulate_int_mut(value: i64, buffer: &mut String) {\n\n\n\n // Bits 0..1 define a positive or negative number (and signal width)\n\n // via a high/low or low/high signal change:\n\n // - 01: positive number\n\n // - 10: negative number\n\n if value >= 0 {\n\n buffer.push_str(\"01\");\n\n } else {\n\n buffer.push_str(\"10\");\n\n }\n\n\n\n // Bits 2..(n+2) define the width of the following binary-encoded\n\n 
// number via a unary-encoded number of length n composed of high\n\n // signals ending with a low signal. The number width (in bits) is\n\n // four times the unary encoding (i.e. 4 * n):\n\n //\n\n // - 0: 0 [i.e. the number zero]\n\n // - 10: 4-bit number [i.e. 1-7]\n\n // - 110: 8-bit number [i.e. 1-255]\n", "file_path": "src/transport.rs", "rank": 4, "score": 136551.67958237976 }, { "content": "fn main() -> anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n // let client = icfp::Client::new()?;\n\n\n\n // let mut args = env::args().skip(1);\n\n\n\n // let mode = match args.next().as_deref() {\n\n // Some(\"t\") | Some(\"test\") => Mode::Test,\n\n // Some(\"p\") | Some(\"protocol\") => Mode::Protocol,\n\n // other => {\n\n // return Err(anyhow!(\n\n // \"Unknown mode '{:?}', expected '[t]est' or '[p]rotocol'\",\n\n // other\n\n // ))\n\n // }\n\n // };\n\n\n\n // let path = args.next().unwrap();\n\n // let transmission = fs::read_to_string(&path)?;\n", "file_path": "jeff/main.rs", "rank": 5, "score": 125132.7348572101 }, { "content": "fn main() -> anyhow::Result<()> {\n\n Ok(())\n\n}\n", "file_path": "newton/main.rs", "rank": 6, "score": 125132.7348572101 }, { "content": "fn main() -> anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n // let _ = icfp::Client::new()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "justin/main.rs", "rank": 7, "score": 125132.7348572101 }, { "content": "fn main() -> anyhow::Result<()> {\n\n\n\n env_logger::init();\n\n\n\n let client = icfp::Client::new(\n\n URL.trim().to_owned(),\n\n KEY.trim().to_owned(),\n\n None,\n\n );\n\n\n\n let path = env::var(\"ICFP_PROTOCOL\")\n\n .unwrap_or_else(|_| String::from(\"data/galaxy.txt\"));\n\n\n\n let file = fs::read_to_string(path)?;\n\n let tokens = icfp::lex(&file);\n\n let protocol = icfp::parse::interaction_protocol(tokens);\n\n\n\n let mut cache = AtomCache::default();\n\n let nil = cache.get(Atom::Nil);\n\n let mut state = Rc::clone(&nil);\n", "file_path": "galaxy/main.rs", "rank": 8, 
"score": 125132.7348572101 }, { "content": "fn main() -> anyhow::Result<()> {\n\n\n\n env_logger::init();\n\n\n\n let mut args = env::args().skip(1);\n\n let server_url = args.next().unwrap();\n\n let player_key = args\n\n .next()\n\n .unwrap()\n\n .parse::<i64>()\n\n .unwrap();\n\n\n\n log::info!(\"Player Key: {}\", player_key);\n\n\n\n let mut atoms = icfp::ast::AtomCache::default();\n\n let client = icfp::Client::new(\n\n server_url,\n\n API_KEY.trim().to_owned(),\n\n Some(player_key),\n\n );\n", "file_path": "submit/main.rs", "rank": 9, "score": 125132.7348572101 }, { "content": "fn main() -> anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n // let _ = icfp::Client::new()?;\n\n // let mut draw_vec = Vec::new();\n\n // draw_vec.push(1);\n\n // draw_vec.push(2);\n\n // let draw_args = build_vec(&mut draw_vec, Value::Nil);\n\n // let draw_args = eval(\n\n // dbg!(&exp(\n\n // &mut lex(\"ap ap cons 1 ap ap cons ap ap cons 2 ap ap cons 3 nil ap ap cons 4 nil \")\n\n // ).expect(\"bruh\")));\n\n // print!(\"{}\", modulate_list(draw_args));\n\n\n\n// println!(\"{:?}\", demodulate_list(\"110110000111011111100001001111110101000000\"));\n\n// println!(\"{:?}\", demodulate_list(\"110110000111011111100001001111110100110000\"));\n\n// println!(\"\");\n\n// println!(\"{:?}\", demodulate(\"1101100001110111110101000110010111000\"));\n\n\n\n Ok(())\n\n}\n", "file_path": "attilus/main.rs", "rank": 10, "score": 125132.7348572101 }, { "content": "fn main() -> anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n // let _ = icfp::Client::new()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "aaron/main.rs", "rank": 11, "score": 125132.7348572101 }, { "content": "fn main() -> anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n// // let _ = icfp::Client::new()?;\n\n// let mut image1: Vec<(i64, i64)> = Vec::new();\n\n// image1.push((2, 2));\n\n// image1.push((2, 3));\n\n// image1.push((3, 2));\n\n// image1.push((3, 3));\n\n\n\n// let mut image2: Vec<(i64, i64)> = Vec::new();\n\n// 
image2.push((6, 6));\n\n// image2.push((6, 7));\n\n// image2.push((7, 6));\n\n// image2.push((7, 7));\n\n\n\n// let mut image3: Vec<(i64, i64)> = Vec::new();\n\n// image3.push((10, 10));\n\n// image3.push((10, 11));\n\n// image3.push((11, 10));\n\n// image3.push((11, 11));\n\n\n\n// multidraw(&build_images_vec(vec![image1, image2, image3], Value::Nil));\n\n\n\n Ok(())\n\n}\n", "file_path": "andy/main.rs", "rank": 12, "score": 125132.7348572101 }, { "content": "pub fn lex<'input>(input: &'input str) -> impl Iterator<Item = Token> + 'input {\n\n input\n\n .trim()\n\n .split('\\n')\n\n .enumerate()\n\n .filter(|(_, text)| !text.is_empty())\n\n .flat_map(|(line, text)| {\n\n text.trim()\n\n .split_whitespace()\n\n .map(move |word| (line, word))\n\n })\n\n .map(|(line, token)| {\n\n match token {\n\n | int if int.parse::<i64>().is_ok() => int\n\n .parse::<i64>()\n\n .map(Token::Int)\n\n .unwrap(),\n\n | var if var.starts_with(\":\") | var.starts_with(\"x\") => var[1..]\n\n .parse::<u64>()\n\n .map(Token::Var)\n", "file_path": "src/lex.rs", "rank": 13, "score": 115329.59335034364 }, { "content": "pub fn modulate_int(value: i64) -> String {\n\n let mut buffer = String::new();\n\n modulate_int_mut(value, &mut buffer);\n\n buffer\n\n}\n\n\n", "file_path": "src/transport.rs", "rank": 14, "score": 104112.4693139195 }, { "content": "fn demodulate_int<'v>(v: &'v str, cache: &mut AtomCache) -> (Rc<Exp>, &'v str) {\n\n let positive = &v[0..2] == \"01\";\n\n let index = v[2..]\n\n .find('0')\n\n .expect(\"Expected '0' in linear-encoded value\");\n\n let length = index * 4;\n\n let mut final_val = i64::from_str_radix(&v[index+2..index+3+length], 2).unwrap();\n\n if !positive {\n\n final_val = -final_val;\n\n }\n\n (cache.get(Atom::Int(final_val)), &v[index+3+length..])\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n // use crate::parse::exp;\n\n // use crate::lex::lex;\n\n\n\n // #[test]\n", "file_path": "src/transport.rs", "rank": 15, "score": 98327.60373935165 }, { "content": 
"pub fn multidraw_exp(exp: &Exp, buffer: &mut Vec<Vec<(i64, i64)>>) {\n\n if let Exp::Atom(Atom::Nil) = exp {\n\n return;\n\n }\n\n\n\n let (points, rest) = exp.to_cons();\n\n let mut frame = Vec::new();\n\n draw_exp(points, &mut frame);\n\n buffer.push(frame);\n\n multidraw_exp(rest, buffer);\n\n}\n", "file_path": "src/draw.rs", "rank": 16, "score": 83272.38558186196 }, { "content": "pub fn eval(e: &Rc<Exp>, p: &Protocol, a: &mut AtomCache) -> Rc<Exp> {\n\n if let Some(cached) = e.get_cached() {\n\n return cached;\n\n }\n\n\n\n let mut prev = Rc::clone(&e);\n\n\n\n loop {\n\n let next = step(&prev, p, a);\n\n\n\n // Found fixpoint of `step` function\n\n if prev == next {\n\n e.set_cached(Rc::clone(&next));\n\n return next;\n\n }\n\n\n\n prev = next;\n\n }\n\n}\n\n\n", "file_path": "src/eval.rs", "rank": 17, "score": 82689.08558014098 }, { "content": "fn initial_vectors(text: &str) -> Vec<Rc<Exp>> {\n\n text.trim()\n\n .split_whitespace()\n\n .rev()\n\n .map(|pair| {\n\n let mut iter = pair.split(',');\n\n\n\n let x = iter\n\n .next()\n\n .unwrap()\n\n .trim()\n\n .parse::<i64>()\n\n .unwrap();\n\n\n\n let y = iter\n\n .next()\n\n .unwrap()\n\n .trim()\n\n .parse::<i64>()\n\n .unwrap();\n\n\n\n vector(x, y)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "galaxy/main.rs", "rank": 18, "score": 82041.47503620162 }, { "content": "pub fn interact(\n\n c: &Client,\n\n p: &Protocol,\n\n a: &mut AtomCache,\n\n\n\n s: Rc<Exp>,\n\n v: Rc<Exp>,\n\n) -> (Rc<Exp>, Rc<Exp>) {\n\n let e = eval(\n\n &Exp::app(Exp::app(Rc::clone(&p[p.galaxy]), s), v),\n\n p,\n\n a,\n\n );\n\n\n\n _interact(c, p, a, e)\n\n}\n\n\n", "file_path": "src/eval.rs", "rank": 19, "score": 76081.2894514865 }, { "content": "pub fn draw(exp: &Exp) {\n\n clear();\n\n _draw(exp);\n\n io::stdout().flush().unwrap();\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 20, "score": 68413.9743389015 }, { "content": "pub fn multidraw(exp: &Exp) {\n\n alt_buffer();\n\n hide_cursor();\n\n clear();\n\n _multidraw(exp);\n\n 
show_cursor();\n\n reg_buffer();\n\n io::stdout().flush().unwrap();\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 21, "score": 68413.9743389015 }, { "content": "pub fn exp<I: Iterator<Item = Token>>(\n\n tokens: &mut I,\n\n) -> Option<ast::Exp> {\n\n use Token::*;\n\n let exp = match tokens.next()? {\n\n | Var(var) => ast::Exp::Atom(ast::Atom::Var(var)),\n\n | Int(int) => ast::Exp::Atom(ast::Atom::Int(int)),\n\n | Bool(bool) => ast::Exp::Atom(ast::Atom::Bool(bool)),\n\n | Neg => ast::Exp::Atom(ast::Atom::Neg),\n\n | Inc => ast::Exp::Atom(ast::Atom::Inc),\n\n | Dec => ast::Exp::Atom(ast::Atom::Dec),\n\n | Add => ast::Exp::Atom(ast::Atom::Add),\n\n | Mul => ast::Exp::Atom(ast::Atom::Mul),\n\n | Div => ast::Exp::Atom(ast::Atom::Div),\n\n | Eq => ast::Exp::Atom(ast::Atom::Eq),\n\n | Lt => ast::Exp::Atom(ast::Atom::Lt),\n\n | S => ast::Exp::Atom(ast::Atom::S),\n\n | C => ast::Exp::Atom(ast::Atom::C),\n\n | B => ast::Exp::Atom(ast::Atom::B),\n\n | I => ast::Exp::Atom(ast::Atom::I),\n", "file_path": "src/parse.rs", "rank": 22, "score": 63997.34363828555 }, { "content": "pub fn interaction_protocol<I: IntoIterator<Item = Token>>(\n\n tokens: I,\n\n) -> ast::Protocol {\n\n let mut tokens = tokens.into_iter();\n\n let mut assignments = HashMap::new();\n\n while let Some((var, exp)) = assign(&mut tokens) {\n\n assignments.insert(var, Rc::new(exp));\n\n }\n\n ast::Protocol {\n\n assignments,\n\n galaxy: galaxy(&mut tokens),\n\n }\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 23, "score": 62180.17951114222 }, { "content": "fn redraw(\n\n window_buffer: &mut Vec<u32>,\n\n data_buffer: &[Vec<(i64, i64)>],\n\n current_x: i64,\n\n current_y: i64,\n\n scale: i64,\n\n filter: Option<usize>,\n\n window: &mut Window,\n\n) -> anyhow::Result<()> {\n\n // Clear window buffer\n\n window_buffer\n\n .iter_mut()\n\n .for_each(|pixel| *pixel = 0);\n\n\n\n let scaled_width = WIDTH as i64 / scale;\n\n let scaled_height = HEIGHT as i64 / scale;\n\n\n\n // Draw points on GUI\n\n for (color, 
frame) in data_buffer.iter().enumerate() {\n\n\n", "file_path": "galaxy/main.rs", "rank": 24, "score": 56946.72311437979 }, { "content": "fn draw_exp(exp: &Exp, frame: &mut Vec<(i64, i64)>) {\n\n if let Exp::Atom(Atom::Nil) = exp {\n\n return;\n\n }\n\n\n\n let (point, rest) = exp.to_cons();\n\n draw_point_exp(point, frame);\n\n draw_exp(rest, frame);\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 25, "score": 55895.03223789677 }, { "content": "fn galaxy<'arena, I: Iterator<Item = Token>>(tokens: &mut I) -> u64 {\n\n match tokens.next() {\n\n | Some(Token::Assign) => (),\n\n | _ => panic!(\"Invalid galaxy: expected '=' token\"),\n\n }\n\n\n\n match tokens.next() {\n\n | Some(Token::Var(var)) => var,\n\n | _ => panic!(\"Expected galaxy var token\"),\n\n }\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 26, "score": 55707.236622548284 }, { "content": "fn draw_point_exp(exp: &Exp, frame: &mut Vec<(i64, i64)>) {\n\n let (x, y) = exp.to_cons();\n\n frame.push((extract_int(x), extract_int(y)));\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 27, "score": 54559.747515539464 }, { "content": "fn step(e: &Rc<Exp>, p: &Protocol, a: &mut AtomCache) -> Rc<Exp> {\n\n if let Some(cached) = e.get_cached() {\n\n return cached;\n\n }\n\n\n\n // Evaluate atoms:\n\n //\n\n // ```text\n\n // a\n\n // ```\n\n let (f, x) = match &**e {\n\n | Exp::Atom(Atom::Var(var)) => return Rc::clone(&p[*var]),\n\n | Exp::Atom(atom) => return a.get(*atom),\n\n | Exp::App(f, x, _) => (f, x),\n\n };\n\n\n\n let f = eval(f, p, a);\n\n\n\n // Evaluate single-argument functions:\n\n //\n", "file_path": "src/eval.rs", "rank": 28, "score": 53563.758157126846 }, { "content": "fn vector(x: i64, y: i64) -> Rc<Exp> {\n\n Exp::cons(\n\n Exp::Atom(Atom::Int(x)),\n\n Exp::Atom(Atom::Int(y)),\n\n )\n\n}\n", "file_path": "galaxy/main.rs", "rank": 29, "score": 46556.621217880696 }, { "content": "fn direction(ship: &game::Ship) -> (i64, i64) {\n\n match (ship.x >= 0, ship.y >= 0) {\n\n | (true, true) => (-1, 
1),\n\n | (false, true) => (-1, -1),\n\n | (false, false) => (1, -1),\n\n | (true, false) => (1, 1),\n\n }\n\n}\n", "file_path": "submit/main.rs", "rank": 30, "score": 44947.92007753843 }, { "content": "fn _interact(\n\n c: &Client,\n\n p: &Protocol,\n\n a: &mut AtomCache,\n\n e: Rc<Exp>,\n\n) -> (Rc<Exp>, Rc<Exp>) {\n\n let (flag, tail) = e.to_cons();\n\n let (state, tail) = tail.to_cons();\n\n let (data, tail) = tail.to_cons();\n\n\n\n assert_eq!(**tail, Exp::Atom(Atom::Nil));\n\n\n\n if let Exp::Atom(Atom::Int(0)) = &**flag {\n\n (Rc::clone(state), Rc::clone(data))\n\n } else {\n\n let new_data = c\n\n .send_alien_message(a, data)\n\n .expect(\"Failed to send message to server\");\n\n interact(c, p, a, Rc::clone(state), new_data)\n\n }\n\n}\n\n\n", "file_path": "src/eval.rs", "rank": 31, "score": 30491.898255375312 }, { "content": "fn clear() {\n\n print!(\"{}\", CLEAR_SCREEN);\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 32, "score": 30491.898255375312 }, { "content": "fn alt_buffer() {\n\n print!(\"{}?1049h\", CSI);\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 33, "score": 29219.47831493084 }, { "content": "fn show_cursor() {\n\n print!(\"{}?25h\", CSI);\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 34, "score": 29219.47831493084 }, { "content": "fn hide_cursor() {\n\n print!(\"{}?25l\", CSI);\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 35, "score": 29219.47831493084 }, { "content": "fn reg_buffer() {\n\n print!(\"{}?1049l\", CSI);\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 36, "score": 29219.47831493084 }, { "content": "\n\n let mut vectors = initial_vectors(INITIAL_VECTORS);\n\n vectors.push(Exp::cons(\n\n Exp::Atom(Atom::Int(0)),\n\n Exp::Atom(Atom::Int(0)),\n\n ));\n\n\n\n let mut title_buffer = String::new();\n\n let mut data_buffer = Vec::new();\n\n let mut debounce = time::Instant::now();\n\n\n\n let mut current_x = 0i64;\n\n let mut current_y = 0i64;\n\n let mut speed = 1;\n\n let mut scale = 16;\n\n let mut filter = None;\n\n\n\n 
let mut window_buffer = vec![0u32; WIDTH * HEIGHT];\n\n let mut window = Window::new(\n\n \"Galaxy UI\",\n", "file_path": "galaxy/main.rs", "rank": 37, "score": 27904.568286056965 }, { "content": "// use icfp::eval;\n\n// use icfp::lex;\n\n// use icfp::parse::exp;\n\n// use icfp::transport::modulate;\n\n// use icfp::transport::demodulate;\n\n// use icfp::transport::demodulate_list;\n\n// use std::vec::Vec;\n\n\n\n// #[allow(dead_code)]\n\n// fn build_vec(vec: &mut Vec<i64>, acc: Value) -> Value {\n\n// if vec.is_empty() {\n\n// return acc\n\n// };\n\n// // let (x, y) = vec.pop().expect(\"Empty vec?\");\n\n// let x = vec.pop().expect(\"Empty vec?\");\n\n// build_vec(\n\n// vec,\n\n// Value::Cons(\n\n// Box::new(\n\n// Value::Int(x)\n\n// ),\n\n// Box::new(acc)\n\n// )\n\n// )\n\n// }\n\n\n", "file_path": "attilus/main.rs", "rank": 38, "score": 27902.09468392128 }, { "content": "// use std::vec::Vec;\n\n// use icfp::Value;\n\n// use icfp::draw::multidraw;\n\n\n\n// fn build_vec(mut vec: Vec<(i64, i64)>, acc: Value) -> Value {\n\n// if vec.is_empty() {\n\n// return acc\n\n// };\n\n// let (x, y) = vec.pop().expect(\"Empty vec?\");\n\n// build_vec(\n\n// vec,\n\n// Value::Cons(\n\n// Box::new(\n\n// Value::Cons(\n\n// Box::new(Value::Int(x)),\n\n// Box::new(Value::Int(y)),\n\n// )\n\n// ),\n\n// Box::new(acc)\n\n// )\n", "file_path": "andy/main.rs", "rank": 39, "score": 27901.37895876705 }, { "content": "use std::cmp;\n\nuse std::env;\n\nuse std::fmt::Write as _;\n\nuse std::fs;\n\nuse std::rc::Rc;\n\nuse std::time;\n\n\n\nuse minifb::Key;\n\nuse minifb::KeyRepeat;\n\nuse minifb::MouseButton;\n\nuse minifb::MouseMode;\n\nuse minifb::Window;\n\nuse minifb::WindowOptions;\n\n\n\nuse icfp::ast::Atom;\n\nuse icfp::ast::AtomCache;\n\nuse icfp::ast::Exp;\n\n\n\nconst WIDTH: usize = 1920;\n\nconst HEIGHT: usize = 1080;\n\n\n\nstatic INITIAL_VECTORS: &str = include_str!(\"init.txt\");\n\nstatic URL: &str = include_str!(\"../url.txt\");\n\nstatic KEY: &str = 
include_str!(\"../key.txt\");\n\n\n", "file_path": "galaxy/main.rs", "rank": 40, "score": 27900.861669283266 }, { "content": "use std::env;\n\n\n\nuse rand::Rng as _;\n\n\n\nuse icfp::game;\n\n\n\n// TODO: wipe from Git history before publicizing repo\n\nstatic API_KEY: &str = include_str!(\"../key.txt\");\n\n\n", "file_path": "submit/main.rs", "rank": 41, "score": 27900.369333667997 }, { "content": " | _ => -1,\n\n };\n\n\n\n commands.push(game::Command::Accelerate {\n\n id: ally.id,\n\n x: dx * sign,\n\n y: dy * sign,\n\n })\n\n }\n\n\n\n if ally.temp > ally.max_temp / 2\n\n || ally.stats.damage == 0 {\n\n continue;\n\n }\n\n\n\n let mut min_dist = i64::MAX;\n\n let mut min_ship = None;\n\n\n\n for (enemy, _) in state.ships.iter().filter(|(ship, _)| ship.role != team) {\n\n let dist = ((ally.x + ally.vx) - (enemy.x + enemy.vx)).pow(2) +\n", "file_path": "submit/main.rs", "rank": 42, "score": 27900.319189159763 }, { "content": "// )\n\n// }\n\n\n\n// fn build_images_vec(mut vec: Vec<Vec<(i64, i64)>>, acc: Value) -> Value {\n\n// if vec.is_empty() {\n\n// return acc\n\n// };\n\n// let image: Vec<(i64, i64)> = vec.pop().expect(\"Empty list of images?\");\n\n// build_images_vec(\n\n// vec,\n\n// Value::Cons(\n\n// Box::new(build_vec(image, Value::Nil)),\n\n// Box::new(acc)\n\n// )\n\n// )\n\n// }\n\n\n", "file_path": "andy/main.rs", "rank": 43, "score": 27899.92872578556 }, { "content": "\n\n client.join(&mut atoms)?;\n\n\n\n let stats = game::Stats {\n\n fuel: 128,\n\n damage: 64,\n\n coolant: 4,\n\n spawns: 4,\n\n };\n\n\n\n let mut current = client.start(&mut atoms, &stats)?;\n\n let mut commands = Vec::new();\n\n let mut spawned = 0;\n\n let mut rng = rand::thread_rng();\n\n\n\n let team = current.info.role;\n\n\n\n while current.stage != game::Stage::Finished {\n\n\n\n let state = current\n", "file_path": "submit/main.rs", "rank": 44, "score": 27899.2305119269 }, { "content": " commands.push(game::Command::Split {\n\n id: ally.id,\n\n stats: game::Stats {\n\n 
fuel: 8,\n\n damage: 0,\n\n coolant: 0,\n\n spawns: 1,\n\n },\n\n });\n\n spawned = state.tick;\n\n continue;\n\n }\n\n\n\n if ally.temp <= ally.max_temp / 2 {\n\n let (dx, dy) = direction(ally);\n\n\n\n let sign = match speed {\n\n | 000..=064 if rng.gen_ratio(1, 8) => 2,\n\n | 000..=064 => 1,\n\n | 065..=144 => continue,\n", "file_path": "submit/main.rs", "rank": 45, "score": 27898.49860482829 }, { "content": " WIDTH,\n\n HEIGHT,\n\n WindowOptions::default(),\n\n )?;\n\n\n\n window.limit_update_rate(Some(time::Duration::from_micros(16600)));\n\n\n\n while window.is_open() {\n\n\n\n let in_state = std::mem::replace(&mut state, Rc::clone(&nil));\n\n let in_vector = vectors.pop().expect(\"Missing vector\");\n\n\n\n log::info!(\"[send]: {}\", &in_vector);\n\n\n\n let (out_state, out_data) = icfp::interact(\n\n &client,\n\n &protocol,\n\n &mut cache,\n\n in_state,\n\n in_vector,\n", "file_path": "galaxy/main.rs", "rank": 46, "score": 27898.159509695226 }, { "content": " ((ally.y + ally.vy) - (enemy.y + enemy.vy)).pow(2);\n\n\n\n if dist < min_dist {\n\n min_dist = dist;\n\n min_ship = Some(enemy);\n\n }\n\n }\n\n\n\n // Same quadrant\n\n if let Some(enemy) = min_ship {\n\n if (enemy.x + enemy.vx >= 0) == (ally.x + ally.vx >= 0)\n\n && (enemy.y + enemy.vy >= 0) == (ally.y + ally.vy >= 0) {\n\n commands.push(game::Command::Shoot {\n\n id: ally.id,\n\n x: enemy.x + enemy.vx,\n\n y: enemy.y + enemy.vy,\n\n power: ally.stats.damage,\n\n });\n\n }\n\n }\n\n }\n\n\n\n current = client.commands(&mut atoms, &commands)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "submit/main.rs", "rank": 47, "score": 27897.644124655373 }, { "content": "// use std::env;\n\n// use std::fs;\n\n\n\n// use anyhow::anyhow;\n\n\n\n// #[derive(Copy, Clone, Debug)]\n\n// enum Mode {\n\n// Test,\n\n// Protocol,\n\n// }\n\n\n", "file_path": "jeff/main.rs", "rank": 48, "score": 27897.55971294987 }, { "content": " );\n\n\n\n data_buffer.clear();\n\n icfp::draw::multidraw_exp(&out_data, &mut 
data_buffer);\n\n\n\n redraw(\n\n &mut window_buffer,\n\n &data_buffer,\n\n current_x,\n\n current_y,\n\n scale,\n\n filter,\n\n &mut window,\n\n )?;\n\n\n\n while vectors.is_empty() {\n\n\n\n let mut dirty = false;\n\n\n\n if debounce.elapsed() > time::Duration::from_millis(250) {\n", "file_path": "galaxy/main.rs", "rank": 49, "score": 27897.300042495383 }, { "content": " &data_buffer,\n\n current_x,\n\n current_y,\n\n scale,\n\n filter,\n\n &mut window,\n\n )?;\n\n } else {\n\n window.update();\n\n }\n\n };\n\n\n\n log::info!(\"[recv]: {}\", &out_state);\n\n let _ = std::mem::replace(&mut state, out_state);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "galaxy/main.rs", "rank": 50, "score": 27896.985825789496 }, { "content": " }\n\n if window.is_key_pressed(Key::Equal, KeyRepeat::Yes) {\n\n scale = cmp::min(scale << 1, 32);\n\n dirty = true;\n\n }\n\n\n\n title_buffer.clear();\n\n write!(\n\n &mut title_buffer,\n\n \"Galaxy Position: ({}, {}) @ Speed {} & Scale {}\",\n\n current_x,\n\n current_y,\n\n speed,\n\n scale,\n\n )?;\n\n window.set_title(&title_buffer);\n\n\n\n if dirty {\n\n redraw(\n\n &mut window_buffer,\n", "file_path": "galaxy/main.rs", "rank": 51, "score": 27896.526827915273 }, { "content": " let index = ((y + dy) as usize) * WIDTH + ((x + dx) as usize);\n\n let shift = (color % 3) * 8;\n\n let apply = (((window_buffer[index] >> shift) as u8).saturating_add(127) as u32) << shift;\n\n window_buffer[index] |= apply;\n\n }\n\n }\n\n }\n\n }\n\n\n\n window\n\n .update_with_buffer(&window_buffer, WIDTH, HEIGHT)\n\n .map_err(anyhow::Error::from)\n\n}\n\n\n", "file_path": "galaxy/main.rs", "rank": 52, "score": 27896.04925243139 }, { "content": " .state\n\n .expect(\"Missing game state\");\n\n\n\n log::info!(\"Tick {}\", state.tick);\n\n\n\n commands.clear();\n\n\n\n for (ally, _) in state.ships.iter().filter(|(ship, _)| ship.role == team) {\n\n\n\n // Skip dummy ships with no fuel\n\n if ally.stats.fuel == 0 {\n\n continue;\n\n }\n\n\n\n let speed = 
ally.vx.pow(2) + ally.vy.pow(2);\n\n\n\n if ally.stats.fuel > 64\n\n && ally.stats.spawns > 1\n\n && speed > 100\n\n && state.tick - spawned > 16 {\n", "file_path": "submit/main.rs", "rank": 53, "score": 27895.752359598642 }, { "content": " // let tokens = icfp::lex(&transmission);\n\n\n\n // match mode {\n\n // Mode::Protocol => {\n\n // let entry = icfp::PROTOCOL.galaxy;\n\n // let expr = &icfp::PROTOCOL[entry];\n\n // dbg!(expr);\n\n // }\n\n // Mode::Test => {\n\n // let test = icfp::parse::test_suite(tokens);\n\n // dbg!(&test);\n\n // for t in test.equals {\n\n // let lhs = dbg!(icfp::eval(&t.equal.lhs));\n\n // let rhs = dbg!(icfp::eval(&t.equal.rhs));\n\n // assert_eq!(lhs, rhs)\n\n // }\n\n // }\n\n // }\n\n\n\n Ok(())\n\n}\n", "file_path": "jeff/main.rs", "rank": 54, "score": 27895.66320191147 }, { "content": " }\n\n if window.is_key_pressed(Key::E, KeyRepeat::Yes) {\n\n speed += 1;\n\n }\n\n\n\n if window.is_key_pressed(Key::Key0, KeyRepeat::Yes) { filter = None; dirty = true; }\n\n if window.is_key_pressed(Key::Key1, KeyRepeat::Yes) { filter = Some(0); dirty = true; }\n\n if window.is_key_pressed(Key::Key2, KeyRepeat::Yes) { filter = Some(1); dirty = true; }\n\n if window.is_key_pressed(Key::Key3, KeyRepeat::Yes) { filter = Some(2); dirty = true; }\n\n if window.is_key_pressed(Key::Key4, KeyRepeat::Yes) { filter = Some(3); dirty = true; }\n\n if window.is_key_pressed(Key::Key5, KeyRepeat::Yes) { filter = Some(4); dirty = true; }\n\n if window.is_key_pressed(Key::Key6, KeyRepeat::Yes) { filter = Some(5); dirty = true; }\n\n\n\n if window.is_key_pressed(Key::E, KeyRepeat::Yes) {\n\n speed += 1;\n\n }\n\n\n\n if window.is_key_pressed(Key::Minus, KeyRepeat::Yes) {\n\n scale = cmp::max(scale >> 1, 1);\n\n dirty = true;\n", "file_path": "galaxy/main.rs", "rank": 55, "score": 27895.04261423656 }, { "content": " if window.get_mouse_down(MouseButton::Left) {\n\n if let Some((x, y)) = window.get_mouse_pos(MouseMode::Discard) {\n\n debounce = 
time::Instant::now();\n\n vectors.push(Exp::cons(\n\n Exp::Atom(Atom::Int(x as i64 / scale + current_x)),\n\n Exp::Atom(Atom::Int(y as i64 / scale + current_y)),\n\n ));\n\n }\n\n }\n\n }\n\n\n\n if window.is_key_pressed(Key::Escape, KeyRepeat::Yes) {\n\n return Ok(())\n\n }\n\n\n\n if window.is_key_pressed(Key::Left, KeyRepeat::Yes)\n\n || window.is_key_pressed(Key::A, KeyRepeat::Yes) {\n\n current_x -= speed;\n\n dirty = true;\n\n }\n", "file_path": "galaxy/main.rs", "rank": 56, "score": 27894.095266481643 }, { "content": " if window.is_key_pressed(Key::Right, KeyRepeat::Yes)\n\n || window.is_key_pressed(Key::D, KeyRepeat::Yes) {\n\n current_x += speed;\n\n dirty = true;\n\n }\n\n\n\n // Note: inverted Y coordinate\n\n if window.is_key_pressed(Key::Up, KeyRepeat::Yes)\n\n || window.is_key_pressed(Key::W, KeyRepeat::Yes) {\n\n current_y -= speed;\n\n dirty = true;\n\n }\n\n if window.is_key_pressed(Key::Down, KeyRepeat::Yes)\n\n || window.is_key_pressed(Key::S, KeyRepeat::Yes) {\n\n current_y += speed;\n\n dirty = true;\n\n }\n\n\n\n if window.is_key_pressed(Key::Q, KeyRepeat::Yes) {\n\n speed = cmp::max(speed, 1);\n", "file_path": "galaxy/main.rs", "rank": 57, "score": 27894.095266481643 }, { "content": " // Filter specific frames\n\n if let Some(filter) = filter {\n\n if color != filter {\n\n continue;\n\n }\n\n }\n\n\n\n for (x, y) in frame {\n\n if *x < current_x\n\n || *x >= current_x + scaled_width\n\n || *y < current_y\n\n || *y >= current_y + scaled_height {\n\n continue;\n\n }\n\n\n\n let x = (x - current_x) * scale;\n\n let y = (y - current_y) * scale;\n\n\n\n for dy in 0..scale {\n\n for dx in 0..scale {\n", "file_path": "galaxy/main.rs", "rank": 58, "score": 27894.095266481643 }, { "content": "fn _draw(exp: &Exp) {\n\n if let Exp::Atom(Atom::Nil) = exp {\n\n return;\n\n }\n\n\n\n let (v, n) = exp.to_cons();\n\n draw_point(v);\n\n _draw(n);\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 59, "score": 27464.6877083383 }, { "content": "fn _multidraw(exp: 
&Exp) {\n\n if let Exp::Atom(Atom::Nil) = exp {\n\n return;\n\n }\n\n\n\n let (image, rest) = exp.to_cons();\n\n _draw(image);\n\n thread::sleep(time::Duration::from_secs(1));\n\n _multidraw(rest);\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 60, "score": 27464.6877083383 }, { "content": "fn draw_at(x: i64, y:i64) {\n\n print!(\"{}{};{}H\", CSI, x+1, y+1);\n\n print!(\"█\");\n\n io::stdout().flush().unwrap();\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 61, "score": 27219.80691419739 }, { "content": "fn draw_point(exp: &Exp) {\n\n let (v1, v2) = exp.to_cons();\n\n draw_at(extract_int(v1), extract_int(v2));\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 62, "score": 26414.36316978637 }, { "content": "fn assign<I: Iterator<Item = Token>>(\n\n tokens: &mut I,\n\n) -> Option<(u64, ast::Exp)> {\n\n let var = match tokens.next() {\n\n | Some(Token::Var(var)) => var,\n\n | Some(Token::Galaxy) => return None,\n\n | _ => panic!(\"Invalid assignment: expected var or 'galaxy' token\"),\n\n };\n\n\n\n match tokens.next() {\n\n | Some(Token::Assign) => (),\n\n | _ => panic!(\"Invalid assignment: expected '=' token\"),\n\n }\n\n\n\n Some((var, exp(tokens)?))\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 63, "score": 25953.13977882056 }, { "content": "fn extract_int(exp: &Exp) -> i64 {\n\n match exp {\n\n | Exp::Atom(Atom::Int(vv)) => *vv,\n\n | _ => panic!(\"Extracting int from non-int\"),\n\n }\n\n}\n\n\n", "file_path": "src/draw.rs", "rank": 64, "score": 25209.003195622267 }, { "content": "impl Client {\n\n pub fn new(\n\n server_url: String,\n\n api_key: String,\n\n player_key: Option<i64>,\n\n ) -> Self {\n\n Client {\n\n inner: blocking::Client::new(),\n\n api_key,\n\n player_key,\n\n server_url,\n\n }\n\n }\n\n\n\n pub fn get_alien_response(&self, id: &str) -> anyhow::Result<String> {\n\n log::info!(\"Retrieving alien response for id '{}'\", id);\n\n self.inner\n\n .get(&format!(\"{}/aliens/{}\", &self.server_url, id))\n\n .query(&[(\"apiKey\", 
&self.api_key)])\n\n .send()\n", "file_path": "src/api.rs", "rank": 65, "score": 18.086037091941332 }, { "content": " Some(Command::Split { id, stats })\n\n }\n\n | _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl From<Command> for Exp {\n\n fn from(command: Command) -> Self {\n\n let list = match command {\n\n | Command::Accelerate { id, x, y } => {\n\n list!(\n\n Exp::from(0),\n\n Exp::from(id),\n\n pair!(Exp::from(x), Exp::from(y)),\n\n )\n\n }\n\n | Command::Detonate { id } => {\n\n list!(\n\n Exp::from(2),\n", "file_path": "src/game.rs", "rank": 66, "score": 14.404711098013893 }, { "content": "}\n\n\n\nimpl fmt::Display for Exp {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n | Exp::Atom(atom) => write!(fmt, \"{}\", atom),\n\n | Exp::App(f, x, _) => write!(fmt, \"ap {} {}\", f, x),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Atom {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n | Atom::Nil => write!(fmt, \"nil\"),\n\n | Atom::Int(int) => write!(fmt, \"{}\", int),\n\n | Atom::Var(var) => write!(fmt, \"{}\", var),\n\n | Atom::Bool(bool) => write!(fmt, \"{}\", bool),\n\n | Atom::Neg => write!(fmt, \"neg\"),\n\n | Atom::Inc => write!(fmt, \"inc\"),\n", "file_path": "src/ast.rs", "rank": 67, "score": 10.08166913665747 }, { "content": " player_key: Option<i64>,\n\n server_url: String,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct CreateResponse {\n\n pub attack: i64,\n\n pub defend: i64,\n\n}\n\n\n\nimpl From<&Exp> for CreateResponse {\n\n fn from(exp: &Exp) -> Self {\n\n let (_, tail) = exp.to_cons();\n\n let (keys, _) = tail.to_cons();\n\n\n\n let (attack_list, tail) = keys.to_cons();\n\n let (defend_list, _) = tail.to_cons();\n\n\n\n // (0, attack_player_key)\n\n let (_index, tail) = attack_list.to_cons();\n", "file_path": "src/api.rs", "rank": 68, "score": 10.035229997841576 }, { "content": " Detonate {\n\n id: i64,\n\n },\n\n Shoot {\n\n id: i64,\n\n x: i64,\n\n y: i64,\n\n power: i64,\n\n 
},\n\n Split {\n\n id: i64,\n\n stats: Stats,\n\n },\n\n}\n\n\n\nimpl Command {\n\n fn with_id(self, id: i64) -> Self {\n\n match self {\n\n | Command::Accelerate { id: _, x, y } => Command::Accelerate { id, x, y },\n\n | Command::Detonate { .. } => Command::Detonate { id },\n", "file_path": "src/game.rs", "rank": 69, "score": 9.337021495015996 }, { "content": " .and_then(Self::extract_text)\n\n .with_context(|| anyhow!(\"Failed to retrieve alien response for id '{}'\", id))\n\n }\n\n\n\n pub fn send_alien_message(\n\n &self,\n\n cache: &mut AtomCache,\n\n message: &Exp,\n\n ) -> anyhow::Result<Rc<Exp>> {\n\n log::trace!(\"Sending alien message: {}\", &message);\n\n self.inner\n\n .post(&format!(\"{}/aliens/send\", &self.server_url))\n\n .query(&[(\"apiKey\", &self.api_key)])\n\n .body(transport::modulate(message))\n\n .send()\n\n .and_then(Self::extract_text)\n\n .map(|response| transport::demodulate(&response, cache))\n\n .map(|response| {\n\n log::trace!(\"Received alien response: {}\", &response);\n\n response\n", "file_path": "src/api.rs", "rank": 70, "score": 9.260064682794713 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct AtomCache(HashMap<Atom, Rc<Exp>>);\n\n\n\nimpl AtomCache {\n\n pub fn get(&mut self, atom: Atom) -> Rc<Exp> {\n\n // Integers and variables are potentially unbounded,\n\n // so we avoid caching them to save memory.\n\n match atom {\n\n | Atom::Int(_)\n\n | Atom::Var(_) => Rc::new(Exp::Atom(atom)),\n\n | _ => {\n\n self.0\n\n .entry(atom)\n\n .or_insert_with(|| Rc::new(Exp::Atom(atom)))\n\n .clone()\n\n }\n", "file_path": "src/ast.rs", "rank": 71, "score": 9.129547809606146 }, { "content": " let message = list!(\n\n Exp::from(2),\n\n Exp::from(self.player_key.expect(\"Missing player key\")),\n\n Exp::Atom(Atom::Nil)\n\n );\n\n log::debug!(\"Sending `join` message: {}\", &message);\n\n self.send_alien_message(cache, &message)?;\n\n Ok(())\n\n }\n\n\n\n pub fn start(\n\n &self,\n\n cache: &mut AtomCache,\n\n 
stats: &game::Stats,\n\n ) -> anyhow::Result<game::Response> {\n\n let message = list!(\n\n Exp::from(3),\n\n Exp::from(self.player_key.expect(\"Missing player key\")),\n\n Exp::from(*stats),\n\n );\n", "file_path": "src/api.rs", "rank": 72, "score": 9.094612588625496 }, { "content": "use std::rc::Rc;\n\n\n\nuse crate::ast::Atom;\n\nuse crate::ast::Exp;\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct Response {\n\n /// Static game state.\n\n pub info: Info,\n\n\n\n /// Game completion stage.\n\n pub stage: Stage,\n\n\n\n /// Dynamic game state.\n\n pub state: Option<State>,\n\n}\n\n\n\nimpl From<&Exp> for Option<Response> {\n\n fn from(exp: &Exp) -> Self {\n\n\n", "file_path": "src/game.rs", "rank": 73, "score": 8.869790159754725 }, { "content": " Exp::from(id),\n\n )\n\n }\n\n | Command::Shoot { id, x, y, power } => {\n\n list!(\n\n Exp::from(2),\n\n Exp::from(id),\n\n pair!(Exp::from(x), Exp::from(y)),\n\n Exp::from(power),\n\n )\n\n }\n\n | Command::Split { id, stats } => {\n\n list!(\n\n Exp::from(3),\n\n Exp::from(id),\n\n Exp::from(stats),\n\n )\n\n }\n\n };\n\n Rc::try_unwrap(list)\n", "file_path": "src/game.rs", "rank": 74, "score": 8.779396521235128 }, { "content": " })\n\n .with_context(|| anyhow!(\"Failed to send alien message\"))\n\n }\n\n\n\n pub fn create(\n\n &self,\n\n cache: &mut AtomCache,\n\n ) -> anyhow::Result<CreateResponse> {\n\n let message = list!(Exp::from(1), Exp::from(0));\n\n log::debug!(\"Sending `create` message: {}\", &message);\n\n self.send_alien_message(cache, &message)\n\n .map(|response| {\n\n CreateResponse::from(&*response)\n\n })\n\n }\n\n\n\n pub fn join(\n\n &self,\n\n cache: &mut AtomCache,\n\n ) -> anyhow::Result<()> {\n", "file_path": "src/api.rs", "rank": 75, "score": 8.74276454609366 }, { "content": " | Command::Shoot { id: _, x, y, power } => Command::Shoot { id, x, y, power },\n\n | Command::Split { id: _, stats } => Command::Split { id, stats },\n\n }\n\n }\n\n}\n\n\n\nimpl From<&Exp> for 
Option<Command> {\n\n fn from(exp: &Exp) -> Self {\n\n\n\n log::debug!(\"Parsing command: {}\", exp);\n\n\n\n let (r#type, tail) = exp.to_cons();\n\n let r#type = r#type.to_int();\n\n\n\n // Dummy ID when parsing from a response\n\n let id = 0;\n\n\n\n match r#type {\n\n | 0 => {\n\n let (vec, _) = tail.to_cons();\n", "file_path": "src/game.rs", "rank": 76, "score": 8.512697017545797 }, { "content": " | list => Some(Stats::from(list)),\n\n };\n\n\n\n Info { ticks, role, enemy }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct State {\n\n /// Time inside the game.\n\n pub tick: i64,\n\n\n\n /// All ships and previously applied commands.\n\n pub ships: Vec<(Ship, Vec<Command>)>,\n\n}\n\n\n\nimpl From<&Exp> for State {\n\n fn from(exp: &Exp) -> Self {\n\n\n\n log::debug!(\"Parsing state: {}\", exp);\n", "file_path": "src/game.rs", "rank": 77, "score": 8.449876691287852 }, { "content": " .expect(\"Impossible: command Rc has single owner\")\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct Ship {\n\n pub role: Role,\n\n pub id: i64,\n\n pub x: i64,\n\n pub y: i64,\n\n pub vx: i64,\n\n pub vy: i64,\n\n pub stats: Stats,\n\n pub temp: i64,\n\n pub max_temp: i64,\n\n}\n\n\n\nimpl From<&Exp> for Ship {\n\n fn from(exp: &Exp) -> Self {\n\n\n", "file_path": "src/game.rs", "rank": 78, "score": 8.29904158674238 }, { "content": " | Exp::Atom(_) => None,\n\n | Exp::App(_, _, cache) => cache.borrow().clone(),\n\n }\n\n }\n\n\n\n pub fn set_cached(&self, exp: Rc<Exp>) {\n\n match self {\n\n | Exp::Atom(_) => (),\n\n | Exp::App(_, _, cache) => *cache.borrow_mut() = Some(exp),\n\n }\n\n }\n\n\n\n pub fn to_int(&self) -> i64 {\n\n match self {\n\n | Exp::Atom(Atom::Int(int)) => *int,\n\n | other => panic!(format!(\"Expected `int`, but found: {}\", other)),\n\n }\n\n }\n\n\n\n pub fn to_cons(&self) -> (&Rc<Exp>, &Rc<Exp>) {\n", "file_path": "src/ast.rs", "rank": 79, "score": 8.238069048483538 }, { "content": " App(Rc<Exp>, 
Rc<Exp>, RefCell<Option<Rc<Exp>>>),\n\n}\n\n\n\nimpl Exp {\n\n pub fn app<L, R>(lhs: L, rhs: R) -> Rc<Exp>\n\n where L: Into<Rc<Exp>>,\n\n R: Into<Rc<Exp>>,\n\n {\n\n Rc::new(Exp::App(lhs.into(), rhs.into(), Default::default()))\n\n }\n\n\n\n pub fn cons<L, R>(lhs: L, rhs: R) -> Rc<Exp>\n\n where L: Into<Rc<Exp>>,\n\n R: Into<Rc<Exp>>,\n\n {\n\n Self::app(Self::app(Exp::Atom(Atom::Cons), lhs), rhs)\n\n }\n\n\n\n pub fn get_cached(&self) -> Option<Rc<Exp>> {\n\n match self {\n", "file_path": "src/ast.rs", "rank": 80, "score": 8.107933976555621 }, { "content": "use std::fmt;\n\nuse std::rc::Rc;\n\n\n\nuse anyhow::anyhow;\n\nuse anyhow::Context as _;\n\nuse reqwest::blocking;\n\n\n\nuse crate::ast::Atom;\n\nuse crate::ast::AtomCache;\n\nuse crate::ast::Exp;\n\nuse crate::game;\n\nuse crate::transport;\n\n\n\n/// Responsible for communicating with the central ICFP server.\n\n///\n\n/// Abstracts over the communication protocol and transport method.\n\n#[allow(unused)]\n\npub struct Client {\n\n inner: blocking::Client,\n\n api_key: String,\n", "file_path": "src/api.rs", "rank": 81, "score": 8.07968054706646 }, { "content": " cache,\n\n &list!(\n\n Exp::from(4),\n\n Exp::from(self.player_key.expect(\"Missing player key\")),\n\n commands,\n\n ),\n\n )\n\n .and_then(Self::extract_game)\n\n }\n\n\n\n fn extract_text(response: blocking::Response) -> reqwest::Result<String> {\n\n response\n\n .error_for_status()\n\n .and_then(blocking::Response::text)\n\n }\n\n\n\n fn extract_game(response: Rc<Exp>) -> anyhow::Result<game::Response> {\n\n let response = <Option<game::Response>>::from(&*response)\n\n .ok_or_else(|| anyhow!(\"Received error response from server\"))?;\n\n log::debug!(\"Received response: {:#?}\", response);\n\n Ok(response)\n\n }\n\n}\n", "file_path": "src/api.rs", "rank": 82, "score": 8.049385186825212 }, { "content": " let (attack, _) = tail.to_cons();\n\n let attack = attack.to_int();\n\n\n\n // (0, defend_player_key)\n\n let (_index, tail) = 
defend_list.to_cons();\n\n let (defend, _) = tail.to_cons();\n\n let defend = defend.to_int();\n\n\n\n CreateResponse { attack, defend }\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Client {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n fmt.debug_struct(\"icfp::api::Client\")\n\n .field(\"server_url\", &self.server_url)\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "src/api.rs", "rank": 83, "score": 7.961383852254434 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<Role> for Exp {\n\n fn from(role: Role) -> Self {\n\n match role {\n\n | Role::Attack => Exp::Atom(Atom::Int(0)),\n\n | Role::Defend => Exp::Atom(Atom::Int(1)),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub enum Stage {\n\n NotStarted,\n\n Started,\n\n Finished,\n\n}\n\n\n", "file_path": "src/game.rs", "rank": 84, "score": 7.418497076680303 }, { "content": "#[macro_use]\n\npub mod ast;\n\n\n\nmod api;\n\npub mod draw;\n\nmod eval;\n\npub mod game;\n\nmod lex;\n\npub mod parse;\n\nmod token;\n\npub mod transport;\n\n\n\npub use api::Client;\n\npub use draw::draw;\n\npub use eval::eval;\n\npub use eval::interact;\n\npub use lex::lex;\n\npub use token::Token;\n", "file_path": "src/lib.rs", "rank": 85, "score": 7.150836355194629 }, { "content": "impl From<&Exp> for Info {\n\n fn from(exp: &Exp) -> Self {\n\n\n\n log::debug!(\"Parsing info: {}\", exp);\n\n\n\n let (ticks, tail) = exp.to_cons();\n\n let ticks = ticks.to_int();\n\n\n\n let (role, tail) = tail.to_cons();\n\n let role = Role::from(&**role);\n\n\n\n // (512, 1, 64) ?\n\n let (_, tail) = tail.to_cons();\n\n\n\n // (16, 128) ?\n\n let (_, tail) = tail.to_cons();\n\n\n\n let (enemy, _) = tail.to_cons();\n\n let enemy = match &**enemy {\n\n | Exp::Atom(Atom::Nil) => None,\n", "file_path": "src/game.rs", "rank": 86, "score": 6.692398007154418 }, { "content": "\n\nimpl From<Ship> for Exp {\n\n fn from(ship: Ship) -> Exp {\n\n let ship = list!(\n\n Exp::from(ship.role),\n\n Exp::from(ship.id),\n\n 
pair!(Exp::from(ship.x), Exp::from(ship.y)),\n\n pair!(Exp::from(ship.vx), Exp::from(ship.vy)),\n\n Exp::Atom(Atom::Var(4)),\n\n Exp::Atom(Atom::Var(5)),\n\n Exp::Atom(Atom::Var(6)),\n\n Exp::Atom(Atom::Var(7)),\n\n );\n\n\n\n Rc::try_unwrap(ship)\n\n .expect(\"Impossible: ship Rc has single owner\")\n\n }\n\n}\n\n\n\nimpl From<i64> for Exp {\n", "file_path": "src/game.rs", "rank": 87, "score": 6.6264869260298305 }, { "content": " }\n\n}\n\n\n\n/// Interaction protocol.\n\n#[derive(Clone, Debug, Default)]\n\npub struct Protocol {\n\n pub assignments: HashMap<u64, Rc<Exp>>,\n\n pub galaxy: u64,\n\n}\n\n\n\nimpl ops::Index<u64> for Protocol {\n\n type Output = Rc<Exp>;\n\n fn index(&self, var: u64) -> &Self::Output {\n\n &self.assignments[&var]\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Eq)]\n\npub enum Exp {\n\n Atom(Atom),\n", "file_path": "src/ast.rs", "rank": 88, "score": 6.546903430855185 }, { "content": "use std::io;\n\nuse std::io::Write;\n\nuse std::time;\n\nuse std::thread;\n\n\n\nuse crate::ast::Atom;\n\nuse crate::ast::Exp;\n\n\n\nconst CSI: &str = \"\\x1b[\";\n\nconst CLEAR_SCREEN: &str = \"\\x1b[2J\";\n\n\n", "file_path": "src/draw.rs", "rank": 89, "score": 6.4761898716239426 }, { "content": " // fn mod_neg_100() {\n\n // assert_eq!(super::modulate(-100), \"1011001100100\");\n\n // }\n\n\n\n // // #[test]\n\n // // fn round_trip() {\n\n // // let mut buffer = String::new();\n\n // // for value in 0..1000 {\n\n // // buffer.clear();\n\n // // super::modulate_mut(value, &mut buffer);\n\n // // assert_eq!(super::demodulate(&buffer), value);\n\n // // }\n\n // // }\n\n\n\n // #[test]\n\n // fn demodulate_0() {\n\n // assert_eq!(super::demodulate_int(\"010\"), (Value::Int(0), \"\"));\n\n // }\n\n\n\n // #[test]\n", "file_path": "src/transport.rs", "rank": 90, "score": 6.456480444390417 }, { "content": "use std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::ops;\n\nuse std::rc::Rc;\n\n\n\n#[macro_export]\n\nmacro_rules! 
list {\n\n () => {\n\n std::rc::Rc::new(crate::ast::Exp::Atom(crate::ast::Atom::Nil))\n\n };\n\n ($head:expr $(, $tail:expr)* $(,)?) => {\n\n crate::ast::Exp::cons($head, list!($($tail),*))\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! pair {\n\n ($lhs:expr, $rhs:expr) => {\n\n crate::ast::Exp::cons($lhs, $rhs)\n", "file_path": "src/ast.rs", "rank": 91, "score": 6.434207825072249 }, { "content": " log::debug!(\"Parsing response: {}\", exp);\n\n\n\n let (status, tail) = exp.to_cons();\n\n let status = status.to_int();\n\n\n\n match status {\n\n | 1 => {\n\n let (stage, tail) = tail.to_cons();\n\n let stage = Stage::from(&**stage);\n\n\n\n let (info, tail) = tail.to_cons();\n\n let info = Info::from(&**info);\n\n\n\n let (state, _) = tail.to_cons();\n\n let state = match &**state {\n\n | Exp::Atom(Atom::Nil) => None,\n\n | list => Some(State::from(list)),\n\n };\n\n\n\n Some(Response { info, stage, state })\n", "file_path": "src/game.rs", "rank": 92, "score": 6.296415005048082 }, { "content": "impl From<&Exp> for Stage {\n\n fn from(exp: &Exp) -> Self {\n\n\n\n log::debug!(\"Parsing stage: {}\", exp);\n\n\n\n match exp {\n\n | Exp::Atom(Atom::Int(0)) => Stage::NotStarted,\n\n | Exp::Atom(Atom::Int(1)) => Stage::Started,\n\n | Exp::Atom(Atom::Int(2)) => Stage::Finished,\n\n | _ => panic!(format!(\"Expected 0, 1, or 2 for stage, but found: {}\", exp)),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Stage> for Exp {\n\n fn from(stage: Stage) -> Self {\n\n match stage {\n\n | Stage::NotStarted => Exp::Atom(Atom::Int(0)),\n\n | Stage::Started => Exp::Atom(Atom::Int(1)),\n\n | Stage::Finished => Exp::Atom(Atom::Int(2)),\n\n }\n\n }\n\n}\n", "file_path": "src/game.rs", "rank": 93, "score": 6.091053856366457 }, { "content": " match self.to_cons_opt() {\n\n | Some(cons) => cons,\n\n | None => panic!(format!(\"Expected `cons` but found: {}\", self)),\n\n }\n\n }\n\n\n\n /// Extracts sub-trees `h`, and `t`:\n\n ///\n\n /// ```text\n\n /// app\n\n /// / \\\n\n /// app t\n\n /// / \\\n\n 
/// cons h\n\n /// ```\n\n pub fn to_cons_opt(&self) -> Option<(&Rc<Exp>, &Rc<Exp>)> {\n\n let (cons_h, t) = match self {\n\n | Exp::Atom(Atom::Nil) => return None,\n\n | Exp::Atom(atom) => panic!(format!(\"Expected `ap ap cons <CAR> <CDR>`, but found: {}\", atom)),\n\n | Exp::App(cons_h, t, _) => (cons_h, t),\n", "file_path": "src/ast.rs", "rank": 94, "score": 6.063762526350908 }, { "content": " fn from(int: i64) -> Exp {\n\n Exp::Atom(Atom::Int(int))\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub enum Role {\n\n Attack,\n\n Defend,\n\n}\n\n\n\nimpl From<&Exp> for Role {\n\n fn from(exp: &Exp) -> Self {\n\n\n\n log::debug!(\"Parsing role: {}\", exp);\n\n\n\n match exp {\n\n | Exp::Atom(Atom::Int(0)) => Role::Attack,\n\n | Exp::Atom(Atom::Int(1)) => Role::Defend,\n\n | _ => panic!(format!(\"Expected 0 or 1 for role, but found: {}\", exp)),\n", "file_path": "src/game.rs", "rank": 95, "score": 5.9830785498040555 }, { "content": " log::debug!(\"Sending `start` message: {}\", &message);\n\n self.send_alien_message(cache, &message)\n\n .and_then(Self::extract_game)\n\n }\n\n\n\n pub fn commands(\n\n &self,\n\n cache: &mut AtomCache,\n\n commands: &[game::Command],\n\n ) -> anyhow::Result<game::Response> {\n\n let commands = commands\n\n .iter()\n\n .rev()\n\n .fold(cache.get(Atom::Nil), |acc, command| {\n\n Exp::cons(Exp::from(*command), acc)\n\n });\n\n\n\n log::debug!(\"Sending `commands` message: {}\", &commands);\n\n\n\n self.send_alien_message(\n", "file_path": "src/api.rs", "rank": 96, "score": 5.961040768240908 }, { "content": " };\n\n\n\n let (cons, h) = match &**cons_h {\n\n | Exp::Atom(atom) => panic!(format!(\"Expected `ap cons <CAR>`, but found: {}\", atom)),\n\n | Exp::App(cons, h, _) => (cons, h),\n\n };\n\n\n\n match &**cons {\n\n | Exp::Atom(Atom::Cons) => Some((h, t)),\n\n | other => panic!(format!(\"Expected `cons`, but found: {}\", other)),\n\n }\n\n }\n\n}\n\n\n\nimpl PartialEq for Exp {\n\n fn eq(&self, rhs: &Self) -> 
bool {\n\n match (self, rhs) {\n\n | (Exp::Atom(lhs), Exp::Atom(rhs)) => lhs == rhs,\n\n | (Exp::App(llhs, lrhs, _), Exp::App(rlhs, rrhs, _)) => llhs == rlhs && lrhs == rrhs,\n\n | _ => false,\n", "file_path": "src/ast.rs", "rank": 97, "score": 5.244269932527824 }, { "content": " let (spawns, _) = tail.to_cons();\n\n let spawns = spawns.to_int();\n\n \n\n Stats { fuel, damage, coolant, spawns }\n\n }\n\n}\n\n\n\nimpl From<Stats> for Exp {\n\n fn from(stats: Stats) -> Exp {\n\n let stats = list!(\n\n Exp::from(stats.fuel),\n\n Exp::from(stats.damage),\n\n Exp::from(stats.coolant),\n\n Exp::from(stats.spawns),\n\n );\n\n Rc::try_unwrap(stats)\n\n .expect(\"Impossible: stats Rc has single owner\")\n\n }\n\n}\n\n\n", "file_path": "src/game.rs", "rank": 98, "score": 5.191033167370668 }, { "content": "\n\n let (tick, tail) = exp.to_cons();\n\n let tick = tick.to_int();\n\n\n\n let (_x1, tail) = tail.to_cons();\n\n\n\n let (mut ship_exps, _) = tail.to_cons();\n\n\n\n let mut ships = Vec::new();\n\n\n\n while let Some((head, tail)) = ship_exps.to_cons_opt() {\n\n \n\n let (ship_exp, rest) = head.to_cons();\n\n let ship = Ship::from(&**ship_exp);\n\n\n\n let mut commands = Vec::new();\n\n let (mut command_exps, _) = rest.to_cons();\n\n\n\n while let Some((command_exp, tail)) = command_exps.to_cons_opt() {\n\n <Option<Command>>::from(&**command_exp)\n", "file_path": "src/game.rs", "rank": 99, "score": 5.050326668028966 } ]
Rust
jormungandr/src/topology/mod.rs
MitchellTesla/jormungandr-rom_io
2325582d4dced77e2b023b12ffe6d014005078c8
use crate::network::p2p::Address; use jormungandr_lib::{interfaces::Subscription, time::SystemTime}; use serde::{Serialize, Serializer}; use std::{ convert::{TryFrom, TryInto}, fmt, hash::{Hash, Hasher}, }; mod gossip; pub mod layers; mod process; mod quarantine; #[allow(clippy::module_inception)] mod topology; pub use self::{ gossip::{Gossip, Gossips}, process::{start, TaskData, DEFAULT_NETWORK_STUCK_INTERVAL}, topology::{P2pTopology, View}, }; pub use quarantine::{QuarantineConfig, ReportRecords}; /** # topics definition for p2p interest subscriptions */ pub mod topic { use poldercast::Topic; pub const MESSAGES: Topic = Topic::new([0; 32]); pub const BLOCKS: Topic = Topic::new([1; 32]); } /** limits for the property::{Serialize/Deserialize} implementations */ pub mod limits { pub const MAX_GOSSIP_SIZE: usize = 512; pub const MAX_ID_SIZE: u64 = 32; } #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub struct NodeId(keynesis::key::ed25519::PublicKey); impl From<jormungandr_lib::interfaces::NodeId> for NodeId { fn from(id: jormungandr_lib::interfaces::NodeId) -> Self { let id_bytes = id.as_ref().as_ref(); NodeId(id_bytes.try_into().unwrap()) } } impl From<NodeId> for jormungandr_lib::interfaces::NodeId { fn from(node_id: NodeId) -> jormungandr_lib::interfaces::NodeId { jormungandr_lib::interfaces::NodeId::from_hex(&node_id.0.to_string()).unwrap() } } impl TryFrom<&[u8]> for NodeId { type Error = chain_crypto::PublicKeyError; fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> { use chain_crypto::{Ed25519, PublicKey}; Ok(Self::from( PublicKey::<Ed25519>::from_binary(bytes) .map(jormungandr_lib::interfaces::NodeId::from)?, )) } } impl fmt::Display for NodeId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.0) } } impl AsRef<keynesis::key::ed25519::PublicKey> for NodeId { fn as_ref(&self) -> &keynesis::key::ed25519::PublicKey { &self.0 } } impl Serialize for NodeId { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 
where S: Serializer, { if serializer.is_human_readable() { self.0.to_string().serialize(serializer) } else { self.0.as_ref().serialize(serializer) } } } pub type Peer = Gossip; #[derive(Eq, Clone, Serialize, Debug)] pub struct PeerInfo { pub id: NodeId, pub address: Address, pub last_update: SystemTime, pub quarantined: Option<SystemTime>, pub subscriptions: Vec<Subscription>, } impl PartialEq for PeerInfo { fn eq(&self, other: &Self) -> bool { self.id == other.id && self.address == other.address } } impl Hash for PeerInfo { fn hash<H: Hasher>(&self, state: &mut H) { self.id.hash(state); self.address.hash(state); } } impl From<Peer> for PeerInfo { fn from(other: Peer) -> Self { let other: poldercast::Gossip = other.into(); Self { id: NodeId(other.id()), address: other.address(), last_update: other.time().to_system_time().into(), quarantined: None, subscriptions: other .subscriptions() .iter() .map(|s| Subscription { topic: s.topic().to_string(), interest: s .interest_level() .priority_score(poldercast::InterestLevel::ZERO) as u32, }) .collect(), } } }
use crate::network::p2p::Address; use jormungandr_lib::{interfaces::Subscription, time::SystemTime}; use serde::{Serialize, Serializer}; use std::{ convert::{TryFrom, TryInto}, fmt, hash::{Hash, Hasher}, }; mod gossip; pub mod layers; mod process; mod quarantine; #[allow(clippy::module_inception)] mod topology; pub use self::{ gossip::{Gossip, Gossips}, process::{start, TaskData, DEFAULT_NETWORK_STUCK_INTERVAL}, topology::{P2pTopology, View}, }; pub use quarantine::{QuarantineConfig, ReportRecords}; /** # topics definition for p2p interest subscriptions */ pub mod topic { use poldercast::Topic; pub const MESSAGES: Topic = Topic::new([0; 32]); pub const BLOCKS: Topic = Topic::new([1; 32]); } /** limits for the property::{Serialize/Deserialize} implementations */ pub mod limits { pub const MAX_GOSSIP_SIZE: usize = 512; pub const MAX_ID_SIZE: u64 = 32; } #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub struct NodeId(keynesis::key::ed25519::PublicKey); impl From<jormungandr_lib::interfaces::NodeId> for NodeId { fn from(id: jormungandr_lib::interfaces::NodeId) -> Self { let id_bytes = id.as_ref().as_ref(); NodeId(id_bytes.try_into().unwrap()) } } impl From<NodeId> for jormungandr_lib::interfaces::NodeId { fn from(node_id: NodeId) -> jormungandr_lib::interfaces::NodeId { jormungandr_lib::interfaces::NodeId::from_hex(&node_id.0.to_string()).unwrap() } } impl TryFrom<&[u8]> for NodeId { type Error = chain_crypto::PublicKeyError; fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> { use chain_crypto::{Ed25519, PublicKey}; Ok(Self::from( PublicKey::<Ed25519>::from_binary(bytes) .map(jormungandr_lib::interfaces::NodeId::from)?, )) } } impl fmt::Display for NodeId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.0) } } impl AsRef<keynesis::key::ed25519::PublicKey> for NodeId { fn as_ref(&self) -> &keynesis::key::ed25519::PublicKey { &self.0 } } impl Serialize for NodeId { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> 
where S: Serializer, { if serializer.is_human_readable() { self.0.to_string().serialize(serializer) } else { self.0.as_ref().serialize(serializer) } } } pub type Peer = Gossip; #[derive(Eq, Clone, Serialize, Debug)] pub struct PeerInfo { pub id: NodeId, pub address: Address, pub last_update: SystemTime, pub quarantined: Option<SystemTime>, pub subscriptions: Vec<Subscription>, } impl PartialEq for PeerInfo { fn eq(&self, other: &Self) -> bool { self.id == other.id && self.address == other.address } } impl Hash for PeerInfo { fn hash<H: Hasher>(&self, state: &mut H) { self.id.hash(state); self.address.hash(state); } } impl From<Peer> for PeerInfo {
}
fn from(other: Peer) -> Self { let other: poldercast::Gossip = other.into(); Self { id: NodeId(other.id()), address: other.address(), last_update: other.time().to_system_time().into(), quarantined: None, subscriptions: other .subscriptions() .iter() .map(|s| Subscription { topic: s.topic().to_string(), interest: s .interest_level() .priority_score(poldercast::InterestLevel::ZERO) as u32, }) .collect(), } }
function_block-full_function
[ { "content": "pub fn load_block(block_reader: impl BufRead) -> Result<Block, Error> {\n\n Block::deserialize(&mut Codec::new(block_reader)).map_err(Error::BlockFileCorrupted)\n\n}\n\n\n\n#[derive(StructOpt)]\n\npub struct Common {\n\n #[structopt(flatten)]\n\n pub input: Input,\n\n\n\n /// the file path to the block to create\n\n ///\n\n /// If not available the command will expect to write the block to\n\n /// to the standard output\n\n #[structopt(long = \"output\", parse(from_os_str), name = \"FILE_OUTPUT\")]\n\n pub output_file: Option<std::path::PathBuf>,\n\n}\n\n\n\nimpl Common {\n\n pub fn open_output(&self) -> Result<impl Write, Error> {\n\n io::open_file_write(&self.output_file).map_err(|source| Error::OutputInvalid {\n\n source,\n\n path: self.output_file.clone().unwrap_or_default(),\n\n })\n\n }\n\n}\n", "file_path": "jcli/src/jcli_lib/block/mod.rs", "rank": 0, "score": 390822.6403017366 }, { "content": "pub fn assert_equals<A: fmt::Debug + PartialEq>(\n\n left: &A,\n\n right: &A,\n\n info: &str,\n\n) -> Result<(), Error> {\n\n if left != right {\n\n return Err(Error::VerificationFailed(format!(\n\n \"{}. 
{:?} vs {:?}\",\n\n info, left, right\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/jormungandr-automation/src/testing/verify.rs", "rank": 1, "score": 358255.4171534509 }, { "content": "fn apply_block_to_addresses(mut addresses: Addresses, block: &ExplorerBlock) -> Addresses {\n\n let transactions = block.transactions.values();\n\n\n\n for tx in transactions {\n\n let id = tx.id();\n\n\n\n // A Hashset is used for preventing duplicates when the address is both an\n\n // input and an output in the given transaction\n\n\n\n let included_addresses: std::collections::HashSet<ExplorerAddress> = tx\n\n .outputs()\n\n .iter()\n\n .map(|output| output.address.clone())\n\n .chain(tx.inputs().iter().map(|input| input.address.clone()))\n\n .collect();\n\n\n\n for address in included_addresses {\n\n addresses = addresses.insert_or_update_simple(\n\n address,\n\n Arc::new(PersistentSequence::new().append(id)),\n\n |set| {\n\n let new_set = set.append(id);\n\n Some(Arc::new(new_set))\n\n },\n\n )\n\n }\n\n }\n\n addresses\n\n}\n\n\n", "file_path": "explorer/src/db/mod.rs", "rank": 2, "score": 338228.2352160944 }, { "content": "pub fn open_block_file(input_file: &Option<PathBuf>) -> Result<impl BufRead, Error> {\n\n io::open_file_read(input_file).map_err(|source| Error::InputInvalid {\n\n source,\n\n path: input_file.clone().unwrap_or_default(),\n\n })\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/block/mod.rs", "rank": 3, "score": 333669.85819417966 }, { "content": "fn get_block_from_storage(storage: &Storage, id: HeaderHash) -> Result<Block, Error> {\n\n match storage.get(id) {\n\n Ok(Some(block)) => Ok(block),\n\n Ok(None) => Err(Error::not_found(format!(\n\n \"block {} is not known to this node\",\n\n id\n\n ))),\n\n Err(e) => Err(e.into()),\n\n }\n\n}\n\n\n\n// Stop after sending the first Err() variant\n\n//\n\n// Common base for GetBlocks and GetHeaders\n\nasync fn fuse_send_items<T, V>(\n\n items: T,\n\n reply_handle: ReplyStreamHandle<V>,\n\n) -> Result<(), 
ReplySendError>\n\nwhere\n\n T: IntoIterator<Item = Result<V, Error>>,\n", "file_path": "jormungandr/src/client.rs", "rank": 4, "score": 329051.490143258 }, { "content": "pub fn submit_hashrate(_hash_rate: H256, _id: H256, _context: &Context) -> Result<bool, Error> {\n\n Err(Error::MiningIsNotAllowed)\n\n}\n", "file_path": "jormungandr/src/jrpc/eth_miner/logic.rs", "rank": 5, "score": 315986.659701103 }, { "content": "pub fn serialize_hash<S>(hash: &Blake2b256, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n if serializer.is_human_readable() {\n\n hash.to_string().serialize(serializer)\n\n } else {\n\n hash.as_ref().serialize(serializer)\n\n }\n\n}\n\n\n", "file_path": "jormungandr-lib/src/crypto/serde.rs", "rank": 6, "score": 306114.00566736574 }, { "content": "pub fn assert_connected_cnt(node: &JormungandrProcess, peer_connected_cnt: usize, info: &str) {\n\n let stats = node.rest().stats().unwrap().stats.expect(\"empty stats\");\n\n assert_eq!(\n\n &peer_connected_cnt,\n\n &stats.peer_connected_cnt.clone(),\n\n \"{}: peer_connected_cnt, Node {}\",\n\n info,\n\n node.alias()\n\n );\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 7, "score": 304010.0771651147 }, { "content": "pub fn spawn_network(config: Config, mut topology: Topology, args: Args) -> Result<(), Error> {\n\n println!(\"Building network...\");\n\n let mut controller = NetworkBuilder::default()\n\n .topology(topology.clone())\n\n .session_settings(config.session.clone())\n\n .blockchain_config(config.build_blockchain())\n\n .build()?;\n\n\n\n let mut processes: HashMap<NodeAlias, JormungandrProcess> = HashMap::new();\n\n\n\n while !topology.nodes.is_empty() {\n\n let alias = topology\n\n .nodes\n\n .values()\n\n .find(|n| n.trusted_peers.is_empty())\n\n .map(|n| n.alias.clone())\n\n .ok_or(Error::CircularTrust)?;\n\n\n\n let spawn_params = config.node_spawn_params(&alias)?;\n\n\n", "file_path": 
"testing/hersir/src/spawn/standard.rs", "rank": 8, "score": 299126.04969647573 }, { "content": "pub fn spawn_network(config: Config, mut topology: Topology, args: Args) -> Result<(), Error> {\n\n let (tx, rx) = channel();\n\n\n\n let mut monitor_controller = MonitorControllerBuilder::new(&config.session.title)\n\n .topology(topology.clone())\n\n .blockchain(config.build_blockchain())\n\n .build(config.session.clone())?;\n\n\n\n let mut processes: HashMap<NodeAlias, MonitorNode> = HashMap::new();\n\n\n\n while !topology.nodes.is_empty() {\n\n let alias = topology\n\n .nodes\n\n .values()\n\n .find(|n| n.trusted_peers.is_empty())\n\n .map(|n| n.alias.clone())\n\n .ok_or(Error::CircularTrust)?;\n\n\n\n let spawn_params = config.node_spawn_params(&alias)?;\n\n\n", "file_path": "testing/hersir/src/spawn/monitor.rs", "rank": 9, "score": 299126.04969647573 }, { "content": "fn mk_address_1<A, F>(prefix: &str, s: PublicKey<A>, testing: bool, f: F)\n\nwhere\n\n F: FnOnce(PublicKey<A>) -> Kind,\n\n A: AsymmetricPublicKey,\n\n{\n\n let discrimination = mk_discrimination(testing);\n\n let kind = f(s);\n\n mk_address(prefix, discrimination, kind);\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/address.rs", "rank": 10, "score": 297702.8329025345 }, { "content": "pub fn uninstall_filter(_filter_id: Number, _context: &Context) -> Result<bool, Error> {\n\n // TODO implement\n\n Ok(true)\n\n}\n\n\n", "file_path": "jormungandr/src/jrpc/eth_filter/logic.rs", "rank": 11, "score": 293180.36446720414 }, { "content": "pub fn assert_are_in_quarantine(\n\n node: &JormungandrProcess,\n\n peers: Vec<&JormungandrProcess>,\n\n info: &str,\n\n) {\n\n let available_list = node.rest().p2p_quarantined().unwrap();\n\n assert_record_is_present(available_list, peers, \"quarantine\", info)\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 12, "score": 291541.3413040429 }, { "content": "pub fn ask_yes_or_no(with_output: bool) -> std::io::Result<bool> {\n\n 
if with_output {\n\n print!(\"Continue? Yes[y] or No[n]? \");\n\n std::io::stdout().flush()?;\n\n }\n\n let mut buff = String::new();\n\n std::io::stdin().read_line(&mut buff)?;\n\n Ok(matches!(buff.to_ascii_lowercase().trim_end(), \"yes\" | \"y\"))\n\n}\n", "file_path": "jcli/src/jcli_lib/utils/io.rs", "rank": 13, "score": 290641.87518218264 }, { "content": "#[test]\n\npub fn test_correct_error_is_returned_for_incorrect_block_id_in_next_block_id_request() {\n\n let jcli: JCli = Default::default();\n\n let incorrect_block_id = \"e1049ea45726f0b1fc473af54f706546b3331765abf89ae9e6a8333e49621641aa\";\n\n\n\n let jormungandr = Starter::new().start().unwrap();\n\n\n\n jcli.rest().v0().block().next_expect_fail(\n\n incorrect_block_id,\n\n 1,\n\n jormungandr.rest_uri(),\n\n \"node rejected request because of invalid parameters\",\n\n );\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/rest/block.rs", "rank": 14, "score": 290037.3570457317 }, { "content": "pub fn assert_empty_quarantine(node: &JormungandrProcess, info: &str) {\n\n let quarantine = node.rest().p2p_quarantined().unwrap();\n\n assert_eq!(\n\n quarantine,\n\n vec![],\n\n \"{}: Peer {} has got non empty quarantine list\",\n\n info,\n\n node.alias()\n\n )\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 15, "score": 289466.72518486844 }, { "content": "pub fn assert_are_not_in_network_view(\n\n node: &JormungandrProcess,\n\n peers: Vec<&JormungandrProcess>,\n\n info: &str,\n\n) {\n\n let network_view = node.rest().network_stats().unwrap();\n\n for peer in peers {\n\n assert!(\n\n network_view\n\n .iter()\n\n .any(|info| info.addr == Some(peer.address())),\n\n \"{}: Peer {} is present in network view list, while it should not\",\n\n info,\n\n peer.alias()\n\n );\n\n }\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 16, "score": 286485.6739112848 }, { "content": "pub fn 
assert_are_in_network_view(\n\n node: &JormungandrProcess,\n\n peers: Vec<&JormungandrProcess>,\n\n info: &str,\n\n) {\n\n let network_view = node.rest().p2p_view().unwrap();\n\n for peer in peers {\n\n assert!(\n\n network_view\n\n .iter()\n\n .any(|address| *address == peer.address().to_string()),\n\n \"{}: Peer {} is not present in network view list\",\n\n info,\n\n peer.alias()\n\n );\n\n }\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 17, "score": 286485.6739112848 }, { "content": "#[test]\n\n#[cfg(windows)]\n\npub fn cors_https() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"https://domain.com\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n let jormungandr = Starter::new().config(config).start_async().unwrap();\n\n\n\n let mut rest_client = jormungandr.rest();\n\n rest_client.set_origin(\"https://domain.com\");\n\n\n\n assert!(rest_client.raw().stats()?.status().is_success());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/cors.rs", "rank": 18, "score": 286429.2859339479 }, { "content": "#[test]\n\npub fn cors_multi_domain() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"http://domain.com;http://other_domain.com\"\n\n .to_owned()\n\n .into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n Starter::new()\n\n .config(config)\n\n .start_fail(\"invalid value: string \\\"http://domain.com;http://other_domain.com\\\"\");\n\n\n\n Ok(())\n\n}\n", "file_path": 
"testing/jormungandr-integration-tests/src/jormungandr/cors.rs", "rank": 19, "score": 283212.64881809964 }, { "content": "#[test]\n\npub fn cors_illegal_domain() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"http://domain.com\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n let jormungandr = Starter::new().config(config).start().unwrap();\n\n\n\n let mut rest_client = jormungandr.rest();\n\n rest_client.set_origin(\"http://other_domain.com\");\n\n\n\n assert_request_failed_due_to_cors(&rest_client)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/cors.rs", "rank": 20, "score": 283212.64881809964 }, { "content": "#[test]\n\npub fn cors_wrong_delimiter() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"http://domain.com,http://other_domain.com\"\n\n .to_owned()\n\n .into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n Starter::new()\n\n .config(config)\n\n .start_fail(\"rest.cors.allowed_origins[0]: invalid value\");\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/cors.rs", "rank": 21, "score": 283212.64881809964 }, { "content": "#[test]\n\n#[cfg(windows)]\n\npub fn cors_single_domain() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"http://domain.com\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n 
.build(&temp_dir);\n\n\n\n let jormungandr = Starter::new().config(config).start_async().unwrap();\n\n\n\n let mut rest_client = jormungandr.rest();\n\n rest_client.set_origin(\"http://domain.com\");\n\n\n\n assert!(rest_client.raw().stats()?.status().is_success());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/cors.rs", "rank": 22, "score": 283212.64881809964 }, { "content": "#[test]\n\npub fn test_correct_error_is_returned_for_incorrect_block_id() {\n\n let jcli: JCli = Default::default();\n\n let incorrect_block_id = \"e1049ea45726f0b1fc473af54f706546b3331765abf89ae9e6a8333e49621641aa\";\n\n let jormungandr = Starter::new().start().unwrap();\n\n\n\n jcli.rest().v0().block().get_expect_fail(\n\n incorrect_block_id,\n\n jormungandr.rest_uri(),\n\n \"node rejected request because of invalid parameters\",\n\n );\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/rest/block.rs", "rank": 23, "score": 283190.0228758994 }, { "content": "pub fn do_for_all_alias<F: Fn(&JormungandrProcess), G: Fn(&JormungandrProcess)>(\n\n alias: &Option<String>,\n\n nodes: &[JormungandrProcess],\n\n legacy_nodes: &[JormungandrProcess],\n\n f: F,\n\n g: G,\n\n) {\n\n if let Some(alias) = alias {\n\n if let Some(node) = nodes.iter().find(|x| *x.alias() == *alias) {\n\n f(node);\n\n }\n\n if let Some(node) = legacy_nodes.iter().find(|x| *x.alias() == *alias) {\n\n g(node)\n\n }\n\n return;\n\n }\n\n\n\n for node in nodes.iter() {\n\n f(node);\n\n }\n\n for node in legacy_nodes.iter() {\n\n g(node);\n\n }\n\n}\n", "file_path": "testing/hersir/src/controller/interactive/controller.rs", "rank": 24, "score": 283100.96699330315 }, { "content": "// Validate the server peer's node ID\n\nfn validate_peer_auth(auth: AuthenticatedNodeId, nonce: &[u8]) -> Result<NodeId, ConnectError> {\n\n use super::super::convert::Decode;\n\n auth.verify(nonce)\n\n .map_err(ConnectError::PeerSignatureVerificationFailed)?;\n\n 
chain_network::data::NodeId::from(auth)\n\n .decode()\n\n .map_err(ConnectError::InvalidNodeId)\n\n}\n\n\n\n/// Handle used to monitor the P2P client in process of\n\n/// establishing a connection and subscription streams.\n\n///\n\n/// If the handle is dropped before the connection is established,\n\n/// the client connection is canceled.\n\npub struct ConnectHandle {\n\n receiver: oneshot::Receiver<PeerComms>,\n\n}\n\n\n\n/// An error type to signal that the connection was not established.\n\n/// The reason should be logged already, so this error type should not be\n", "file_path": "jormungandr/src/network/client/connect.rs", "rank": 25, "score": 282494.37510606967 }, { "content": "pub fn parse_shift(from: &str) -> Result<(BlockDate, bool), BlockDateParseError> {\n\n if let Some(stripped) = from.strip_prefix('~') {\n\n BlockDate::from_str(stripped).map(|d| (d, true))\n\n } else {\n\n BlockDate::from_str(from).map(|d| (d, false))\n\n }\n\n}\n", "file_path": "testing/mjolnir/src/mjolnir_lib/args.rs", "rank": 26, "score": 281971.57398842496 }, { "content": "pub fn copy_initial_storage_if_used(\n\n config: &ClientLoadConfig,\n\n storage_folder: &str,\n\n temp_dir: &TempDir,\n\n) {\n\n if let Some(storage) = config.initial_storage() {\n\n let client_storage: PathBuf = temp_dir.child(storage_folder).path().into();\n\n if client_storage.exists() {\n\n fs::remove_dir_all(&client_storage).expect(\"cannot remove existing client storage\");\n\n }\n\n fs::create_dir(&client_storage).expect(\"cannot create client storage\");\n\n file::copy_folder(storage, &client_storage, true).unwrap()\n\n }\n\n}\n\n\n", "file_path": "testing/mjolnir/src/mjolnir_lib/bootstrap/scenario/mod.rs", "rank": 27, "score": 281756.429165279 }, { "content": "#[test]\n\npub fn cors_malformed_domain_no_http() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: 
vec![\"domain.com\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n Starter::new()\n\n .config(config)\n\n .start_fail(\"invalid value: string \\\"domain.com\\\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/cors.rs", "rank": 28, "score": 280112.9133626149 }, { "content": "#[test]\n\n#[cfg(windows)]\n\npub fn cors_ip_versus_domain() -> Result<(), Box<dyn std::error::Error>> {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_cors_config(Cors {\n\n allowed_origins: vec![\"http://127.0.0.1\".to_owned().into()],\n\n max_age_secs: None,\n\n allowed_headers: vec![],\n\n allowed_methods: vec![],\n\n })\n\n .build(&temp_dir);\n\n\n\n let jormungandr = Starter::new().config(config).start_async().unwrap();\n\n\n\n let mut rest_client = jormungandr.rest();\n\n rest_client.set_origin(\"http://localhost\");\n\n\n\n assert_eq!(rest_client.raw().stats()?.status(), 403);\n\n Ok(())\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/cors.rs", "rank": 29, "score": 280112.9133626149 }, { "content": "type GetNextBlockScheduler = FireForgetScheduler<HeaderHash, NodeId, ()>;\n\n\n\nconst TIP_UPDATE_QUEUE_SIZE: usize = 10;\n\n\n\nconst DEFAULT_TIMEOUT_PROCESS_LEADERSHIP: u64 = 5;\n\nconst DEFAULT_TIMEOUT_PROCESS_ANNOUNCEMENT: u64 = 5;\n\nconst DEFAULT_TIMEOUT_PROCESS_BLOCKS: u64 = 60;\n\nconst DEFAULT_TIMEOUT_PROCESS_HEADERS: u64 = 60;\n\n\n\nconst PULL_HEADERS_SCHEDULER_CONFIG: FireForgetSchedulerConfig = FireForgetSchedulerConfig {\n\n max_running: 16,\n\n max_running_same_task: 2,\n\n command_channel_size: 1024,\n\n timeout: Duration::from_millis(500),\n\n};\n\n\n\nconst GET_NEXT_BLOCK_SCHEDULER_CONFIG: FireForgetSchedulerConfig = FireForgetSchedulerConfig {\n\n max_running: 16,\n\n max_running_same_task: 2,\n\n command_channel_size: 1024,\n", "file_path": 
"jormungandr/src/blockchain/process.rs", "rank": 30, "score": 279386.0270315394 }, { "content": "fn print_hash(input: Input) -> Result<(), Error> {\n\n let block = input.load_block()?;\n\n println!(\"{}\", block.id());\n\n Ok(())\n\n}\n\n\n\n/// create block 0 of the blockchain (i.e. the genesis block)\n\n#[derive(StructOpt)]\n\n#[structopt(name = \"genesis\", rename_all = \"kebab-case\")]\n\npub enum Genesis {\n\n /// Create a default Genesis file with appropriate documentation\n\n /// to help creating the YAML file\n\n Init,\n\n\n\n /// create the block 0 file (the genesis block of the blockchain)\n\n /// from a given yaml file\n\n ///\n\n Encode(Common),\n\n\n\n /// Decode the block 0 and print the corresponding YAML file\n", "file_path": "jcli/src/jcli_lib/block/mod.rs", "rank": 31, "score": 278440.3604050233 }, { "content": "fn mk_address_2<A1, A2, F>(prefix: &str, s: PublicKey<A1>, d: PublicKey<A2>, testing: bool, f: F)\n\nwhere\n\n F: FnOnce(PublicKey<A1>, PublicKey<A2>) -> Kind,\n\n A1: AsymmetricPublicKey,\n\n A2: AsymmetricPublicKey,\n\n{\n\n let discrimination = mk_discrimination(testing);\n\n let kind = f(s, d);\n\n mk_address(prefix, discrimination, kind);\n\n}\n", "file_path": "jcli/src/jcli_lib/address.rs", "rank": 32, "score": 277361.77419522137 }, { "content": "pub fn mining(_context: &Context) -> Result<bool, Error> {\n\n Err(Error::MiningIsNotAllowed)\n\n}\n\n\n", "file_path": "jormungandr/src/jrpc/eth_miner/logic.rs", "rank": 33, "score": 276957.98110774875 }, { "content": "fn filter_gossip_node(node: &Gossip, config: &Configuration) -> bool {\n\n if config.allow_private_addresses {\n\n node.has_valid_address()\n\n } else {\n\n node.is_global()\n\n }\n\n}\n\n\n", "file_path": "jormungandr/src/network/subscription.rs", "rank": 34, "score": 275970.7430820425 }, { "content": "#[test]\n\npub fn topics_of_interest_influences_node_sync_ability() {\n\n const FAST_CLIENT: &str = \"FAST_CLIENT\";\n\n const SLOW_CLIENT: &str = \"SLOW_CLIENT\";\n\n\n\n let 
mut network_controller = NetworkBuilder::default()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(SERVER))\n\n .with_node(Node::new(FAST_CLIENT).with_trusted_peer(SERVER))\n\n .with_node(Node::new(SLOW_CLIENT).with_trusted_peer(SERVER)),\n\n )\n\n .blockchain_config(Blockchain::default().with_leader(SERVER))\n\n .wallet_template(\n\n WalletTemplateBuilder::new(ALICE)\n\n .with(1_000_000)\n\n .delegated_to(SERVER)\n\n .build(),\n\n )\n\n .wallet_template(\n\n WalletTemplateBuilder::new(BOB)\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/connections.rs", "rank": 35, "score": 273440.8746300827 }, { "content": "fn apply_block_to_blocks(blocks: Blocks, block: &ExplorerBlock) -> Result<Blocks, Error> {\n\n let block_id = block.id();\n\n blocks\n\n .insert(block_id, Arc::new(block.clone()))\n\n .map_err(|_| Error::BlockAlreadyExists(block_id))\n\n}\n\n\n", "file_path": "explorer/src/db/mod.rs", "rank": 36, "score": 272403.93420518766 }, { "content": "fn parse_block_hash(hex: &str) -> Result<Hash, Error> {\n\n Blake2b256::from_str(hex)\n\n .map_err(Into::into)\n\n .map(Into::into)\n\n}\n\n\n", "file_path": "jormungandr/src/rest/v0/logic.rs", "rank": 37, "score": 271327.07612635434 }, { "content": "pub fn protocol_verion(_context: &Context) -> Result<u64, Error> {\n\n // TODO implement\n\n Ok(0)\n\n}\n\n\n", "file_path": "jormungandr/src/jrpc/eth_chain_info/logic.rs", "rank": 38, "score": 269735.2705155808 }, { "content": "pub fn assert(statement: bool, info: &str) -> Result<(), Error> {\n\n if !statement {\n\n return Err(Error::VerificationFailed(info.to_string()));\n\n }\n\n Ok(())\n\n}\n", "file_path": "testing/jormungandr-automation/src/testing/verify.rs", "rank": 39, "score": 267807.56851964525 }, { "content": "pub fn wait(seconds: u64) {\n\n std::thread::sleep(Duration::from_secs(seconds));\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/utils/mod.rs", "rank": 40, "score": 267213.7391127198 
}, { "content": "#[test]\n\npub fn test_correct_error_is_returned_for_non_existent_genesis_block() {\n\n let temp_dir = TempDir::new().unwrap();\n\n let block_file = temp_dir.child(\"block-0.bin\");\n\n let jcli: JCli = Default::default();\n\n jcli.genesis().hash_expect_fail(block_file.path(), \"file\");\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/genesis/hash.rs", "rank": 41, "score": 264806.6681417006 }, { "content": "pub fn sign(_address: H160, _message: Bytes, _context: &Context) -> Result<H512, Error> {\n\n // TODO implement\n\n Ok(H512::zero())\n\n}\n\n\n", "file_path": "jormungandr/src/jrpc/eth_transaction/logic.rs", "rank": 42, "score": 261527.02736027798 }, { "content": "pub fn get_uncle_count_by_hash(_: H256, _: &Context) -> Result<Option<Number>, Error> {\n\n // jormungandr block does not have any ethereum \"uncles\" so we allways return 0\n\n Ok(Some(0.into()))\n\n}\n\n\n", "file_path": "jormungandr/src/jrpc/eth_block_info/logic.rs", "rank": 43, "score": 260362.24729518022 }, { "content": "fn delegation_ratio_sum(pool_ids: &[WeightedPoolId]) -> Result<u8, Error> {\n\n let parts = pool_ids\n\n .iter()\n\n .map(|pool_id| match pool_id.weight {\n\n 0 => Err(Error::PoolDelegationWithZeroWeight),\n\n weight => Ok(weight as u64),\n\n })\n\n .sum::<Result<_, _>>()?;\n\n u8::try_from(parts).map_err(|_| Error::InvalidPoolDelegationWeights {\n\n actual: parts,\n\n max: u8::max_value() as u64,\n\n })\n\n}\n\n\n\nimpl FromStr for WeightedPoolId {\n\n type Err = Box<dyn StdError>;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let mut split = s.splitn(2, ':');\n\n Ok(WeightedPoolId {\n\n pool_id: split.next().unwrap().parse()?,\n\n weight: split.next().map_or(Ok(1), str::parse)?,\n\n })\n\n }\n\n}\n", "file_path": "jcli/src/jcli_lib/certificate/weighted_pool_ids.rs", "rank": 44, "score": 259199.44691332913 }, { "content": "fn network_block_error_into_reply(err: chain::Error) -> intercom::Error {\n\n use 
super::chain::Error::*;\n\n\n\n match err {\n\n Storage(e) => intercom::Error::failed(e),\n\n Ledger(e) => intercom::Error::failed_precondition(e),\n\n Block0(e) => intercom::Error::failed(e),\n\n MissingParentBlock(_) => intercom::Error::failed_precondition(err.to_string()),\n\n BlockHeaderVerificationFailed(_) => intercom::Error::invalid_argument(err.to_string()),\n\n _ => intercom::Error::failed(err.to_string()),\n\n }\n\n}\n\n\n", "file_path": "jormungandr/src/blockchain/process.rs", "rank": 45, "score": 259089.10595407453 }, { "content": "pub fn post_fragment(args: RestArgs, fragment: Fragment) -> Result<String, Error> {\n\n let fragment_id = args\n\n .client()?\n\n .post(&[\"v0\", \"message\"])\n\n .body(fragment.serialize_as_vec()?)\n\n .execute()?\n\n .text()?;\n\n Ok(fragment_id)\n\n}\n", "file_path": "jcli/src/jcli_lib/rest/v0/message/mod.rs", "rank": 46, "score": 259058.72716392184 }, { "content": "type BlockEventAnnounceStream = stream::Map<OutboundSubscription<Header>, fn(Header) -> BlockEvent>;\n\n\n", "file_path": "jormungandr/src/network/p2p/comm.rs", "rank": 47, "score": 259058.29447787718 }, { "content": "struct Process {\n\n input: MessageQueue<TopologyMsg>,\n\n network_msgbox: MessageBox<NetworkMsg>,\n\n gossip_interval: Interval,\n\n network_stuck_check: Duration,\n\n topology: P2pTopology,\n\n}\n\n\n\npub struct TaskData {\n\n pub network_msgbox: MessageBox<NetworkMsg>,\n\n pub topology_queue: MessageQueue<TopologyMsg>,\n\n pub initial_peers: Vec<Peer>,\n\n pub config: Configuration,\n\n pub stats_counter: Metrics,\n\n}\n\n\n\npub async fn start(task_data: TaskData) {\n\n let TaskData {\n\n network_msgbox,\n\n topology_queue,\n", "file_path": "jormungandr/src/topology/process.rs", "rank": 48, "score": 257790.26925458014 }, { "content": "#[derive(Debug)]\n\nstruct Seed([u8; 32]);\n\nimpl std::str::FromStr for Seed {\n\n type Err = Error;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let vec = hex::decode(s)?;\n\n if vec.len() != 32 
{\n\n return Err(Error::InvalidSeed {\n\n seed_len: vec.len(),\n\n });\n\n }\n\n let mut bytes = [0; 32];\n\n bytes.copy_from_slice(&vec);\n\n Ok(Seed(bytes))\n\n }\n\n}\n", "file_path": "jcli/src/jcli_lib/vote/mod.rs", "rank": 49, "score": 257392.27477589904 }, { "content": "pub fn spawn_network(args: Args) -> Result<(), Error> {\n\n let config: Config = serde_yaml::from_reader(File::open(&args.config)?)?;\n\n let topology = config.build_topology();\n\n\n\n match &config.session.mode {\n\n SessionMode::Standard => standard::spawn_network(config, topology, args),\n\n SessionMode::Monitor => monitor::spawn_network(config, topology, args),\n\n SessionMode::Interactive => interactive::spawn_network(config, topology),\n\n }\n\n}\n", "file_path": "testing/hersir/src/spawn/mod.rs", "rank": 50, "score": 255837.62188060605 }, { "content": "pub fn spawn_network(config: Config, topology: Topology) -> Result<(), Error> {\n\n let controller = NetworkBuilder::default()\n\n .topology(topology)\n\n .blockchain_config(config.build_blockchain())\n\n .session_settings(config.session)\n\n .build()?;\n\n\n\n let user_integration = jormungandr_user_interaction();\n\n\n\n let mut interactive_commands = JormungandrInteractiveCommandExec {\n\n controller: UserInteractionController::new(controller),\n\n };\n\n\n\n user_integration\n\n .interact(&mut interactive_commands)\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "testing/hersir/src/spawn/interactive.rs", "rank": 51, "score": 254905.25535192277 }, { "content": "pub fn parse_timestamp(log: &str) -> u64 {\n\n let re = regex::Regex::new(\"([0-9]+):([0-9]+):([0-9]+)\").unwrap();\n\n\n\n let captures = re.captures(log).unwrap();\n\n\n\n let mut seconds = 0;\n\n\n\n for i in 1..=3 {\n\n seconds +=\n\n captures.get(i).unwrap().as_str().parse::<u64>().unwrap() * 60_u64.pow(3 - i as u32);\n\n }\n\n\n\n seconds\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/connections.rs", "rank": 52, "score": 
254796.62030590035 }, { "content": "fn exec_get(args: RestArgs, block_id: String, count: Option<usize>) -> Result<(), Error> {\n\n let response = args\n\n .client()?\n\n .get(&[\"v0\", \"block\", &block_id, \"next_id\"])\n\n .query(&[(\"count\", count)])\n\n .execute()?\n\n .bytes()?;\n\n for block_id in response.chunks(Blake2b256::HASH_SIZE) {\n\n println!(\"{}\", hex::encode(block_id));\n\n }\n\n Ok(())\n\n}\n", "file_path": "jcli/src/jcli_lib/rest/v0/block/next_id.rs", "rank": 53, "score": 254454.91343401812 }, { "content": "#[test]\n\npub fn node_does_not_quarantine_trusted_node() {\n\n let mut network_controller = NetworkBuilder::default()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(SERVER))\n\n .with_node(Node::new(CLIENT).with_trusted_peer(SERVER)),\n\n )\n\n .blockchain_config(Blockchain::default().with_leader(SERVER))\n\n .wallet_template(\n\n WalletTemplateBuilder::new(ALICE)\n\n .with(1_000_000)\n\n .delegated_to(CLIENT)\n\n .build(),\n\n )\n\n .wallet_template(\n\n WalletTemplateBuilder::new(BOB)\n\n .with(1_000_000)\n\n .delegated_to(SERVER)\n\n .build(),\n\n )\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/quarantine.rs", "rank": 54, "score": 252445.10523504484 }, { "content": "#[test]\n\npub fn node_does_not_quarantine_whitelisted_node() {\n\n let mut network_controller = NetworkBuilder::default()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(SERVER))\n\n .with_node(Node::new(CLIENT).with_trusted_peer(SERVER))\n\n .with_node(Node::new(CLIENT_2).with_trusted_peer(SERVER)),\n\n )\n\n .blockchain_config(Blockchain::default().with_leader(SERVER))\n\n .build()\n\n .unwrap();\n\n\n\n let fake_addr = \"/ip4/127.0.0.1/tcp/80\";\n\n let policy = Policy {\n\n quarantine_duration: Some(Duration::new(30, 0)),\n\n quarantine_whitelist: Some(vec![fake_addr.parse().unwrap()]),\n\n };\n\n\n\n let server = network_controller.spawn(SpawnParams::new(SERVER)).unwrap();\n\n\n", "file_path": 
"testing/jormungandr-integration-tests/src/networking/p2p/quarantine.rs", "rank": 55, "score": 252445.10523504484 }, { "content": "/// prepare the block storage from the given settings\n\npub fn prepare_storage(setting: &Settings) -> Result<Storage, Error> {\n\n let span = span!(Level::TRACE, \"sub_task\", kind = \"storage\");\n\n let storage_span = span.clone();\n\n let _enter = span.enter();\n\n if let Some(dir) = &setting.storage {\n\n std::fs::create_dir_all(dir).map_err(|err| Error::Io {\n\n source: err,\n\n reason: ErrorKind::BlockStorage,\n\n })?;\n\n\n\n tracing::info!(\"storing blockchain in '{:?}'\", dir);\n\n\n\n Storage::file(dir, storage_span).map_err(Into::into)\n\n } else {\n\n Storage::memory(storage_span).map_err(Into::into)\n\n }\n\n}\n\n\n\n/// Try to fetch the block0_id from the HTTP base URL (services) in the array\n\n///\n", "file_path": "jormungandr/src/start_up/mod.rs", "rank": 56, "score": 251467.9806110015 }, { "content": "struct Progress(pub Poll<ProcessingOutcome>);\n\n\n\nimpl Progress {\n\n fn begin(async_outcome: Poll<Result<ProcessingOutcome, ()>>) -> Self {\n\n use self::ProcessingOutcome::*;\n\n\n\n Progress(async_outcome.map(|res| res.unwrap_or(Disconnect)))\n\n }\n\n\n\n fn and_proceed_with<F>(&mut self, poll_fn: F)\n\n where\n\n F: FnOnce() -> Poll<Result<ProcessingOutcome, ()>>,\n\n {\n\n use self::ProcessingOutcome::*;\n\n use Poll::*;\n\n\n\n let async_outcome = match self.0 {\n\n Pending | Ready(Continue) => poll_fn(),\n\n Ready(Disconnect) => return,\n\n };\n", "file_path": "jormungandr/src/network/client/mod.rs", "rank": 57, "score": 250677.01422126964 }, { "content": "pub fn slot_duration(block0: &Block) -> Result<Duration, Block0Error> {\n\n for config in initial(block0)?.iter() {\n\n if let ConfigParam::SlotDuration(duration) = config {\n\n return Ok(Duration::from_secs(*duration as u64));\n\n }\n\n }\n\n Err(Block0Malformed::NoSlotDuration.into())\n\n}\n\n\n", "file_path": "modules/blockchain/src/block0.rs", "rank": 
58, "score": 248936.57823610725 }, { "content": "#[test]\n\npub fn node_put_in_quarantine_nodes_which_are_not_whitelisted() {\n\n let mut network_controller = NetworkBuilder::default()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(SERVER))\n\n .with_node(Node::new(CLIENT).with_trusted_peer(SERVER))\n\n .with_node(Node::new(CLIENT_2).with_trusted_peer(SERVER)),\n\n )\n\n .blockchain_config(Blockchain::default().with_leader(SERVER))\n\n .build()\n\n .unwrap();\n\n\n\n let server = network_controller\n\n .spawn(SpawnParams::new(SERVER).in_memory())\n\n .unwrap();\n\n\n\n let client = network_controller\n\n .spawn(\n\n SpawnParams::new(CLIENT)\n\n // The client broadcast a different ip address from the one it's actually\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/quarantine.rs", "rank": 59, "score": 248834.01360678778 }, { "content": "fn encode_block_0(common: Common) -> Result<(), Error> {\n\n let reader = common.input.open()?;\n\n let genesis: Block0Configuration =\n\n serde_yaml::from_reader(reader).map_err(Error::GenesisFileCorrupted)?;\n\n let block = genesis.to_block();\n\n Ledger::new(block.id(), block.fragments())?;\n\n block\n\n .serialize(&mut Codec::new(common.open_output()?))\n\n .map_err(Error::BlockSerializationFailed)\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/block/mod.rs", "rank": 60, "score": 247218.382826157 }, { "content": "fn decode_block_0(common: Common) -> Result<(), Error> {\n\n let block = common.input.load_block()?;\n\n let yaml = Block0Configuration::from_block(&block)?;\n\n serde_yaml::to_writer(common.open_output()?, &yaml).map_err(Error::GenesisSerializationFailed)\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/block/mod.rs", "rank": 61, "score": 247218.382826157 }, { "content": "#[test]\n\npub fn node_whitelist_itself() {\n\n let mut network_controller = NetworkBuilder::default()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(SERVER))\n\n 
.with_node(Node::new(CLIENT).with_trusted_peer(SERVER)),\n\n )\n\n .blockchain_config(Blockchain::default().with_leader(SERVER))\n\n .wallet_template(\n\n WalletTemplateBuilder::new(ALICE)\n\n .with(1_000_000)\n\n .delegated_to(CLIENT)\n\n .build(),\n\n )\n\n .wallet_template(\n\n WalletTemplateBuilder::new(BOB)\n\n .with(1_000_000)\n\n .delegated_to(SERVER)\n\n .build(),\n\n )\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/quarantine.rs", "rank": 62, "score": 246410.71549849745 }, { "content": "pub fn wait_for_date(target_block_date: BlockDate, mut rest: JormungandrRest) {\n\n let settings = rest.settings().unwrap();\n\n while is_it_due(get_current_date(&mut rest), target_block_date) {\n\n std::thread::sleep(std::time::Duration::from_secs(settings.slot_duration));\n\n }\n\n}\n\n\n", "file_path": "testing/jormungandr-automation/src/testing/time.rs", "rank": 63, "score": 246307.91810056963 }, { "content": "fn address_info(address: &AddressReadable) -> Result<(), Error> {\n\n let chain_addr::Address(discrimination, kind) = address.to_address();\n\n match discrimination {\n\n Discrimination::Production => {\n\n println!(\"discrimination: production\");\n\n }\n\n Discrimination::Test => {\n\n println!(\"discrimination: testing\");\n\n }\n\n }\n\n\n\n match kind {\n\n Kind::Single(single) => println!(\"public key: {}\", single.to_bech32_str()),\n\n Kind::Account(account) => println!(\"account: {}\", account.to_bech32_str()),\n\n Kind::Multisig(_) => return Err(Error::MultisigAddressNotSupported),\n\n Kind::Group(pubk, groupk) => {\n\n println!(\"public key: {}\", pubk.to_bech32_str());\n\n println!(\"group key: {}\", groupk.to_bech32_str());\n\n }\n\n Kind::Script(id) => println!(\"script identifier: {}\", hex::encode(&id)),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/address.rs", "rank": 64, "score": 245990.2448223271 }, { "content": "pub fn assert_are_not_available(\n\n node: &JormungandrProcess,\n\n peers: 
Vec<&JormungandrProcess>,\n\n info: &str,\n\n) {\n\n let available_list = node.rest().p2p_available().unwrap();\n\n assert_record_is_present(available_list, peers, \"available\", info)\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 65, "score": 245434.99440334295 }, { "content": "pub fn assert_are_available(\n\n node: &JormungandrProcess,\n\n peers: Vec<&JormungandrProcess>,\n\n info: &str,\n\n) {\n\n let available_list = node.rest().p2p_available().unwrap();\n\n assert_record_is_present(available_list, peers, \"available\", info)\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 66, "score": 245434.99440334295 }, { "content": "#[test]\n\npub fn test_correct_hash_is_returned_for_correct_block() {\n\n let jcli: JCli = Default::default();\n\n let content = jcli.genesis().init();\n\n let temp_dir = TempDir::new().unwrap();\n\n let yaml_file = temp_dir.child(\"init_file.yaml\");\n\n yaml_file.write_str(&content).unwrap();\n\n let block_file = temp_dir.child(\"block-0.bin\");\n\n\n\n jcli.genesis().encode(yaml_file.path(), &block_file);\n\n jcli.genesis().hash(block_file.path());\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/genesis/hash.rs", "rank": 67, "score": 245296.31807736203 }, { "content": "#[test]\n\npub fn pull_blocks_correct_hashes_all_blocks() {\n\n let setup = setup::client::default();\n\n std::thread::sleep(Duration::from_secs(10)); // wait for the server to produce some blocks\n\n\n\n let genesis_block_hash = Hash::from_str(setup.config.genesis_block_hash()).unwrap();\n\n let blocks = setup\n\n .client\n\n .pull_blocks(&[genesis_block_hash], setup.client.tip().id())\n\n .unwrap();\n\n\n\n let blocks_hashes: Vec<Hash> = blocks.iter().map(|x| x.header().hash()).collect();\n\n let block_hashes_from_logs = setup.server.logger.get_created_blocks_hashes();\n\n assert!(\n\n is_long_prefix(&block_hashes_from_logs, 
&blocks_hashes),\n\n \"server blocks: {:?} | client blocks: {:?}\",\n\n block_hashes_from_logs,\n\n blocks_hashes\n\n );\n\n}\n\n\n\n// L1022 PullBlocks correct hashes\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/grpc/client_tests.rs", "rank": 68, "score": 245264.6239506935 }, { "content": "pub fn start_time(block0: &Block) -> Result<SystemTime, Block0Error> {\n\n for config in initial(block0)?.iter() {\n\n if let ConfigParam::Block0Date(date) = config {\n\n return Ok(SystemTime::UNIX_EPOCH + Duration::from_secs(date.0));\n\n }\n\n }\n\n Err(Block0Malformed::NoStartTime.into())\n\n}\n\n\n", "file_path": "modules/blockchain/src/block0.rs", "rank": 69, "score": 245210.86408395233 }, { "content": "pub fn get_current_date(rest: &mut JormungandrRest) -> BlockDate {\n\n BlockDate::from_str(\n\n rest.stats()\n\n .unwrap()\n\n .stats\n\n .unwrap()\n\n .last_block_date\n\n .unwrap()\n\n .as_ref(),\n\n )\n\n .unwrap()\n\n}\n", "file_path": "testing/jormungandr-automation/src/testing/time.rs", "rank": 70, "score": 242765.63975337782 }, { "content": "pub fn serialize_public<S, A>(key: &PublicKey<A>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n A: AsymmetricPublicKey,\n\n{\n\n if serializer.is_human_readable() {\n\n key.to_bech32_str().serialize(serializer)\n\n } else {\n\n key.as_ref().serialize(serializer)\n\n }\n\n}\n\n\n", "file_path": "jormungandr-lib/src/crypto/serde.rs", "rank": 71, "score": 242509.78752463465 }, { "content": "pub fn get_transaction_count_by_hash(\n\n hash: H256,\n\n context: &Context,\n\n) -> Result<Option<Number>, Error> {\n\n let block = context.blockchain()?.storage().get(hash.0.into())?;\n\n Ok(block.map(Block::calc_transactions_count))\n\n}\n\n\n\npub async fn get_transaction_count_by_number(\n\n number: BlockNumber,\n\n context: &Context,\n\n) -> Result<Option<Number>, Error> {\n\n let blockchain = context.blockchain()?;\n\n let blockchain_tip = context.blockchain_tip()?.get_ref().await;\n\n 
Ok(\n\n get_block_by_number_from_context(number, blockchain, blockchain_tip)?\n\n .map(Block::calc_transactions_count),\n\n )\n\n}\n\n\n", "file_path": "jormungandr/src/jrpc/eth_block_info/logic.rs", "rank": 72, "score": 242278.95179007575 }, { "content": "#[test]\n\npub fn test_watch_block_subscription_blocks_are_in_logs() {\n\n use std::collections::HashSet;\n\n\n\n let setup = setup::client::default();\n\n\n\n let watch_client = setup.watch_client;\n\n\n\n let (sender, receiver) = std::sync::mpsc::channel();\n\n\n\n watch_client.block_subscription(sender);\n\n\n\n let mut ids = HashSet::new();\n\n\n\n const BLOCKS_TO_TEST: usize = 20;\n\n\n\n while let Ok(block) = receiver.recv() {\n\n assert!(ids.insert(block.unwrap().id()));\n\n\n\n if ids.len() == BLOCKS_TO_TEST {\n\n break;\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/grpc/client_tests.rs", "rank": 73, "score": 241944.57135507866 }, { "content": "#[derive(Debug, Clone)]\n\nstruct ReportRecord {\n\n peer_info: PeerInfo,\n\n report_time: Instant,\n\n}\n\n\n\npub enum ReportNodeStatus {\n\n Ok,\n\n Quarantine,\n\n SoftReport,\n\n}\n\n\n\n/// Forgive nodes we demoted after some time\n\npub struct ReportRecords {\n\n /// A report will be lifted after 'report_duration'\n\n report_duration: StdDuration,\n\n report_whitelist: HashSet<Address>,\n\n /// To avoid cycling down nodes back and and forth(and as such prevent them\n\n /// from being evicted from the lru cache), do not report again nodes that were recently\n\n /// lifted from a report.\n\n ///\n", "file_path": "jormungandr/src/topology/quarantine.rs", "rank": 74, "score": 241682.25955385104 }, { "content": "pub fn assert_record_is_present(\n\n peer_list: Vec<PeerRecord>,\n\n peers: Vec<&JormungandrProcess>,\n\n list_name: &str,\n\n info: &str,\n\n) {\n\n for peer in peers {\n\n assert!(\n\n peer_list\n\n .iter()\n\n .any(|x| x.address == peer.address().to_string()),\n\n \"{}: Peer {} is not present in {} list\",\n\n info,\n\n 
peer.alias(),\n\n list_name\n\n );\n\n }\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 75, "score": 241384.1350621335 }, { "content": "pub fn assert_are_in_network_stats(\n\n node: &JormungandrProcess,\n\n peers: Vec<&JormungandrProcess>,\n\n info: &str,\n\n) {\n\n let network_stats = node.rest().network_stats().unwrap();\n\n for peer in peers {\n\n assert!(\n\n network_stats.iter().any(|x| x.addr == Some(peer.address())),\n\n \"{}: Peer {} is not present in network_stats list\",\n\n info,\n\n peer.alias()\n\n );\n\n }\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 76, "score": 241384.1350621335 }, { "content": "pub fn assert_are_not_in_network_stats(\n\n node: &JormungandrProcess,\n\n peers: Vec<&JormungandrProcess>,\n\n info: &str,\n\n) {\n\n let network_stats = node.rest().network_stats().unwrap();\n\n for peer in peers {\n\n assert!(\n\n !network_stats.iter().any(|x| x.addr == Some(peer.address())),\n\n \"{}: Peer {} is present in network_stats list, while it should not\",\n\n info,\n\n peer.alias()\n\n );\n\n }\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 77, "score": 241384.1350621335 }, { "content": "pub fn assert_record_is_not_present(\n\n peer_list: Vec<PeerRecord>,\n\n peers: Vec<&JormungandrProcess>,\n\n list_name: &str,\n\n) {\n\n for peer in peers {\n\n assert!(\n\n !peer_list\n\n .iter()\n\n .any(|x| x.address == peer.address().to_string()),\n\n \"Peer {} is present in {} list, while should not\",\n\n peer.alias(),\n\n list_name\n\n );\n\n }\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 78, "score": 241384.1350621335 }, { "content": "pub fn assert_node_stats(\n\n node: &JormungandrProcess,\n\n peer_available_cnt: usize,\n\n peer_quarantined_cnt: usize,\n\n peer_total_cnt: usize,\n\n info: &str,\n\n) {\n\n node.log_stats();\n\n let stats = 
node.rest().stats().unwrap().stats.expect(\"empty stats\");\n\n assert_eq!(\n\n &peer_available_cnt,\n\n &stats.peer_available_cnt.clone(),\n\n \"{}: peer_available_cnt, Node {}\",\n\n info,\n\n node.alias()\n\n );\n\n\n\n assert_eq!(\n\n &peer_quarantined_cnt,\n\n &stats.peer_quarantined_cnt,\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/mod.rs", "rank": 79, "score": 241384.1350621335 }, { "content": "pub fn check_transaction_was_processed(\n\n transaction: String,\n\n receiver: &Wallet,\n\n value: u64,\n\n jormungandr: &JormungandrProcess,\n\n) -> Result<(), NodeStuckError> {\n\n send_transaction_and_ensure_block_was_produced(&[transaction], jormungandr)?;\n\n\n\n check_funds_transferred_to(&receiver.address().to_string(), value.into(), jormungandr)?;\n\n\n\n jormungandr\n\n .check_no_errors_in_log()\n\n .map_err(NodeStuckError::InternalJormungandrError)\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/non_functional/mod.rs", "rank": 80, "score": 241343.83208353413 }, { "content": "pub fn chain_id(_context: &Context) -> Result<Number, Error> {\n\n // TODO implement\n\n Ok(0.into())\n\n}\n\n\n", "file_path": "jormungandr/src/jrpc/eth_chain_info/logic.rs", "rank": 81, "score": 241190.25586429203 }, { "content": "pub fn new_block_filter(_context: &Context) -> Result<Number, Error> {\n\n // TODO implement\n\n Ok(0.into())\n\n}\n\n\n", "file_path": "jormungandr/src/jrpc/eth_filter/logic.rs", "rank": 82, "score": 241095.07980414748 }, { "content": "pub fn read_into<T: DeserializeFromSlice>(bytes: &[u8]) -> T {\n\n let mut buf = Codec::new(bytes);\n\n T::deserialize_from_slice(&mut buf).unwrap()\n\n}\n", "file_path": "testing/jormungandr-automation/src/jormungandr/grpc/mod.rs", "rank": 83, "score": 241009.71686196508 }, { "content": "#[test]\n\npub fn test_correct_id_is_returned_for_block_tip_if_only_genesis_block_exists() {\n\n let jcli: JCli = Default::default();\n\n let jormungandr = 
Starter::new().start().unwrap();\n\n let block_id = jcli.rest().v0().tip(jormungandr.rest_uri());\n\n\n\n assert_ne!(&block_id, \"\", \"empty block hash\");\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/rest/tip.rs", "rank": 84, "score": 235610.9683665529 }, { "content": "#[test]\n\npub fn get_blocks_incorrect_hash() {\n\n let setup = setup::client::default();\n\n let fake_hash: Hash = TestGen::hash();\n\n assert_eq!(\n\n MockClientError::InvalidRequest(format!(\n\n \"not found (block {} is not known to this node)\",\n\n fake_hash\n\n )),\n\n setup.client.headers(&[fake_hash]).err().unwrap(),\n\n \"wrong error\"\n\n );\n\n}\n\n\n\n// L1013 PullBlocksToTip correct hash\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/grpc/client_tests.rs", "rank": 85, "score": 234657.01844790435 }, { "content": "#[test]\n\npub fn get_blocks_correct_hash() {\n\n let setup = setup::client::default();\n\n\n\n let tip = setup.client.tip();\n\n assert!(setup.client.get_blocks(&[tip.hash()]).is_ok());\n\n}\n\n\n\n// L1012 GetBlocks incorrect hash\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/grpc/client_tests.rs", "rank": 86, "score": 234657.01844790435 }, { "content": "#[test]\n\npub fn leader_node_last_block_info() {\n\n let mut network_controller = build_network!()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(LEADER))\n\n .with_node(Node::new(LEADER_CLIENT).with_trusted_peer(LEADER)),\n\n )\n\n .wallet_template(\n\n WalletTemplateBuilder::new(\"alice\")\n\n .with(1_000_000)\n\n .delegated_to(LEADER)\n\n .build(),\n\n )\n\n .wallet_template(WalletTemplateBuilder::new(\"bob\").with(1_000_000).build())\n\n .blockchain_config(Blockchain::default().with_leader(LEADER))\n\n .build()\n\n .unwrap();\n\n\n\n let leader = network_controller\n\n .spawn(SpawnParams::new(LEADER).in_memory())\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/stats.rs", "rank": 87, "score": 
234585.8336228111 }, { "content": "#[test]\n\npub fn passive_node_last_block_info() {\n\n let mut network_controller = build_network!()\n\n .topology(\n\n Topology::default()\n\n .with_node(Node::new(LEADER))\n\n .with_node(Node::new(PASSIVE).with_trusted_peer(LEADER)),\n\n )\n\n .blockchain_config(Blockchain::default().with_leader(LEADER))\n\n .wallet_template(\n\n WalletTemplateBuilder::new(\"alice\")\n\n .with(1_000_000)\n\n .delegated_to(LEADER)\n\n .build(),\n\n )\n\n .wallet_template(WalletTemplateBuilder::new(\"bob\").with(1_000_000).build())\n\n .build()\n\n .unwrap();\n\n\n\n let leader = network_controller\n\n .spawn(SpawnParams::new(LEADER).in_memory())\n", "file_path": "testing/jormungandr-integration-tests/src/networking/p2p/stats.rs", "rank": 88, "score": 234585.83362281113 }, { "content": "fn bytes_to_pub_key<K: AsymmetricPublicKey>(bytes: &[u8]) -> Result<String, Error> {\n\n use chain_crypto::bech32::Bech32 as _;\n\n let public: chain_crypto::PublicKey<K> = chain_crypto::PublicKey::from_binary(bytes)?;\n\n Ok(public.to_bech32_str())\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/key.rs", "rank": 89, "score": 234204.68914426543 }, { "content": "pub fn send_transaction_and_ensure_block_was_produced(\n\n transation_messages: &[String],\n\n jormungandr: &JormungandrProcess,\n\n) -> Result<(), NodeStuckError> {\n\n let jcli: JCli = Default::default();\n\n let block_tip_before_transaction = jcli.rest().v0().tip(&jormungandr.rest_uri());\n\n let block_counter_before_transaction = jormungandr.logger.get_created_blocks_counter();\n\n\n\n jcli.fragment_sender(jormungandr)\n\n .send_many(transation_messages)\n\n .wait_until_all_processed(&Default::default())\n\n .map_err(NodeStuckError::InternalJcliError)?;\n\n\n\n let block_tip_after_transaction = jcli.rest().v0().tip(jormungandr.rest_uri());\n\n let block_counter_after_transaction = jormungandr.logger.get_created_blocks_counter();\n\n\n\n if block_tip_before_transaction == block_tip_after_transaction {\n\n 
return Err(NodeStuckError::TipIsNotMoving {\n\n tip_hash: block_tip_after_transaction,\n\n logs: jormungandr.logger.get_log_content(),\n", "file_path": "testing/jormungandr-integration-tests/src/non_functional/mod.rs", "rank": 90, "score": 233775.34228890244 }, { "content": "type PullHeadersScheduler = FireForgetScheduler<HeaderHash, NodeId, Checkpoints>;\n", "file_path": "jormungandr/src/blockchain/process.rs", "rank": 91, "score": 232920.27031148106 }, { "content": "pub fn block0() -> Block {\n\n block::builder(\n\n BlockVersion::Genesis,\n\n ContentsBuilder::new().into(),\n\n |hdr| {\n\n Ok::<_, ()>(\n\n hdr.set_genesis()\n\n .set_date(BlockDate::first())\n\n .into_unsigned_header()\n\n .expect(\"internal error cannot build unsigned block\")\n\n .generalize(),\n\n )\n\n },\n\n )\n\n .expect(\"internal error: block builder cannot return error\")\n\n}\n", "file_path": "testing/jormungandr-automation/src/jormungandr/grpc/server/data.rs", "rank": 92, "score": 232398.1544963159 }, { "content": "pub fn get_uncle_count_by_number(_: BlockNumber, _: &Context) -> Result<Option<Number>, Error> {\n\n // jormungandr block does not have any ethereum \"uncles\" so we allways return 0\n\n Ok(Some(0.into()))\n\n}\n\n\n\npub async fn get_block_number(context: &Context) -> Result<Number, Error> {\n\n let blockchain_tip = context.blockchain_tip()?.get_ref().await;\n\n Ok((Into::<u32>::into(blockchain_tip.chain_length()) as u64).into())\n\n}\n", "file_path": "jormungandr/src/jrpc/eth_block_info/logic.rs", "rank": 93, "score": 232036.57374571252 }, { "content": "/// open the given file path as a writable stream, or stdout if no path\n\n/// provided\n\npub fn open_file_write<P: AsRef<Path>>(path: &Option<P>) -> Result<impl Write, Error> {\n\n match path {\n\n Some(path) => {\n\n let writer = std::fs::OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .read(false)\n\n .append(false)\n\n .truncate(true)\n\n .open(path)?;\n\n Ok(Box::new(writer) as Box<dyn Write>)\n\n }\n\n None 
=> Ok(Box::new(stdout()) as Box<dyn Write>),\n\n }\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/utils/io.rs", "rank": 94, "score": 231505.79004463975 }, { "content": "#[test]\n\npub fn pull_blocks_correct_hashes_partial() {\n\n let setup = setup::client::default();\n\n setup\n\n .client\n\n .wait_for_chain_length(10.into(), CHAIN_GROWTH_TIMEOUT);\n\n\n\n let block_hashes_from_logs = setup.server.logger.get_created_blocks_hashes();\n\n let start = 2;\n\n let end = 8;\n\n let expected_hashes = block_hashes_from_logs[start..end].to_vec();\n\n\n\n let blocks = setup\n\n .client\n\n .pull_blocks(\n\n &[expected_hashes[0]],\n\n expected_hashes.last().copied().unwrap(),\n\n )\n\n .unwrap();\n\n\n\n let blocks_hashes: Vec<Hash> = blocks.iter().map(|x| x.header().hash()).collect();\n\n\n\n assert_eq!(&expected_hashes[1..], &blocks_hashes);\n\n}\n\n\n\n// L1023 PullBlocks to and from in wrong order\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/grpc/client_tests.rs", "rank": 95, "score": 231084.5933888375 }, { "content": "#[test]\n\npub fn test_non_empty_hash_is_returned_for_block0() {\n\n let jcli: JCli = Default::default();\n\n let jormungandr = Starter::new().start().unwrap();\n\n let rest_uri = jormungandr.rest_uri();\n\n let block_id = jcli.rest().v0().tip(&rest_uri);\n\n jcli.rest().v0().block().get(block_id, rest_uri);\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/rest/block.rs", "rank": 96, "score": 231084.5933888375 }, { "content": "#[test]\n\npub fn pull_blocks_hashes_wrong_order() {\n\n let setup = setup::client::default();\n\n\n\n setup\n\n .client\n\n .wait_for_chain_length(10.into(), CHAIN_GROWTH_TIMEOUT);\n\n\n\n let block_hashes_from_logs = setup.server.logger.get_created_blocks_hashes();\n\n let start = 2;\n\n let end = 8;\n\n let expected_hashes = block_hashes_from_logs[start..end].to_vec();\n\n\n\n let result = setup.client.pull_blocks(\n\n &[expected_hashes.last().copied().unwrap()],\n\n 
expected_hashes[0],\n\n );\n\n\n\n assert!(result.is_err());\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/grpc/client_tests.rs", "rank": 97, "score": 231084.5933888375 }, { "content": "#[test]\n\npub fn pull_blocks_to_tip_correct_hash() {\n\n let setup = setup::client::default();\n\n\n\n std::thread::sleep(Duration::from_secs(10)); // wait for the server to produce some blocks\n\n\n\n let blocks = setup\n\n .client\n\n .pull_blocks_to_tip(Hash::from_str(setup.config.genesis_block_hash()).unwrap())\n\n .unwrap();\n\n\n\n let blocks_hashes: Vec<Hash> = blocks.iter().map(|x| x.header().hash()).collect();\n\n\n\n let block_hashes_from_logs = setup.server.logger.get_created_blocks_hashes();\n\n assert!(\n\n is_long_prefix(&block_hashes_from_logs, &blocks_hashes),\n\n \"server blocks: {:?} | client blocks: {:?}\",\n\n block_hashes_from_logs,\n\n blocks_hashes\n\n );\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/grpc/client_tests.rs", "rank": 98, "score": 231084.5933888375 }, { "content": "pub fn request_settings(args: RestArgs) -> Result<SettingsDto, Error> {\n\n serde_json::from_str(&(args.client()?.get(&[\"v0\", \"settings\"]).execute()?.text()?))\n\n .map_err(Error::SerdeError)\n\n}\n", "file_path": "jcli/src/jcli_lib/rest/v0/settings/mod.rs", "rank": 99, "score": 230501.78555448825 } ]
Rust
src/dynamics/world.rs
Gohla/rust_box2d
d5431f905a09c0b2670f648c15f6d5120c0693c3
#[path = "world_callbacks.rs"] pub mod callbacks; use std::mem; use std::ptr; use std::marker::PhantomData; use std::cell::{Ref, RefMut}; use wrap::*; use handle::*; use common::{Draw, DrawLink, DrawFlags}; use common::math::Vec2; use collision::AABB; use dynamics::Profile; use user_data::UserDataTypes; use dynamics::body::{BodyDef, MetaBody, Body}; use dynamics::joints::{Joint, JointDef, MetaJoint}; use dynamics::contacts::Contact; use self::callbacks::{ContactFilter, ContactFilterLink, ContactListener, ContactListenerLink, QueryCallback, QueryCallbackLink, RayCastCallback, RayCastCallbackLink}; pub type BodyHandle = TypedHandle<Body>; pub type JointHandle = TypedHandle<Joint>; pub struct World<U: UserDataTypes> { ptr: *mut ffi::World, bodies: HandleMap<MetaBody<U>, Body>, joints: HandleMap<MetaJoint<U>, Joint>, contact_filter_link: ContactFilterLink, contact_listener_link: ContactListenerLink, draw_link: DrawLink, } impl<U: UserDataTypes> Wrapped<ffi::World> for World<U> { unsafe fn ptr(&self) -> *const ffi::World { self.ptr as *const ffi::World } unsafe fn mut_ptr(&mut self) -> *mut ffi::World { self.ptr } } impl<U: UserDataTypes> World<U> { pub fn new(gravity: &Vec2) -> Self { unsafe { World { ptr: ffi::World_new(gravity), bodies: HandleMap::new(), joints: HandleMap::new(), contact_filter_link: ContactFilterLink::new(), contact_listener_link: ContactListenerLink::new(), draw_link: DrawLink::new(), } } } pub fn set_contact_filter<F: ContactFilter<U>>(&mut self, filter: Box<F>) { unsafe { let filter_ptr = self.contact_filter_link.use_with(filter); ffi::World_set_contact_filter(self.mut_ptr(), filter_ptr); } } pub fn set_contact_listener<L: ContactListener<U>>(&mut self, listener: Box<L>) { unsafe { let listener_ptr = self.contact_listener_link.use_with(listener); ffi::World_set_contact_listener(self.mut_ptr(), listener_ptr); } } pub fn create_body(&mut self, def: &BodyDef) -> BodyHandle where U::BodyData: Default { self.create_body_with(def, 
U::BodyData::default()) } pub fn create_body_with(&mut self, def: &BodyDef, data: U::BodyData) -> BodyHandle { unsafe { let body = ffi::World_create_body(self.mut_ptr(), def); self.bodies.insert_with(|h| MetaBody::new(body, h, data)) } } pub fn body(&self, handle: BodyHandle) -> Ref<MetaBody<U>> { self.bodies.get(handle).expect("invalid body handle") } pub fn body_mut(&self, handle: BodyHandle) -> RefMut<MetaBody<U>> { self.bodies.get_mut(handle).expect("invalid body handle") } pub fn destroy_body(&mut self, handle: BodyHandle) { let mut body = self.bodies.remove(handle); World::remove_body_joint_handles(&mut body, &mut self.joints); unsafe { ffi::World_destroy_body(self.mut_ptr(), body.mut_ptr()); } } pub fn bodies(&self) -> HandleIter<Body, MetaBody<U>> { self.bodies.iter() } fn remove_body_joint_handles(body: &mut Body, joints: &mut HandleMap<MetaJoint<U>, Joint>) { for (_, joint) in body.joints() { joints.remove(joint); } } pub fn create_joint<JD: JointDef>(&mut self, def: &JD) -> JointHandle where U::JointData: Default { self.create_joint_with(def, U::JointData::default()) } pub fn create_joint_with<JD: JointDef>(&mut self, def: &JD, data: U::JointData) -> JointHandle { unsafe { let joint = def.create(self); self.joints.insert_with(|h| MetaJoint::new(joint, h, data)) } } pub fn joint(&self, handle: JointHandle) -> Ref<MetaJoint<U>> { self.joints.get(handle).expect("invalid joint handle") } pub fn joint_mut(&self, handle: JointHandle) -> RefMut<MetaJoint<U>> { self.joints.get_mut(handle).expect("invalid joint handle") } pub fn destroy_joint(&mut self, handle: JointHandle) { let mut joint = self.joints.remove(handle); unsafe { ffi::World_destroy_joint(self.mut_ptr(), joint.mut_base_ptr()); } } pub fn joints(&self) -> HandleIter<Joint, MetaJoint<U>> { self.joints.iter() } pub fn step(&mut self, time_step: f32, velocity_iterations: i32, position_iterations: i32) { unsafe { ffi::World_step(self.mut_ptr(), time_step, velocity_iterations, position_iterations); } } 
pub fn clear_forces(&mut self) { unsafe { ffi::World_clear_forces(self.mut_ptr()) } } pub fn draw_debug_data<D: Draw>(&mut self, draw: &mut D, flags: DrawFlags) { unsafe { let ptr = self.draw_link.use_with(draw, flags); ffi::World_set_debug_draw(self.mut_ptr(), ptr); ffi::World_draw_debug_data(self.mut_ptr()); ffi::World_set_debug_draw(self.mut_ptr(), ptr::null_mut()); } } pub fn query_aabb<C: QueryCallback>(&self, callback: &mut C, aabb: &AABB) { unsafe { let mut link = QueryCallbackLink::new(); let ptr = link.use_with(callback); ffi::World_query_aabb(self.ptr(), ptr, aabb); } } pub fn ray_cast<C: RayCastCallback>(&self, callback: &mut C, p1: &Vec2, p2: &Vec2) { unsafe { let mut link = RayCastCallbackLink::new(); let ptr = link.use_with(callback); ffi::World_ray_cast(self.ptr(), ptr, p1, p2); } } pub fn contacts_mut(&mut self) -> ContactIterMut { ContactIterMut { ptr: unsafe { ffi::World_get_contact_list(self.mut_ptr()) }, phantom: PhantomData, } } pub fn contacts(&self) -> ContactIter { ContactIter { ptr: unsafe { ffi::World_get_contact_list_const(self.ptr()) }, phantom: PhantomData, } } pub fn set_sleeping_allowed(&mut self, flag: bool) { unsafe { ffi::World_set_allow_sleeping(self.mut_ptr(), flag) } } pub fn is_sleeping_allowed(&self) -> bool { unsafe { ffi::World_get_allow_sleeping(self.ptr()) } } pub fn set_warm_starting(&mut self, flag: bool) { unsafe { ffi::World_set_warm_starting(self.mut_ptr(), flag) } } pub fn is_warm_starting(&self) -> bool { unsafe { ffi::World_get_warm_starting(self.ptr()) } } pub fn set_continuous_physics(&mut self, flag: bool) { unsafe { ffi::World_set_continuous_physics(self.mut_ptr(), flag) } } pub fn is_continuous_physics(&self) -> bool { unsafe { ffi::World_get_continuous_physics(self.ptr()) } } pub fn set_sub_stepping(&mut self, flag: bool) { unsafe { ffi::World_set_sub_stepping(self.mut_ptr(), flag) } } pub fn is_sub_stepping(&self) -> bool { unsafe { ffi::World_get_sub_stepping(self.ptr()) } } pub fn proxy_count(&self) -> i32 
{ unsafe { ffi::World_get_proxy_count(self.ptr()) } } pub fn body_count(&self) -> i32 { unsafe { ffi::World_get_body_count(self.ptr()) } } pub fn joint_count(&self) -> i32 { unsafe { ffi::World_get_joint_count(self.ptr()) } } pub fn contact_count(&self) -> i32 { unsafe { ffi::World_get_contact_count(self.ptr()) } } pub fn tree_height(&self) -> i32 { unsafe { ffi::World_get_tree_height(self.ptr()) } } pub fn tree_balance(&self) -> i32 { unsafe { ffi::World_get_tree_balance(self.ptr()) } } pub fn tree_quality(&self) -> f32 { unsafe { ffi::World_get_tree_quality(self.ptr()) } } pub fn set_gravity(&mut self, gravity: &Vec2) { unsafe { ffi::World_set_gravity(self.mut_ptr(), gravity) } } pub fn gravity(&self) -> Vec2 { unsafe { ffi::World_get_gravity(self.ptr()) } } pub fn is_locked(&self) -> bool { unsafe { ffi::World_is_locked(self.ptr()) } } pub fn set_auto_clearing_forces(&mut self, flag: bool) { unsafe { ffi::World_set_auto_clear_forces(self.mut_ptr(), flag) } } pub fn is_auto_clearing_forces(&self) -> bool { unsafe { ffi::World_get_auto_clear_forces(self.ptr()) } } pub fn shift_origin(&mut self, origin: &Vec2) { unsafe { ffi::World_shift_origin(self.mut_ptr(), origin) } } pub fn profile<'a>(&'a self) -> &'a Profile { unsafe { &*ffi::World_get_profile(self.ptr()) } } pub fn dump(&mut self) { unsafe { ffi::World_dump(self.mut_ptr()) } } } impl<U: UserDataTypes> Drop for World<U> { fn drop(&mut self) { unsafe { ffi::World_drop(self.mut_ptr()) } } } pub struct ContactIterMut<'a> { ptr: *mut ffi::Contact, phantom: PhantomData<&'a ()>, } impl<'a> Iterator for ContactIterMut<'a> { type Item = WrappedRefMut<'a, Contact>; fn next(&mut self) -> Option<Self::Item> { if self.ptr.is_null() { None } else { unsafe { let next = ffi::Contact_get_next(self.ptr); Some(WrappedRefMut::new(Contact::from_ffi(mem::replace(&mut self.ptr, next)))) } } } } pub struct ContactIter<'a> { ptr: *const ffi::Contact, phantom: PhantomData<&'a ()>, } impl<'a> Iterator for ContactIter<'a> { type Item 
= WrappedRef<'a, Contact>; fn next(&mut self) -> Option<Self::Item> { if self.ptr.is_null() { None } else { unsafe { let next = ffi::Contact_get_next_const(self.ptr); Some(WrappedRef::new(Contact::from_ffi( mem::replace(&mut self.ptr, next) as *mut ffi::Contact ))) } } } } #[doc(hidden)] pub mod ffi { pub use common::ffi::Draw; pub use dynamics::body::ffi::Body; pub use dynamics::joints::ffi::Joint; pub use dynamics::contacts::ffi::{Contact, Contact_get_next, Contact_get_next_const}; pub use super::callbacks::ffi::{ContactFilter, ContactListener, QueryCallback, RayCastCallback}; use common::math::Vec2; use collision::AABB; use dynamics::Profile; use dynamics::body::BodyDef; pub enum World {} extern "C" { pub fn World_new(gravity: *const Vec2) -> *mut World; pub fn World_drop(slf: *mut World); pub fn World_set_contact_filter(slf: *mut World, cf: *mut ContactFilter); pub fn World_set_contact_listener(slf: *mut World, cl: *mut ContactListener); pub fn World_set_debug_draw(slf: *mut World, dd: *mut Draw); pub fn World_create_body(slf: *mut World, def: *const BodyDef) -> *mut Body; pub fn World_destroy_body(slf: *mut World, body: *mut Body); pub fn World_destroy_joint(slf: *mut World, joint: *mut Joint); pub fn World_step(slf: *mut World, time_step: f32, velocity_iterations: i32, position_iterations: i32); pub fn World_clear_forces(slf: *mut World); pub fn World_draw_debug_data(slf: *mut World); pub fn World_query_aabb(slf: *const World, qc: *mut QueryCallback, aabb: *const AABB); pub fn World_ray_cast(slf: *const World, rcc: *mut RayCastCallback, p1: *const Vec2, p2: *const Vec2); pub fn World_get_contact_list(slf: *mut World) -> *mut Contact; pub fn World_get_contact_list_const(slf: *const World) -> *const Contact; pub fn World_set_allow_sleeping(slf: *mut World, flag: bool); pub fn World_get_allow_sleeping(slf: *const World) -> bool; pub fn World_set_warm_starting(slf: *mut World, flag: bool); pub fn World_get_warm_starting(slf: *const World) -> bool; pub fn 
World_set_continuous_physics(slf: *mut World, flag: bool); pub fn World_get_continuous_physics(slf: *const World) -> bool; pub fn World_set_sub_stepping(slf: *mut World, flag: bool); pub fn World_get_sub_stepping(slf: *const World) -> bool; pub fn World_get_proxy_count(slf: *const World) -> i32; pub fn World_get_body_count(slf: *const World) -> i32; pub fn World_get_joint_count(slf: *const World) -> i32; pub fn World_get_contact_count(slf: *const World) -> i32; pub fn World_get_tree_height(slf: *const World) -> i32; pub fn World_get_tree_balance(slf: *const World) -> i32; pub fn World_get_tree_quality(slf: *const World) -> f32; pub fn World_set_gravity(slf: *mut World, gravity: *const Vec2); pub fn World_get_gravity(slf: *const World) -> Vec2; pub fn World_is_locked(slf: *const World) -> bool; pub fn World_set_auto_clear_forces(slf: *mut World, flag: bool); pub fn World_get_auto_clear_forces(slf: *const World) -> bool; pub fn World_shift_origin(slf: *mut World, origin: *const Vec2); pub fn World_get_profile(slf: *const World) -> *const Profile; pub fn World_dump(slf: *mut World); } }
#[path = "world_callbacks.rs"] pub mod callbacks; use std::mem; use std::ptr; use std::marker::PhantomData; use std::cell::{Ref, RefMut}; use wrap::*; use handle::*; use common::{Draw, DrawLink, DrawFlags}; use common::math::Vec2; use collision::AABB; use dynamics::Profile; use user_data::UserDataTypes; use dynamics::body::{BodyDef, MetaBody, Body}; use dynamics::joints::{Joint, JointDef, MetaJoint}; use dynamics::contacts::Contact; use self::callbacks::{ContactFilter, ContactFilterLink, ContactListener, ContactListenerLink, QueryCallback, QueryCallbackLink, RayCastCallback, RayCastCallbackLink}; pub type BodyHandle = TypedHandle<Body>; pub type JointHandle = TypedHandle<Joint>; pub struct World<U: UserDataTypes> { ptr: *mut ffi::World, bodies: HandleMap<MetaBody<U>, Body>, joints: HandleMap<MetaJoint<U>, Joint>, contact_filter_link: ContactFilterLink, contact_listener_link: ContactListenerLink, draw_link: DrawLink, } impl<U: UserDataTypes> Wrapped<ffi::World> for World<U> { unsafe fn ptr(&self) -> *const ffi::World { self.ptr as *const ffi::World } unsafe fn mut_ptr(&mut self) -> *mut ffi::World { self.ptr } } impl<U: UserDataTypes> World<U> { pub fn new(gravity: &Vec2) -> Self { unsafe { World { ptr: ffi::World_new(gravity), bodies: HandleMap::new(), joints: HandleMap::new(), contact_filter_link: ContactFilterLink::new(), contact_listener_link: ContactListenerLink::new(), draw_link: DrawLink::new(), } } } pub fn set_contact_filter<F: ContactFilter<U>>(&mut self, filter: Box<F>) { unsafe { let filter_ptr = self.contact_filter_link.use_with(filter); ffi::World_set_contact_filter(self.mut_ptr(), filter_ptr); } } pub fn set_contact_listener<L: ContactListener<U>>(&mut self, listener: Box<L>) { unsafe { let listener_ptr = self.contact_listener_link.use_with(listener); ffi::World_set_contact_listener(self.mut_ptr(), listener_ptr); } } pub fn create_body(&mut self, def: &BodyDef) -> BodyHandle where U::BodyData: Default { self.create_body_with(def, 
U::BodyData::default()) } pub fn create_body_with(&mut self, def: &BodyDef, data: U::BodyData) -> BodyHandle { unsafe { let body = ffi::World_create_body(self.mut_ptr(), def); self.bodies.insert_with(|h| MetaBody::new(body, h, data)) } } pub fn body(&self, handle: BodyHandle) -> Ref<MetaBody<U>> { self.bodies.get(handle).expect("invalid body handle") } pub fn body_mut(&self, handle: BodyHandle) -> RefMut<MetaBody<U>> { self.bodies.get_mut(handle).expect("invalid body handle") } pub fn destroy_body(&mut self, handle: BodyHandle) { let mut body = self.bodies.remove(handle); World::remove_body_joint_handles(&mut body, &mut self.joints); unsafe { ffi::World_destroy_body(self.mut_ptr(), body.mut_ptr()); } } pub fn bodies(&self) -> HandleIter<Body, MetaBody<U>> { self.bodies.iter() } fn remove_body_joint_handles(body: &mut Body, joints: &mut HandleMap<MetaJoint<U>, Joint>) { for (_, joint) in body.joints() { joints.remove(joint); } } pub fn create_joint<JD: JointDef>(&mut self, def: &JD) -> JointHandle where U::JointData: Default { self.create_joint_with(def, U::JointData::default()) } pub fn create_joint_with<JD: JointDef>(&mut self, def: &JD, data: U::JointData) -> JointHandle { unsafe { let joint = def.create(self); self.joints.insert_with(|h| MetaJoint::new(joint, h, data)) } } pub fn joint(&self, handle: JointHandle) -> Ref<MetaJoint<U>> { self.joints.get(handle).expect("invalid joint handle") } pub fn joint_mut(&self, handle: JointHandle) -> RefMut<MetaJoint<U>> { self.joints.get_mut(handle).expect("invalid joint handle") } pub fn destroy_joint(&mut self, handle: JointHandle) { let mut joint = self.joints.remove(handle); unsafe { ffi::World_destroy_joint(self.mut_ptr(), joint.mut_base_ptr()); } } pub fn joints(&self) -> HandleIter<Joint, MetaJoint<U>> { self.joints.iter() } pub fn step(&mut self, time_step: f32, velocity_iterations: i32, position_iterations: i32) { unsafe { ffi::World_step(self.mut_ptr(), time_step, velocity_iterations, position_iterations); } } 
pub fn clear_forces(&mut self) { unsafe { ffi::World_clear_forces(self.mut_ptr()) } } pub fn draw_debug_data<D: Draw>(&mut self, draw: &mut D, flags: DrawFlags) { unsafe { let ptr = self.draw_link.use_with(draw, flags); ffi::World_set_debug_draw(self.mut_ptr(), ptr); ffi::World_draw_debug_data(self.mut_ptr()); ffi::World_set_debug_draw(self.mut_ptr(), ptr::null_mut()); } } pub fn query_aabb<C: QueryCallback>(&self, callback: &mut C, aabb: &AABB) {
(WrappedRef::new(Contact::from_ffi( mem::replace(&mut self.ptr, next) as *mut ffi::Contact ))) } } } } #[doc(hidden)] pub mod ffi { pub use common::ffi::Draw; pub use dynamics::body::ffi::Body; pub use dynamics::joints::ffi::Joint; pub use dynamics::contacts::ffi::{Contact, Contact_get_next, Contact_get_next_const}; pub use super::callbacks::ffi::{ContactFilter, ContactListener, QueryCallback, RayCastCallback}; use common::math::Vec2; use collision::AABB; use dynamics::Profile; use dynamics::body::BodyDef; pub enum World {} extern "C" { pub fn World_new(gravity: *const Vec2) -> *mut World; pub fn World_drop(slf: *mut World); pub fn World_set_contact_filter(slf: *mut World, cf: *mut ContactFilter); pub fn World_set_contact_listener(slf: *mut World, cl: *mut ContactListener); pub fn World_set_debug_draw(slf: *mut World, dd: *mut Draw); pub fn World_create_body(slf: *mut World, def: *const BodyDef) -> *mut Body; pub fn World_destroy_body(slf: *mut World, body: *mut Body); pub fn World_destroy_joint(slf: *mut World, joint: *mut Joint); pub fn World_step(slf: *mut World, time_step: f32, velocity_iterations: i32, position_iterations: i32); pub fn World_clear_forces(slf: *mut World); pub fn World_draw_debug_data(slf: *mut World); pub fn World_query_aabb(slf: *const World, qc: *mut QueryCallback, aabb: *const AABB); pub fn World_ray_cast(slf: *const World, rcc: *mut RayCastCallback, p1: *const Vec2, p2: *const Vec2); pub fn World_get_contact_list(slf: *mut World) -> *mut Contact; pub fn World_get_contact_list_const(slf: *const World) -> *const Contact; pub fn World_set_allow_sleeping(slf: *mut World, flag: bool); pub fn World_get_allow_sleeping(slf: *const World) -> bool; pub fn World_set_warm_starting(slf: *mut World, flag: bool); pub fn World_get_warm_starting(slf: *const World) -> bool; pub fn World_set_continuous_physics(slf: *mut World, flag: bool); pub fn World_get_continuous_physics(slf: *const World) -> bool; pub fn World_set_sub_stepping(slf: *mut World, flag: 
bool); pub fn World_get_sub_stepping(slf: *const World) -> bool; pub fn World_get_proxy_count(slf: *const World) -> i32; pub fn World_get_body_count(slf: *const World) -> i32; pub fn World_get_joint_count(slf: *const World) -> i32; pub fn World_get_contact_count(slf: *const World) -> i32; pub fn World_get_tree_height(slf: *const World) -> i32; pub fn World_get_tree_balance(slf: *const World) -> i32; pub fn World_get_tree_quality(slf: *const World) -> f32; pub fn World_set_gravity(slf: *mut World, gravity: *const Vec2); pub fn World_get_gravity(slf: *const World) -> Vec2; pub fn World_is_locked(slf: *const World) -> bool; pub fn World_set_auto_clear_forces(slf: *mut World, flag: bool); pub fn World_get_auto_clear_forces(slf: *const World) -> bool; pub fn World_shift_origin(slf: *mut World, origin: *const Vec2); pub fn World_get_profile(slf: *const World) -> *const Profile; pub fn World_dump(slf: *mut World); } }
unsafe { let mut link = QueryCallbackLink::new(); let ptr = link.use_with(callback); ffi::World_query_aabb(self.ptr(), ptr, aabb); } } pub fn ray_cast<C: RayCastCallback>(&self, callback: &mut C, p1: &Vec2, p2: &Vec2) { unsafe { let mut link = RayCastCallbackLink::new(); let ptr = link.use_with(callback); ffi::World_ray_cast(self.ptr(), ptr, p1, p2); } } pub fn contacts_mut(&mut self) -> ContactIterMut { ContactIterMut { ptr: unsafe { ffi::World_get_contact_list(self.mut_ptr()) }, phantom: PhantomData, } } pub fn contacts(&self) -> ContactIter { ContactIter { ptr: unsafe { ffi::World_get_contact_list_const(self.ptr()) }, phantom: PhantomData, } } pub fn set_sleeping_allowed(&mut self, flag: bool) { unsafe { ffi::World_set_allow_sleeping(self.mut_ptr(), flag) } } pub fn is_sleeping_allowed(&self) -> bool { unsafe { ffi::World_get_allow_sleeping(self.ptr()) } } pub fn set_warm_starting(&mut self, flag: bool) { unsafe { ffi::World_set_warm_starting(self.mut_ptr(), flag) } } pub fn is_warm_starting(&self) -> bool { unsafe { ffi::World_get_warm_starting(self.ptr()) } } pub fn set_continuous_physics(&mut self, flag: bool) { unsafe { ffi::World_set_continuous_physics(self.mut_ptr(), flag) } } pub fn is_continuous_physics(&self) -> bool { unsafe { ffi::World_get_continuous_physics(self.ptr()) } } pub fn set_sub_stepping(&mut self, flag: bool) { unsafe { ffi::World_set_sub_stepping(self.mut_ptr(), flag) } } pub fn is_sub_stepping(&self) -> bool { unsafe { ffi::World_get_sub_stepping(self.ptr()) } } pub fn proxy_count(&self) -> i32 { unsafe { ffi::World_get_proxy_count(self.ptr()) } } pub fn body_count(&self) -> i32 { unsafe { ffi::World_get_body_count(self.ptr()) } } pub fn joint_count(&self) -> i32 { unsafe { ffi::World_get_joint_count(self.ptr()) } } pub fn contact_count(&self) -> i32 { unsafe { ffi::World_get_contact_count(self.ptr()) } } pub fn tree_height(&self) -> i32 { unsafe { ffi::World_get_tree_height(self.ptr()) } } pub fn tree_balance(&self) -> i32 { unsafe { 
ffi::World_get_tree_balance(self.ptr()) } } pub fn tree_quality(&self) -> f32 { unsafe { ffi::World_get_tree_quality(self.ptr()) } } pub fn set_gravity(&mut self, gravity: &Vec2) { unsafe { ffi::World_set_gravity(self.mut_ptr(), gravity) } } pub fn gravity(&self) -> Vec2 { unsafe { ffi::World_get_gravity(self.ptr()) } } pub fn is_locked(&self) -> bool { unsafe { ffi::World_is_locked(self.ptr()) } } pub fn set_auto_clearing_forces(&mut self, flag: bool) { unsafe { ffi::World_set_auto_clear_forces(self.mut_ptr(), flag) } } pub fn is_auto_clearing_forces(&self) -> bool { unsafe { ffi::World_get_auto_clear_forces(self.ptr()) } } pub fn shift_origin(&mut self, origin: &Vec2) { unsafe { ffi::World_shift_origin(self.mut_ptr(), origin) } } pub fn profile<'a>(&'a self) -> &'a Profile { unsafe { &*ffi::World_get_profile(self.ptr()) } } pub fn dump(&mut self) { unsafe { ffi::World_dump(self.mut_ptr()) } } } impl<U: UserDataTypes> Drop for World<U> { fn drop(&mut self) { unsafe { ffi::World_drop(self.mut_ptr()) } } } pub struct ContactIterMut<'a> { ptr: *mut ffi::Contact, phantom: PhantomData<&'a ()>, } impl<'a> Iterator for ContactIterMut<'a> { type Item = WrappedRefMut<'a, Contact>; fn next(&mut self) -> Option<Self::Item> { if self.ptr.is_null() { None } else { unsafe { let next = ffi::Contact_get_next(self.ptr); Some(WrappedRefMut::new(Contact::from_ffi(mem::replace(&mut self.ptr, next)))) } } } } pub struct ContactIter<'a> { ptr: *const ffi::Contact, phantom: PhantomData<&'a ()>, } impl<'a> Iterator for ContactIter<'a> { type Item = WrappedRef<'a, Contact>; fn next(&mut self) -> Option<Self::Item> { if self.ptr.is_null() { None } else { unsafe { let next = ffi::Contact_get_next_const(self.ptr); Some
random
[ { "content": "pub fn step<U: UserDataTypes>(data: &mut Data<U>, dt: f32) {\n\n data.world.step(dt, VELOCITY_ITERATIONS, POSITION_ITERATIONS);\n\n}\n\n\n\nimpl<U: UserDataTypes> Test<U> for () {}\n\n\n\nimpl<F, U: UserDataTypes> Test<U> for F\n\n where F: FnMut(&Input, &mut Data<U>)\n\n{\n\n fn process_input(&mut self, i: &Input, d: &mut Data<U>) {\n\n self(i, d);\n\n }\n\n}\n\n\n\npub struct Data<U: UserDataTypes> {\n\n pub world: b2::World<U>,\n\n pub camera: Camera,\n\n pub draw_flags: b2::DrawFlags\n\n}\n\n\n", "file_path": "testbed/src/lib.rs", "rank": 0, "score": 225389.14023515995 }, { "content": "fn create_body(world: &mut World) -> b2::BodyHandle {\n\n let def = b2::BodyDef {\n\n body_type: b2::BodyType::Dynamic,\n\n angular_damping: 2.,\n\n linear_damping: 0.5,\n\n position: b2::Vec2 { x: 0., y: -18. },\n\n angle: b2::PI,\n\n allow_sleep: false,\n\n .. b2::BodyDef::new()\n\n };\n\n\n\n let handle = world.create_body(&def);\n\n let mut body = world.body_mut(handle);\n\n\n\n let mut f_def = b2::FixtureDef::new();\n\n\n\n let mut create_fixture = |transform: &b2::Transform, density| {\n\n let vertices = [\n\n transform * b2::Vec2 { x: -1., y: 0. },\n\n transform * b2::Vec2 { x: 1., y: 0. },\n", "file_path": "testbed/examples/apply_force.rs", "rank": 1, "score": 205394.8900758852 }, { "content": "fn create_bodies(world: &mut World) -> Vec<b2::BodyHandle> {\n\n let shape = b2::PolygonShape::new_box(0.5, 0.5);\n\n\n\n let mut bodies = Vec::new();\n\n\n\n for &(x, y) in &[(-5., 5.), (5., 5.), (5., 15.), (-5., 15.)] {\n\n let bd = b2::BodyDef {\n\n body_type: b2::BodyType::Dynamic,\n\n position: b2::Vec2 { x: x, y: y },\n\n .. 
b2::BodyDef::new()\n\n };\n\n\n\n let body = world.create_body(&bd);\n\n world.body_mut(body).create_fast_fixture(&shape, 5.);\n\n bodies.push(body);\n\n }\n\n\n\n bodies\n\n}\n\n\n", "file_path": "testbed/examples/web.rs", "rank": 2, "score": 201627.9447323862 }, { "content": "fn try_grab<U>(world: &mut b2::World<U>, p: b2::Vec2, dummy: b2::BodyHandle) -> Option<b2::JointHandle>\n\n where U: UserDataTypes, U::BodyData: Default, U::JointData: Default\n\n{\n\n match query_point(world, p) {\n\n None => None,\n\n Some(body_h) => {\n\n let mass;\n\n let center;\n\n {\n\n let mut body = world.body_mut(body_h);\n\n mass = body.mass();\n\n center = *body.world_center();\n\n body.set_awake(true);\n\n }\n\n\n\n let mut j_def = b2::MouseJointDef::new(dummy, body_h);\n\n j_def.target = center;\n\n j_def.max_force = 500. * mass;\n\n Some(world.create_joint(&j_def))\n\n }\n\n }\n\n}\n\n\n", "file_path": "testbed/src/lib.rs", "rank": 3, "score": 200779.74469464505 }, { "content": "pub fn debug_draw<U, G>(world: &mut b2::World<U>,\n\n flags: b2::DrawFlags,\n\n transform: [[f64; 3]; 2],\n\n c: Context,\n\n g: &mut G)\n\n where U: UserDataTypes,\n\n G: Graphics\n\n{\n\n let mut session = Session {\n\n c: c,\n\n g: g,\n\n transform: transform,\n\n };\n\n world.draw_debug_data(&mut session, flags);\n\n}\n\n\n", "file_path": "testbed/src/debug_draw.rs", "rank": 4, "score": 200023.60814800602 }, { "content": "fn create_ground(world: &mut World) -> b2::BodyHandle {\n\n let bd = b2::BodyDef {\n\n body_type: b2::BodyType::Static,\n\n .. b2::BodyDef::new()\n\n };\n\n\n\n let ground = world.create_body(&bd);\n\n\n\n let mut shape = b2::EdgeShape::new();\n\n shape.set(&b2::Vec2 { x: -40., y: 0. }, &b2::Vec2 { x: 40., y: 0. 
});\n\n world.body_mut(ground).create_fast_fixture(&shape, 0.1);\n\n\n\n ground\n\n}\n\n\n", "file_path": "testbed/examples/web.rs", "rank": 5, "score": 199115.01295470554 }, { "content": "fn create_ground(world: &mut World) -> b2::BodyHandle {\n\n let bd = b2::BodyDef {\n\n body_type: b2::BodyType::Static,\n\n .. b2::BodyDef::new()\n\n };\n\n\n\n let ground = world.create_body(&bd);\n\n\n\n let mut shape = b2::EdgeShape::new();\n\n shape.set(&b2::Vec2 { x: -40., y: 0. }, &b2::Vec2 { x: 40., y: 0. });\n\n world.body_mut(ground).create_fast_fixture(&shape, 0.);\n\n\n\n ground\n\n}\n\n\n", "file_path": "testbed/examples/breakable.rs", "rank": 6, "score": 199115.01295470554 }, { "content": "fn create_ground(world: &mut World) -> b2::BodyHandle {\n\n let def = b2::BodyDef {\n\n position: b2::Vec2 { x: 0., y: 0. },\n\n .. b2::BodyDef::new()\n\n };\n\n\n\n let handle = world.create_body(&def);\n\n let mut ground = world.body_mut(handle);\n\n\n\n let mut edge = b2::EdgeShape::new();\n\n let top_right = b2::Vec2 { x: 20., y: 20. };\n\n let top_left = b2::Vec2 { x: -20., y: 20. };\n\n let bot_left = b2::Vec2 { x: -20., y: -20. };\n\n let bot_right = b2::Vec2 { x: 20., y: -20. };\n\n\n\n let mut def = b2::FixtureDef {\n\n density: 0.,\n\n restitution: 0.4,\n\n .. b2::FixtureDef::new()\n\n };\n", "file_path": "testbed/examples/apply_force.rs", "rank": 7, "score": 194769.36009428115 }, { "content": "fn create_joints(world: &mut World,\n\n ground: b2::BodyHandle,\n\n bodies: &[b2::BodyHandle]) {\n\n let mut create_joint = |body_a, body_b, local_anchor_a, local_anchor_b| {\n\n let p1 = world.body(body_a).world_point(&local_anchor_a);\n\n let p2 = world.body(body_b).world_point(&local_anchor_b);\n\n\n\n let jd = b2::DistanceJointDef {\n\n frequency: 2.,\n\n damping_ratio: 0.,\n\n local_anchor_a: local_anchor_a,\n\n local_anchor_b: local_anchor_b,\n\n length: (p2 - p1).norm(),\n\n .. 
b2::DistanceJointDef::new(body_a, body_b)\n\n };\n\n\n\n world.create_joint(&jd)\n\n };\n\n\n\n for (i, &(ax, ay, bx, by)) in [(-10., 0., -0.5, -0.5),\n", "file_path": "testbed/examples/web.rs", "rank": 8, "score": 192335.5374263248 }, { "content": "pub fn cross_vv(a: Vec2, b: Vec2) -> f32 {\n\n a.x * b.y - a.y * b.x\n\n}\n\n\n", "file_path": "src/common/math.rs", "rank": 9, "score": 191684.9837963039 }, { "content": "pub fn cross_vs(v: Vec2, s: f32) -> Vec2 {\n\n Vec2 {\n\n x: s * v.y,\n\n y: -s * v.x,\n\n }\n\n}\n\n\n", "file_path": "src/common/math.rs", "rank": 10, "score": 191684.98379630386 }, { "content": "pub fn cross_sv(s: f32, v: Vec2) -> Vec2 {\n\n Vec2 {\n\n x: -s * v.y,\n\n y: s * v.x,\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Clone, Copy, PartialEq, Debug)]\n\npub struct Rot {\n\n pub sin: f32,\n\n pub cos: f32,\n\n}\n\n\n\nimpl Rot {\n\n pub fn from_angle(angle: f32) -> Rot {\n\n Rot {\n\n sin: angle.sin(),\n\n cos: angle.cos(),\n\n }\n", "file_path": "src/common/math.rs", "rank": 11, "score": 191684.9837963039 }, { "content": "fn create_ground(world: &mut World) -> b2::BodyHandle {\n\n let def = b2::BodyDef {\n\n position: b2::Vec2 { x: 0., y: 17. },\n\n .. b2::BodyDef::new()\n\n };\n\n\n\n world.create_body(&def)\n\n}\n\n\n", "file_path": "testbed/examples/basic_slider_crank.rs", "rank": 12, "score": 190680.13887886406 }, { "content": "fn create_crank(world: &mut World, ground: b2::BodyHandle) -> b2::BodyHandle {\n\n let shape = b2::PolygonShape::new_box(4., 1.);\n\n\n\n let b_def = b2::BodyDef {\n\n body_type: b2::BodyType::Dynamic,\n\n position: b2::Vec2 { x: -8., y: 20. },\n\n .. b2::BodyDef::new()\n\n };\n\n\n\n let handle = world.create_body(&b_def);\n\n world.body_mut(handle).create_fast_fixture(&shape, 2.);\n\n\n\n let mut j_def = b2::RevoluteJointDef::new(ground, handle);\n\n j_def.init(world, ground, handle, &b2::Vec2 { x: -12., y: 20. 
});\n\n world.create_joint(&j_def);\n\n\n\n handle\n\n}\n\n\n", "file_path": "testbed/examples/basic_slider_crank.rs", "rank": 13, "score": 187674.58173962357 }, { "content": "pub trait JointDef {\n\n fn joint_type() -> JointType where Self: Sized;\n\n\n\n #[doc(hidden)]\n\n unsafe fn create<U: UserDataTypes>(&self, world: &mut World<U>) -> *mut ffi::Joint;\n\n}\n\n\n\npub struct MetaJoint<U: UserDataTypes> {\n\n joint: UnknownJoint,\n\n user_data: Box<InternalUserData<Joint, U::JointData>>,\n\n}\n\n\n\nimpl<U: UserDataTypes> MetaJoint<U> {\n\n #[doc(hidden)]\n\n pub unsafe fn new(ptr: *mut ffi::Joint, handle: JointHandle, custom: U::JointData) -> Self {\n\n let mut j = MetaJoint {\n\n joint: UnknownJoint::from_ffi(ptr),\n\n user_data: Box::new(InternalUserData {\n\n handle: handle,\n\n custom: custom,\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 14, "score": 180796.6937967671 }, { "content": "fn ungrab<U>(world: &mut b2::World<U>, grabbing: &mut Option<b2::JointHandle>)\n\n where U: UserDataTypes, U::BodyData: Default, U::JointData: Default\n\n{\n\n grabbing.take().map(|j| world.destroy_joint(j));\n\n}\n\n\n", "file_path": "testbed/src/lib.rs", "rank": 15, "score": 179600.58634689526 }, { "content": "pub trait ContactFilter<U: UserDataTypes>: Any {\n\n fn should_collide(&mut self, body_a: BodyAccess<U>, fixture_a: FixtureAccess<U>,\n\n body_b: BodyAccess<U>, fixture_b: FixtureAccess<U>) -> bool;\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct ContactFilterLink {\n\n ptr: *mut ffi::ContactFilterLink,\n\n object: Option<Box<Any>>,\n\n}\n\n\n\nwrap! 
{ ffi::ContactFilterLink => custom ContactFilterLink }\n\n\n\nimpl ContactFilterLink {\n\n pub unsafe fn new() -> Self {\n\n ContactFilterLink {\n\n ptr: ffi::ContactFilterLink_alloc(),\n\n object: None,\n\n }\n\n }\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 16, "score": 179393.34767907736 }, { "content": "pub trait ContactListener<U: UserDataTypes>: Any {\n\n fn begin_contact(&mut self, _: ContactAccess<U>) {}\n\n fn end_contact(&mut self, _: ContactAccess<U>) {}\n\n fn pre_solve(&mut self, _: ContactAccess<U>, _: &Manifold) {}\n\n fn post_solve(&mut self, _: ContactAccess<U>, _: &ContactImpulse) {}\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct ContactListenerLink {\n\n ptr: *mut ffi::ContactListenerLink,\n\n object: Option<Box<Any>>,\n\n}\n\n\n\nwrap! { ffi::ContactListenerLink => custom ContactListenerLink }\n\n\n\nimpl ContactListenerLink {\n\n pub unsafe fn new() -> Self {\n\n ContactListenerLink {\n\n ptr: ffi::ContactListenerLink_alloc(),\n\n object: None,\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 17, "score": 179376.3075056551 }, { "content": "fn query_point<U>(world: &b2::World<U>, p: b2::Vec2) -> Option<b2::BodyHandle>\n\n where U: UserDataTypes, U::BodyData: Default, U::JointData: Default\n\n{\n\n let d = b2::Vec2 {\n\n x: 0.001,\n\n y: 0.001,\n\n };\n\n let aabb = b2::AABB {\n\n lower: p - d,\n\n upper: p + d,\n\n };\n\n\n\n let mut result = None;\n\n {\n\n let mut callback = |body_h: b2::BodyHandle, fixture_h: b2::FixtureHandle| {\n\n let body = world.body(body_h);\n\n let fixture = body.fixture(fixture_h);\n\n\n\n if body.body_type() != b2::BodyType::Static && fixture.test_point(&p) {\n\n\n", "file_path": "testbed/src/lib.rs", "rank": 18, "score": 163262.264594967 }, { "content": "struct ContactFilterLink: public b2ContactFilter {\n\n ContactFilterLink() {}\n\n ~ContactFilterLink() {}\n\n\n\n bool ShouldCollide(b2Fixture* fixture_a, b2Fixture* fixture_b) {\n\n return should_collide(object, fixture_a, fixture_b);\n\n 
}\n\n\n\n RustObject object;\n\n ShouldCollideCB should_collide;\n\n};\n\n\n\nContactFilterLink* ContactFilterLink_alloc() {\n\n return new ContactFilterLink();\n\n}\n\n\n\nvoid ContactFilterLink_bind(ContactFilterLink* self,\n\n RustObject o,\n\n ShouldCollideCB sc) {\n\n self->object = o;\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 19, "score": 160953.08239566121 }, { "content": "struct ContactListenerLink: public b2ContactListener {\n\n ContactListenerLink() {}\n\n ~ContactListenerLink() {}\n\n\n\n void BeginContact(b2Contact* contact) {\n\n begin_contact(object, contact);\n\n }\n\n void EndContact(b2Contact* contact) {\n\n end_contact(object, contact);\n\n }\n\n void PreSolve(b2Contact* contact, const b2Manifold* old_manifold) {\n\n pre_solve(object, contact, old_manifold);\n\n }\n\n void PostSolve(b2Contact* contact, const b2ContactImpulse* impulse) {\n\n post_solve(object, contact, impulse);\n\n }\n\n\n\n RustObject object;\n\n BeginContactCB begin_contact;\n\n EndContactCB end_contact;\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 20, "score": 160929.3452398723 }, { "content": "fn create_cubes(world: &mut World,\n\n ground: b2::BodyHandle) {\n\n let shape = b2::PolygonShape::new_box(0.5, 0.5);\n\n\n\n let mut f_def = b2::FixtureDef {\n\n density: 1.,\n\n friction: 0.3,\n\n .. b2::FixtureDef::new()\n\n };\n\n\n\n let mut b_def = b2::BodyDef {\n\n body_type: b2::BodyType::Dynamic,\n\n .. b2::BodyDef::new()\n\n };\n\n\n\n for i in 0..10 {\n\n b_def.position = b2::Vec2 { x: 0., y: 15. 
- 1.54*i as f32 };\n\n\n\n let handle = world.create_body(&b_def);\n\n let inertia;\n", "file_path": "testbed/examples/apply_force.rs", "rank": 21, "score": 160255.6720115191 }, { "content": "pub trait Joint: WrappedBase<ffi::Joint> + FromFFI<ffi::Joint> {\n\n fn handle(&self) -> JointHandle {\n\n unsafe { self.base_ptr().handle() }\n\n }\n\n \n\n fn assumed_type() -> JointType where Self: Sized;\n\n\n\n fn get_type(&self) -> JointType {\n\n unsafe { ffi::Joint_get_type(self.base_ptr()) }\n\n }\n\n\n\n fn body_a(&self) -> BodyHandle {\n\n // we don't need &mut self because nothing is actually mutated here\n\n unsafe { ffi::Joint_get_body_a(self.base_ptr() as *mut _).handle() }\n\n }\n\n\n\n fn body_b(&self) -> BodyHandle {\n\n // we don't need &mut self because nothing is actually mutated here\n\n unsafe { ffi::Joint_get_body_b(self.base_ptr() as *mut _).handle() }\n\n }\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 22, "score": 159462.19013717514 }, { "content": "pub fn run<T, U>(mut test: T, mut data: Data<U>, name: &str, mut width: u32, mut height: u32)\n\n where T: Test<U>, U: UserDataTypes, U::BodyData: Default, U::JointData: Default\n\n{\n\n let opengl = OpenGL::V3_2;\n\n\n\n let mut window: PistonWindow = WindowSettings::new(format!(\"{} Test\", name),\n\n [width, height])\n\n .opengl(opengl)\n\n .exit_on_esc(true)\n\n .samples(4)\n\n .build()\n\n .unwrap();\n\n window.set_max_fps(UPDATES_PER_SECOND);\n\n window.set_ups(UPDATES_PER_SECOND);\n\n\n\n let mut running = false;\n\n let mut mouse_position = b2::Vec2 { x: 0., y: 0. 
};\n\n let mut grabbing = None;\n\n\n\n let dummy = data.world.create_body(&b2::BodyDef::new());\n", "file_path": "testbed/src/lib.rs", "rank": 23, "score": 158165.93018567326 }, { "content": "fn create_piston(world: &mut World,\n\n ground: b2::BodyHandle,\n\n connecting_rod: b2::BodyHandle) {\n\n let shape = b2::PolygonShape::new_box(3., 3.);\n\n\n\n let b_def = b2::BodyDef {\n\n body_type: b2::BodyType::Dynamic,\n\n fixed_rotation: true,\n\n position: b2::Vec2 { x: 12., y: 20. },\n\n .. b2::BodyDef::new()\n\n };\n\n\n\n let handle = world.create_body(&b_def);\n\n world.body_mut(handle).create_fast_fixture(&shape, 2.);\n\n\n\n let mut j_def = b2::RevoluteJointDef::new(connecting_rod, handle);\n\n j_def.init(world, connecting_rod, handle, &b2::Vec2 { x: 12., y: 20. });\n\n world.create_joint(&j_def);\n\n\n\n let mut j_def = b2::PrismaticJointDef::new(ground, handle);\n\n j_def.init(world, ground, handle,\n\n &b2::Vec2 { x: 12., y: 17. },\n\n &b2::Vec2 { x: 1., y: 0. });\n\n world.create_joint(&j_def);\n\n}\n", "file_path": "testbed/examples/basic_slider_crank.rs", "rank": 24, "score": 157319.59024682624 }, { "content": "fn create_connecting_rod(world: &mut World,\n\n crank: b2::BodyHandle) -> b2::BodyHandle {\n\n let shape = b2::PolygonShape::new_box(8., 1.);\n\n\n\n let b_def = b2::BodyDef {\n\n body_type: b2::BodyType::Dynamic,\n\n position: b2::Vec2 { x: 4., y: 20. },\n\n .. b2::BodyDef::new()\n\n };\n\n\n\n let handle = world.create_body(&b_def);\n\n world.body_mut(handle).create_fast_fixture(&shape, 2.);\n\n\n\n let mut j_def = b2::RevoluteJointDef::new(crank, handle);\n\n j_def.init(world, crank, handle, &b2::Vec2 { x: -4., y: 20. 
});\n\n world.create_joint(&j_def);\n\n\n\n handle\n\n}\n\n\n", "file_path": "testbed/examples/basic_slider_crank.rs", "rank": 25, "score": 154560.58650659534 }, { "content": "fn update_grab<U>(world: &b2::World<U>, target: b2::Vec2, grabbing: Option<b2::JointHandle>)\n\n where U: UserDataTypes, U::BodyData: Default, U::JointData: Default\n\n{\n\n grabbing.map(|j| {\n\n let mut j = world.joint_mut(j);\n\n match **j {\n\n b2::UnknownJoint::Mouse(ref mut j) => {\n\n j.set_target(&target);\n\n }\n\n _ => panic!(\"expected mouse joint\")\n\n }\n\n });\n\n}\n", "file_path": "testbed/src/lib.rs", "rank": 26, "score": 154321.82745569554 }, { "content": "fn circle_rect(o: &b2::Vec2, r: f32) -> [f64; 4] {\n\n let r = r as f64;\n\n let d = 2. * r;\n\n [o.x as f64 - r, o.y as f64 - r, d, d]\n\n}\n\n\n", "file_path": "testbed/src/debug_draw.rs", "rank": 27, "score": 152166.16402867337 }, { "content": "pub trait QueryCallback {\n\n fn report_fixture(&mut self, body: BodyHandle, fixture: FixtureHandle) -> bool;\n\n}\n\n\n\nimpl<F> QueryCallback for F\n\n where F: FnMut(BodyHandle, FixtureHandle) -> bool\n\n{\n\n fn report_fixture(&mut self, body: BodyHandle, fixture: FixtureHandle) -> bool {\n\n self(body, fixture)\n\n }\n\n}\n\n\n\nwrap! 
{ ffi::QueryCallbackLink => #[doc(hidden)] pub QueryCallbackLink }\n\n\n\nimpl QueryCallbackLink {\n\n pub unsafe fn new() -> Self {\n\n QueryCallbackLink::from_ffi(ffi::QueryCallbackLink_alloc())\n\n }\n\n\n\n pub unsafe fn use_with<C: QueryCallback>(&mut self, callback: &mut C) -> *mut ffi::QueryCallback {\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 28, "score": 150400.15956417535 }, { "content": "pub trait Draw {\n\n fn draw_polygon(&mut self, vertices: &[Vec2], color: &Color);\n\n fn draw_solid_polygon(&mut self, vertices: &[Vec2], color: &Color);\n\n fn draw_circle(&mut self, center: &Vec2, radius: f32, color: &Color);\n\n fn draw_solid_circle(&mut self, center: &Vec2, radius: f32, axis: &Vec2, color: &Color);\n\n fn draw_segment(&mut self, p1: &Vec2, p2: &Vec2, color: &Color);\n\n fn draw_transform(&mut self, xf: &Transform);\n\n}\n\n \n\nunsafe extern \"C\" fn draw_polygon<D: Draw>(object: ffi::Any,\n\n vertices: *const Vec2,\n\n count: i32,\n\n color: *const Color) {\n\n // color comes from a C++ &\n\n let draw = mem::transmute::<_, &mut D>(object);\n\n let vertices = ::std::slice::from_raw_parts(vertices, count as usize);\n\n draw.draw_polygon(vertices, &*color)\n\n}\n\n\n\nunsafe extern \"C\" fn draw_solid_polygon<D: Draw>(object: ffi::Any,\n", "file_path": "src/common/mod.rs", "rank": 29, "score": 148503.19795495807 }, { "content": "pub trait RayCastCallback {\n\n fn report_fixture(&mut self,\n\n body: BodyHandle,\n\n fixture: FixtureHandle,\n\n p: &Vec2,\n\n normal: &Vec2,\n\n fraction: f32)\n\n -> f32;\n\n}\n\n\n\nimpl<F> RayCastCallback for F\n\n where F: FnMut(BodyHandle, FixtureHandle, &Vec2, &Vec2, f32) -> f32\n\n{\n\n fn report_fixture(&mut self,\n\n body: BodyHandle,\n\n fixture: FixtureHandle,\n\n p: &Vec2,\n\n normal: &Vec2,\n\n fraction: f32)\n\n -> f32 {\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 30, "score": 147057.75254451053 }, { "content": "type World = b2::World<NoUserData>;\n\n\n", "file_path": 
"testbed/examples/simple.rs", "rank": 31, "score": 145772.17426616623 }, { "content": "type World = b2::World<NoUserData>;\n\n\n", "file_path": "testbed/examples/breakable.rs", "rank": 32, "score": 145772.17426616623 }, { "content": "type World = b2::World<NoUserData>;\n\n\n", "file_path": "testbed/examples/web.rs", "rank": 33, "score": 145772.17426616623 }, { "content": "type World = b2::World<NoUserData>;\n\n\n", "file_path": "testbed/examples/apply_force.rs", "rank": 34, "score": 142635.48345356178 }, { "content": "struct DestructionListenerLink: public b2DestructionListener {\n\n DestructionListenerLink() {}\n\n ~DestructionListenerLink() {}\n\n\n\n void SayGoodbye(b2Joint* joint) {\n\n say_goodbye_to_joint(object, joint);\n\n }\n\n void SayGoodbye(b2Fixture* fixture) {\n\n say_goodbye_to_fixture(object, fixture);\n\n }\n\n\n\n RustObject object;\n\n SayGoodbyeToJointCB say_goodbye_to_joint;\n\n SayGoodbyeToFixtureCB say_goodbye_to_fixture;\n\n};\n\n\n\nDestructionListenerLink* DestructionListenerLink_alloc() {\n\n return new DestructionListenerLink();\n\n}\n\n\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 35, "score": 139907.7930209555 }, { "content": "type World = b2::World<NoUserData>;\n\n\n", "file_path": "testbed/examples/basic_slider_crank.rs", "rank": 36, "score": 139694.3685408617 }, { "content": "struct QueryCallbackLink: public b2QueryCallback {\n\n QueryCallbackLink() {}\n\n ~QueryCallbackLink() {}\n\n\n\n bool ReportFixture(b2Fixture* fixture) {\n\n return report_fixture(object, fixture);\n\n }\n\n\n\n RustObject object;\n\n QCReportFixtureCB report_fixture;\n\n};\n\n\n\nQueryCallbackLink* QueryCallbackLink_alloc() {\n\n return new QueryCallbackLink();\n\n}\n\n\n\nvoid QueryCallbackLink_bind(QueryCallbackLink* self,\n\n RustObject object,\n\n QCReportFixtureCB rf) {\n\n self->object = object;\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 37, "score": 139163.1911446848 }, { "content": "struct 
RayCastCallbackLink: public b2RayCastCallback {\n\n RayCastCallbackLink() {}\n\n ~RayCastCallbackLink() {}\n\n\n\n f32 ReportFixture(b2Fixture* fixture,\n\n const b2Vec2& point,\n\n const b2Vec2& normal,\n\n f32 fraction) {\n\n return report_fixture(object, fixture, &point, &normal, fraction);\n\n }\n\n\n\n RustObject object;\n\n RCCReportFixtureCB report_fixture;\n\n};\n\n\n\nRayCastCallbackLink* RayCastCallbackLink_alloc() {\n\n return new RayCastCallbackLink();\n\n}\n\n\n\nvoid RayCastCallbackLink_bind(RayCastCallbackLink* self,\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 38, "score": 134293.33774265 }, { "content": "fn convert_color(c: &b2::Color) -> [f32; 4] {\n\n [c.r, c.g, c.b, c.a]\n\n}\n\n\n\nconst RED: b2::Color = b2::Color {\n\n r: 1.,\n\n g: 0.,\n\n b: 0.,\n\n a: 1.,\n\n};\n\nconst GREEN: b2::Color = b2::Color {\n\n r: 0.,\n\n g: 1.,\n\n b: 0.,\n\n a: 1.,\n\n};\n\nconst BLUE: b2::Color = b2::Color {\n\n r: 0.,\n\n g: 0.,\n\n b: 1.,\n\n a: 1.,\n\n};\n", "file_path": "testbed/src/debug_draw.rs", "rank": 39, "score": 133739.61453357604 }, { "content": "pub trait UserDataTypes {\n\n type BodyData: Sized;\n\n type JointData: Sized;\n\n type FixtureData: Sized;\n\n}\n\n\n\npub struct NoUserData;\n\n\n\nimpl UserDataTypes for NoUserData {\n\n type BodyData = ();\n\n type JointData = ();\n\n type FixtureData = ();\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct InternalUserData<T: ?Sized, U> {\n\n pub handle: TypedHandle<T>,\n\n pub custom: U,\n\n}\n\n\n", "file_path": "src/user_data.rs", "rank": 40, "score": 127570.84651973765 }, { "content": "type CompleteJointSnapshot<U: UserDataTypes> = (JointId, JointSnapshot, U::JointData);\n\n\n\nimpl<U: UserDataTypes> WorldSnapshot<U>\n\n where U::BodyData: Debug + Serialize + DeserializeOwned,\n\n U::FixtureData: Debug + Serialize + DeserializeOwned,\n\n U::JointData: Debug + Serialize + DeserializeOwned,\n\n{\n\n pub fn take(world: &World<U>) -> Self\n\n where U::BodyData: Serialize + Clone,\n\n 
U::FixtureData: Serialize + Clone,\n\n U::JointData: Serialize + Clone\n\n {\n\n let body_snapshots: Vec<_> = world.bodies()\n\n .map(|(_, body)| {\n\n let body: &MetaBody<U> = &body.borrow();\n\n let fixture_snapshots: Vec<_> = body.fixtures()\n\n .map(|(_, fixture)| {\n\n let fixture: &MetaFixture<U> = &fixture.borrow();\n\n (FixtureSnapshot::take(fixture), fixture.user_data().clone())\n\n })\n", "file_path": "src/serialize.rs", "rank": 41, "score": 124434.97693506171 }, { "content": "pub fn get_point_states(m1: &Manifold,\n\n m2: &Manifold)\n\n -> ([PointState; MAX_MANIFOLD_POINTS],\n\n [PointState; MAX_MANIFOLD_POINTS]) {\n\n unsafe {\n\n let (mut s1, mut s2) = mem::zeroed();\n\n ffi::get_point_states(&mut s1, &mut s2, m1, m2);\n\n (s1, s2)\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Clone, Debug)]\n\npub struct RayCastInput {\n\n pub p1: Vec2,\n\n pub p2: Vec2,\n\n pub max_fraction: f32,\n\n}\n\n\n\n#[repr(C)]\n", "file_path": "src/collision/mod.rs", "rank": 42, "score": 123342.01272941299 }, { "content": "pub fn test_overlap<A, B>(shape_a: &A,\n\n index_a: i32,\n\n xf_a: &Transform,\n\n shape_b: &B,\n\n index_b: i32,\n\n xf_b: &Transform)\n\n -> bool\n\n where A: Shape,\n\n B: Shape\n\n{\n\n unsafe {\n\n ffi::test_overlap(shape_a.base_ptr(),\n\n index_a,\n\n shape_b.base_ptr(),\n\n index_b,\n\n xf_a,\n\n xf_b)\n\n }\n\n}\n\n\n", "file_path": "src/collision/mod.rs", "rank": 43, "score": 120440.59262899781 }, { "content": "#[doc(hidden)]\n\npub trait RawUserDataMut: RawUserData {\n\n unsafe fn internal_user_data_mut<T: ?Sized, U>(self) -> *mut InternalUserData<T, U>;\n\n unsafe fn set_internal_user_data<T: ?Sized, U>(self, *mut InternalUserData<T, U>);\n\n}\n\n\n\nmacro_rules! 
impl_raw_user_data {\n\n { $raw:ty, $getter:path, $setter:path } => {\n\n impl RawUserData for *const $raw {\n\n unsafe fn internal_user_data<T: ?Sized, U>(self) -> *const InternalUserData<T, U> {\n\n $getter(self) as *const InternalUserData<T, U>\n\n }\n\n }\n\n\n\n impl RawUserData for *mut $raw {\n\n unsafe fn internal_user_data<T: ?Sized, U>(self) -> *const InternalUserData<T, U> {\n\n $getter(self) as *const InternalUserData<T, U>\n\n }\n\n }\n\n\n\n impl RawUserDataMut for *mut $raw {\n", "file_path": "src/user_data.rs", "rank": 44, "score": 118393.36644873045 }, { "content": "struct DrawLink: public b2Draw {\n\n DrawLink(): b2Draw() {}\n\n ~DrawLink() {}\n\n\n\n void DrawPolygon(const b2Vec2* vertices, i32 count,\n\n const b2Color& color) {\n\n draw_polygon(object, vertices, count, &color);\n\n }\n\n\n\n void DrawSolidPolygon(const b2Vec2* vertices, i32 count,\n\n const b2Color& color) {\n\n draw_solid_polygon(object, vertices, count, &color);\n\n }\n\n\n\n void DrawCircle(const b2Vec2& center, f32 radius,\n\n const b2Color& color) {\n\n draw_circle(object, &center, radius, &color);\n\n }\n\n\n\n void DrawSolidCircle(const b2Vec2& center, f32 radius,\n", "file_path": "frontend/common/draw.cpp", "rank": 45, "score": 118271.69855707865 }, { "content": "type CompleteBodySnapshot<U: UserDataTypes> = (BodyId, BodySnapshot, U::BodyData, Vec<CompleteFixtureSnapshot<U>>);\n", "file_path": "src/serialize.rs", "rank": 46, "score": 114074.74863333782 }, { "content": "fn polygon(vertices: &[b2::Vec2]) -> Vec<[f64; 2]> {\n\n vertices.iter().map(|v| [v.x as f64, v.y as f64]).collect()\n\n}\n\n\n", "file_path": "testbed/src/debug_draw.rs", "rank": 47, "score": 112599.45970501435 }, { "content": "pub trait Test<U: UserDataTypes> {\n\n fn process_input(&mut self, &Input, &mut Data<U>) {}\n\n\n\n fn step(&mut self, data: &mut Data<U>, dt: f32) {\n\n step(data, dt);\n\n }\n\n}\n\n\n", "file_path": "testbed/src/lib.rs", "rank": 48, "score": 105768.87495921842 }, { "content": 
"struct HandleEntry<E> {\n\n pub version: usize,\n\n pub inner: Option<RefCell<E>>,\n\n}\n\n\n\nimpl<E> HandleEntry<E> {\n\n fn new() -> HandleEntry<E> {\n\n HandleEntry {\n\n version: 0,\n\n inner: None,\n\n }\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct HandleMap<E, T: ?Sized = ()> {\n\n next_index: usize,\n\n availables: Vec<usize>,\n\n entries: VecMap<HandleEntry<E>>,\n\n phantom: PhantomData<T>,\n", "file_path": "src/handle.rs", "rank": 49, "score": 101475.91223884304 }, { "content": "pub trait Shape: WrappedBase<ffi::Shape> {\n\n fn shape_type(&self) -> ShapeType {\n\n unsafe { ffi::Shape_get_type(self.base_ptr()) }\n\n }\n\n\n\n fn child_count(&self) -> i32 {\n\n unsafe { ffi::Shape_get_child_count_virtual(self.base_ptr()) }\n\n }\n\n\n\n fn test_point(&self, xf: &Transform, p: &Vec2) -> bool {\n\n unsafe { ffi::Shape_test_point_virtual(self.base_ptr(), xf, p) }\n\n }\n\n\n\n fn ray_cast(&self,\n\n input: &RayCastInput,\n\n transform: &Transform,\n\n child_index: i32)\n\n -> RayCastOutput {\n\n unsafe {\n\n let mut output = mem::zeroed();\n", "file_path": "src/collision/shapes/mod.rs", "rank": 50, "score": 100873.67597104349 }, { "content": "#[doc(hidden)]\n\npub trait Wrapped<T> {\n\n unsafe fn ptr(&self) -> *const T;\n\n unsafe fn mut_ptr(&mut self) -> *mut T;\n\n}\n\n\n", "file_path": "src/wrap.rs", "rank": 51, "score": 98477.33837571465 }, { "content": "type UserData = NoUserData;\n", "file_path": "testbed/examples/breakable.rs", "rank": 52, "score": 96727.58660447581 }, { "content": "#[doc(hidden)]\n\npub trait WrappedBase<B> {\n\n unsafe fn base_ptr(&self) -> *const B;\n\n unsafe fn mut_base_ptr(&mut self) -> *mut B;\n\n}\n\n\n", "file_path": "src/wrap.rs", "rank": 53, "score": 95955.89782249685 }, { "content": "struct ContactListener {\n\n should_break: Rc<Cell<bool>>\n\n}\n\n\n\nimpl b2::ContactListener<UserData> for ContactListener {\n\n fn post_solve(&mut self, ca: ContactAccess<UserData>, impulse: &b2::ContactImpulse) {\n\n if 
!self.should_break.get() {\n\n let count = ca.contact.manifold().count as usize;\n\n \n\n let mut max_impulse = 0f32;\n\n for i in 0..count {\n\n max_impulse = max_impulse.max(impulse.normal_impulses[i]);\n\n }\n\n\n\n if max_impulse > 40. {\n\n self.should_break.set(true);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "testbed/examples/breakable.rs", "rank": 54, "score": 95141.47229009093 }, { "content": "pub trait UserData<U> {\n\n fn user_data(&self) -> &U;\n\n fn user_data_mut(&mut self) -> &mut U;\n\n\n\n fn set_user_data(&mut self, v: U) {\n\n *self.user_data_mut() = v;\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub mod ffi {\n\n pub use ffi::Any;\n\n pub use dynamics::body::ffi::Body;\n\n pub use dynamics::fixture::ffi::Fixture;\n\n pub use dynamics::joints::ffi::Joint;\n\n\n\n extern \"C\" {\n\n pub fn Body_get_user_data(slf: *const Body) -> Any;\n\n pub fn Body_set_user_data(slf: *mut Body, data: Any);\n\n pub fn Fixture_get_user_data(slf: *const Fixture) -> Any;\n\n pub fn Fixture_set_user_data(slf: *mut Fixture, data: Any);\n\n pub fn Joint_get_user_data(slf: *const Joint) -> Any;\n\n pub fn Joint_set_user_data(slf: *mut Joint, data: Any);\n\n }\n\n}\n", "file_path": "src/user_data.rs", "rank": 55, "score": 93495.67929028095 }, { "content": "#[doc(hidden)]\n\npub trait RawUserData: Sized {\n\n unsafe fn internal_user_data<T: ?Sized, U>(self) -> *const InternalUserData<T, U>;\n\n\n\n unsafe fn handle<T: ?Sized>(self) -> TypedHandle<T> {\n\n let internal = &*self.internal_user_data::<_, ()>();\n\n internal.handle\n\n }\n\n}\n\n\n", "file_path": "src/user_data.rs", "rank": 56, "score": 91289.0517674571 }, { "content": "#[doc(hidden)]\n\npub trait FromFFI<T> {\n\n unsafe fn from_ffi(ptr: *mut T) -> Self where Self: Sized;\n\n}\n\n\n\npub struct WrappedRefMut<'a, T> {\n\n object: Option<T>,\n\n phantom: PhantomData<&'a ()>,\n\n}\n\n\n\nimpl<'a, T> WrappedRefMut<'a, T> {\n\n #[doc(hidden)]\n\n pub unsafe fn new(t: T) -> WrappedRefMut<'a, T> {\n\n WrappedRefMut {\n\n 
object: Some(t),\n\n phantom: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T> Deref for WrappedRefMut<'a, T> {\n", "file_path": "src/wrap.rs", "rank": 57, "score": 88421.70216821288 }, { "content": "type CompleteFixtureSnapshot<U: UserDataTypes> = (FixtureSnapshot, U::FixtureData);\n", "file_path": "src/serialize.rs", "rank": 58, "score": 86476.92964712562 }, { "content": "struct Session<'a, G>\n\n where G: Graphics + 'a\n\n{\n\n c: Context,\n\n g: &'a mut G,\n\n transform: [[f64; 3]; 2],\n\n}\n\n\n\nimpl<'a, G> b2::Draw for Session<'a, G>\n\n where G: Graphics + 'a\n\n{\n\n fn draw_polygon(&mut self, vertices: &[b2::Vec2], color: &b2::Color) {\n\n let count = vertices.len();\n\n for i in 0..count {\n\n let a = &vertices[i];\n\n let b = &vertices[(i + 1) % count];\n\n self.draw_segment(a, b, color);\n\n }\n\n }\n\n\n", "file_path": "testbed/src/debug_draw.rs", "rank": 59, "score": 86219.65766377788 }, { "content": " fixture: *mut ffi::Fixture,\n\n point: *const Vec2,\n\n normal: *const Vec2,\n\n fraction: f32)\n\n -> f32 {\n\n // point and normal are coming from C++ &s\n\n let callback = mem::transmute::<_, &mut C>(object);\n\n let body_handle = WrappedRef::new(Fixture::from_ffi(fixture)).body();\n\n callback.report_fixture(body_handle, fixture.handle(), &*point, &*normal, fraction)\n\n}\n\n\n\nimpl Drop for RayCastCallbackLink {\n\n fn drop(&mut self) {\n\n unsafe { ffi::RayCastCallbackLink_drop(self.mut_ptr()) }\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub mod ffi {\n\n pub use ffi::Any;\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 60, "score": 70191.59064563505 }, { "content": " pub use collision::shapes::ffi::Shape;\n\n pub use dynamics::body::ffi::Body;\n\n pub use dynamics::fixture::ffi::{Fixture, Fixture_get_body};\n\n pub use dynamics::joints::ffi::Joint;\n\n pub use dynamics::contacts::ffi::{Contact, Contact_get_fixture_a, Contact_get_fixture_b};\n\n use common::math::Vec2;\n\n use collision::Manifold;\n\n use super::ContactImpulse;\n\n\n\n pub 
enum ContactFilter {}\n\n pub enum ContactFilterLink {}\n\n pub enum ContactListener {}\n\n pub enum ContactListenerLink {}\n\n pub enum QueryCallback {}\n\n pub enum QueryCallbackLink {}\n\n pub enum RayCastCallback {}\n\n pub enum RayCastCallbackLink {}\n\n\n\n extern \"C\" {\n\n pub fn ContactFilterLink_alloc() -> *mut ContactFilterLink;\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 61, "score": 70182.92512649698 }, { "content": " self(body, fixture, p, normal, fraction)\n\n }\n\n}\n\n\n\nwrap! { ffi::RayCastCallbackLink => #[doc(hidden)] pub RayCastCallbackLink }\n\n\n\nimpl RayCastCallbackLink {\n\n pub unsafe fn new() -> Self {\n\n RayCastCallbackLink::from_ffi(ffi::RayCastCallbackLink_alloc())\n\n }\n\n\n\n pub unsafe fn use_with<C: RayCastCallback>(&mut self, callback: &mut C) -> *mut ffi::RayCastCallback {\n\n ffi::RayCastCallbackLink_bind(self.mut_ptr(),\n\n mem::transmute(callback),\n\n rccl_report_fixture::<C>);\n\n ffi::RayCastCallbackLink_as_base(self.mut_ptr())\n\n }\n\n}\n\n\n\nunsafe extern \"C\" fn rccl_report_fixture<C: RayCastCallback>(object: ffi::Any,\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 62, "score": 70181.9245050905 }, { "content": " ffi::QueryCallbackLink_bind(self.mut_ptr(),\n\n mem::transmute(callback),\n\n qcl_report_fixture::<C>);\n\n ffi::QueryCallbackLink_as_base(self.mut_ptr())\n\n }\n\n}\n\n\n\nunsafe extern \"C\" fn qcl_report_fixture<C: QueryCallback>(object: ffi::Any,\n\n fixture: *mut ffi::Fixture)\n\n -> bool {\n\n let callback = mem::transmute::<_, &mut C>(object);\n\n let body_handle = WrappedRef::new(Fixture::from_ffi(fixture)).body();\n\n callback.report_fixture(body_handle, fixture.handle())\n\n}\n\n\n\nimpl Drop for QueryCallbackLink {\n\n fn drop(&mut self) {\n\n unsafe { ffi::QueryCallbackLink_drop(self.ptr) }\n\n }\n\n}\n\n\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 63, "score": 70180.67312907241 }, { "content": " \n\n pub unsafe fn use_with<F, U>(&mut self, mut 
filter: Box<F>) -> *mut ffi::ContactFilter\n\n where F: ContactFilter<U>, U: UserDataTypes\n\n {\n\n ffi::ContactFilterLink_bind(self.mut_ptr(),\n\n mem::transmute::<&mut F, _>(&mut *filter),\n\n cfl_should_collide::<F, U>);\n\n self.object = Some(filter);\n\n ffi::ContactFilterLink_as_base(self.mut_ptr())\n\n }\n\n}\n\n\n\nunsafe extern \"C\" fn cfl_should_collide<F, U>(object: ffi::Any,\n\n fixture_a: *mut ffi::Fixture,\n\n fixture_b: *mut ffi::Fixture)\n\n -> bool\n\n where F: ContactFilter<U>, U: UserDataTypes\n\n{\n\n let filter = mem::transmute::<_, &mut F>(object);\n\n body_access(ffi::Fixture_get_body(fixture_a), |ba|\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 64, "score": 70180.27243147416 }, { "content": "use std::mem;\n\nuse std::any::Any;\n\nuse std::ops::*;\n\nuse std::marker::PhantomData;\n\nuse wrap::*;\n\nuse common::math::Vec2;\n\nuse common::settings::MAX_MANIFOLD_POINTS;\n\nuse collision::Manifold;\n\nuse dynamics::world::BodyHandle;\n\nuse dynamics::body::{Body, FixtureHandle};\n\nuse dynamics::fixture::Fixture;\n\nuse dynamics::contacts::Contact;\n\nuse user_data::{InternalUserData, RawUserData, RawUserDataMut, UserData, UserDataTypes};\n\n\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 65, "score": 70178.98182041245 }, { "content": " pub fn ContactListenerLink_as_base(slf: *mut ContactListenerLink) -> *mut ContactListener;\n\n pub fn ContactListenerLink_drop(slf: *mut ContactListenerLink);\n\n pub fn QueryCallbackLink_alloc() -> *mut QueryCallbackLink;\n\n pub fn QueryCallbackLink_bind(slf: *mut QueryCallbackLink,\n\n object: Any,\n\n report_fixture: unsafe extern \"C\" fn(Any, *mut Fixture)\n\n -> bool);\n\n pub fn QueryCallbackLink_as_base(slf: *mut QueryCallbackLink) -> *mut QueryCallback;\n\n pub fn QueryCallbackLink_drop(slf: *mut QueryCallbackLink);\n\n pub fn RayCastCallbackLink_alloc() -> *mut RayCastCallbackLink;\n\n pub fn RayCastCallbackLink_bind(slf: *mut RayCastCallbackLink,\n\n object: Any,\n\n 
hit_fixture: unsafe extern \"C\" fn(Any,\n\n *mut Fixture,\n\n *const Vec2,\n\n *const Vec2,\n\n f32)\n\n -> f32);\n\n pub fn RayCastCallbackLink_as_base(slf: *mut RayCastCallbackLink) -> *mut RayCastCallback;\n\n pub fn RayCastCallbackLink_drop(slf: *mut RayCastCallbackLink);\n\n }\n\n}\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 66, "score": 70177.94731730143 }, { "content": " }\n\n }\n\n\n\n pub unsafe fn use_with<L, U>(&mut self, mut listener: Box<L>) -> *mut ffi::ContactListener\n\n where L: ContactListener<U>, U: UserDataTypes\n\n {\n\n ffi::ContactListenerLink_bind(self.mut_ptr(),\n\n mem::transmute::<&mut L, _>(&mut *listener), cll_begin_contact::<L, U>,\n\n cll_end_contact::<L, U>,\n\n cll_pre_solve::<L, U>,\n\n cll_post_solve::<L, U>);\n\n self.object = Some(listener);\n\n ffi::ContactListenerLink_as_base(self.mut_ptr())\n\n }\n\n}\n\n\n\nunsafe extern \"C\" fn cll_begin_contact<L, U>(object: ffi::Any,\n\n contact: *mut ffi::Contact)\n\n where L: ContactListener<U>, U: UserDataTypes\n\n{\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 67, "score": 70177.83226925446 }, { "content": " fixture_access(fixture_a, |fa|\n\n body_access(ffi::Fixture_get_body(fixture_b), |bb|\n\n fixture_access(fixture_b, |fb|\n\n filter.should_collide(ba, fa, bb, fb)))))\n\n}\n\n\n\nimpl Drop for ContactFilterLink {\n\n fn drop(&mut self) {\n\n unsafe { ffi::ContactFilterLink_drop(self.mut_ptr()) }\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct ContactImpulse {\n\n pub normal_impulses: [f32; MAX_MANIFOLD_POINTS],\n\n pub tangent_impulses: [f32; MAX_MANIFOLD_POINTS],\n\n pub count: i32,\n\n}\n\n\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 68, "score": 70177.64958222728 }, { "content": " fn deref(&self) -> &Fixture {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<'a, U: UserDataTypes> DerefMut for FixtureAccess<'a, U> {\n\n fn deref_mut(&mut self) -> &mut Fixture {\n\n &mut self.0\n\n }\n\n}\n\n\n\npub struct BodyAccess<'a, U: UserDataTypes>(&'a mut 
Body, PhantomData<U>);\n\n\n\n#[inline(always)]\n\nunsafe fn body_access<F, O, U>(body: *mut ffi::Body, f: F) -> O\n\n where F: for<'a> FnOnce(BodyAccess<'a, U>) -> O,\n\n U: UserDataTypes\n\n{\n\n let mut body = WrappedRefMut::new(Body::from_ffi(body));\n\n f(BodyAccess(&mut body, PhantomData))\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 69, "score": 70177.2488660125 }, { "content": "}\n\n\n\nunsafe extern \"C\" fn cll_post_solve<L, U>(object: ffi::Any,\n\n contact: *mut ffi::Contact,\n\n impulse: *const ContactImpulse)\n\n where L: ContactListener<U>, U: UserDataTypes\n\n{\n\n assert!(!impulse.is_null());\n\n let listener = mem::transmute::<_, &mut L>(object);\n\n contact_access(contact, |c| listener.post_solve(c, &*impulse))\n\n}\n\n\n\nimpl Drop for ContactListenerLink {\n\n fn drop(&mut self) {\n\n unsafe { ffi::ContactListenerLink_drop(self.mut_ptr()) }\n\n }\n\n}\n\n\n\npub struct ContactAccess<'a, U: UserDataTypes> {\n\n pub contact: &'a mut Contact,\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 70, "score": 70176.94870876636 }, { "content": " pub fn ContactFilterLink_bind(slf: *mut ContactFilterLink,\n\n object: Any,\n\n should_collide: unsafe extern \"C\" fn(Any,\n\n *mut Fixture,\n\n *mut Fixture)\n\n -> bool);\n\n pub fn ContactFilterLink_as_base(slf: *mut ContactFilterLink) -> *mut ContactFilter;\n\n pub fn ContactFilterLink_drop(slf: *mut ContactFilterLink);\n\n pub fn ContactListenerLink_alloc() -> *mut ContactListenerLink;\n\n pub fn ContactListenerLink_bind(slf: *mut ContactListenerLink,\n\n object: Any,\n\n begin_contact: unsafe extern \"C\" fn(Any, *mut Contact),\n\n end_contact: unsafe extern \"C\" fn(Any, *mut Contact),\n\n pre_solve: unsafe extern \"C\" fn(Any,\n\n *mut Contact,\n\n *const Manifold)\n\n ,\n\n post_solve: unsafe extern \"C\" fn(Any,\n\n *mut Contact,\n\n *const ContactImpulse));\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 71, "score": 70176.18225694328 }, { "content": " contact: 
&mut contact,\n\n body_a: ba,\n\n fixture_a: fa,\n\n body_b: bb,\n\n fixture_b: fb\n\n })))))\n\n}\n\n\n\npub struct FixtureAccess<'a, U: UserDataTypes>(&'a mut Fixture, PhantomData<U>);\n\n\n\n#[inline(always)]\n\nunsafe fn fixture_access<F, O, U>(fixture: *mut ffi::Fixture, f: F) -> O\n\n where F: for<'a> FnOnce(FixtureAccess<'a, U>) -> O,\n\n U: UserDataTypes\n\n{\n\n let mut fixture = WrappedRefMut::new(Fixture::from_ffi(fixture));\n\n f(FixtureAccess(&mut fixture, PhantomData))\n\n}\n\n\n\nimpl<'a, U: UserDataTypes> UserData<U::FixtureData> for FixtureAccess<'a, U> {\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 72, "score": 70174.7035227224 }, { "content": "}\n\n\n\nimpl<'a, U: UserDataTypes> UserData<U::BodyData> for BodyAccess<'a, U> {\n\n fn user_data(&self) -> &U::BodyData {\n\n unsafe {\n\n let internal: &InternalUserData<(), U::BodyData> =\n\n &*self.0.ptr().internal_user_data();\n\n &internal.custom\n\n }\n\n }\n\n\n\n fn user_data_mut(&mut self) -> &mut U::BodyData {\n\n unsafe {\n\n let internal: &mut InternalUserData<(), U::BodyData> =\n\n &mut *self.0.mut_ptr().internal_user_data_mut();\n\n &mut internal.custom\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 73, "score": 70174.09830826118 }, { "content": " pub body_a: BodyAccess<'a, U>,\n\n pub fixture_a: FixtureAccess<'a, U>,\n\n pub body_b: BodyAccess<'a, U>,\n\n pub fixture_b: FixtureAccess<'a, U>\n\n}\n\n\n\n#[inline(always)]\n\nunsafe fn contact_access<F, O, U>(contact: *mut ffi::Contact, f: F) -> O\n\n where F: for<'a> FnOnce(ContactAccess<'a, U>) -> O,\n\n U: UserDataTypes\n\n{\n\n let fixture_a = ffi::Contact_get_fixture_a(contact);\n\n let fixture_b = ffi::Contact_get_fixture_b(contact);\n\n let mut contact = WrappedRefMut::new(Contact::from_ffi(contact));\n\n\n\n body_access(ffi::Fixture_get_body(fixture_a), |ba|\n\n fixture_access(fixture_a, |fa|\n\n body_access(ffi::Fixture_get_body(fixture_b), |bb|\n\n fixture_access(fixture_b, |fb|\n\n 
f(ContactAccess {\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 74, "score": 70171.61012110193 }, { "content": " let listener = mem::transmute::<_, &mut L>(object);\n\n contact_access(contact, |c| listener.begin_contact(c))\n\n}\n\n\n\nunsafe extern \"C\" fn cll_end_contact<L, U>(object: ffi::Any,\n\n contact: *mut ffi::Contact)\n\n where L: ContactListener<U>, U: UserDataTypes\n\n{\n\n let listener = mem::transmute::<_, &mut L>(object);\n\n contact_access(contact, |c| listener.end_contact(c))\n\n}\n\n\n\nunsafe extern \"C\" fn cll_pre_solve<L, U>(object: ffi::Any,\n\n contact: *mut ffi::Contact,\n\n old_manifold: *const Manifold)\n\n where L: ContactListener<U>, U: UserDataTypes\n\n{\n\n assert!(!old_manifold.is_null());\n\n let listener = mem::transmute::<_, &mut L>(object);\n\n contact_access(contact, |c| listener.pre_solve(c, &*old_manifold))\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 75, "score": 70171.52050989667 }, { "content": " fn user_data(&self) -> &U::FixtureData {\n\n unsafe {\n\n let internal: &InternalUserData<(), U::FixtureData> =\n\n &*self.0.ptr().internal_user_data();\n\n &internal.custom\n\n }\n\n }\n\n\n\n fn user_data_mut(&mut self) -> &mut U::FixtureData {\n\n unsafe {\n\n let internal: &mut InternalUserData<(), U::FixtureData> =\n\n &mut *self.0.mut_ptr().internal_user_data_mut();\n\n &mut internal.custom\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, U: UserDataTypes> Deref for FixtureAccess<'a, U> {\n\n type Target = Fixture;\n\n\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 76, "score": 70169.59203457292 }, { "content": "impl<'a, U: UserDataTypes> Deref for BodyAccess<'a, U> {\n\n type Target = Body;\n\n\n\n fn deref(&self) -> &Body {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<'a, U: UserDataTypes> DerefMut for BodyAccess<'a, U> {\n\n fn deref_mut(&mut self) -> &mut Body {\n\n &mut self.0\n\n }\n\n}\n\n\n", "file_path": "src/dynamics/world_callbacks.rs", "rank": 77, "score": 70168.0866526095 }, { "content": " 
self->report_fixture = rf;\n\n}\n\n\n\nb2QueryCallback* QueryCallbackLink_as_base(QueryCallbackLink* self) {\n\n return static_cast<b2QueryCallback*>(self);\n\n}\n\n\n\nvoid QueryCallbackLink_drop(QueryCallbackLink* self) {\n\n delete self;\n\n}\n\n\n\ntypedef f32 (*RCCReportFixtureCB)(RustObject, b2Fixture*,\n\n const b2Vec2*, const b2Vec2*, f32);\n\n\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 78, "score": 70165.75515867485 }, { "content": "void DestructionListenerLink_bind(DestructionListenerLink* self,\n\n RustObject o, SayGoodbyeToJointCB sgtj,\n\n SayGoodbyeToFixtureCB sgtf) {\n\n self->object = o;\n\n self->say_goodbye_to_joint = sgtj;\n\n self->say_goodbye_to_fixture = sgtf;\n\n}\n\n\n\nb2DestructionListener* DestructionListenerLink_as_base(DestructionListenerLink* self) {\n\n return static_cast<b2DestructionListener*>(self);\n\n}\n\n\n\nvoid DestructionListenerLink_drop(DestructionListenerLink* self) {\n\n delete self;\n\n}\n\n\n\ntypedef bool (*ShouldCollideCB)(RustObject, b2Fixture*, b2Fixture*);\n\n\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 79, "score": 70161.26765605462 }, { "content": " self->should_collide = sc;\n\n}\n\n\n\nb2ContactFilter* ContactFilterLink_as_base(ContactFilterLink* self) {\n\n return static_cast<b2ContactFilter*>(self);\n\n}\n\n\n\nvoid ContactFilterLink_drop(ContactFilterLink* self) {\n\n delete self;\n\n}\n\n\n\ntypedef void (*BeginContactCB)(RustObject, b2Contact*);\n\ntypedef void (*EndContactCB)(RustObject, b2Contact*);\n\ntypedef void (*PreSolveCB)(RustObject, b2Contact*, const b2Manifold*);\n\ntypedef void (*PostSolveCB)(RustObject, b2Contact*, const b2ContactImpulse*);\n\n\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 80, "score": 70159.57415100599 }, { "content": "typedef void (*SayGoodbyeToJointCB)(RustObject, b2Joint*);\n\ntypedef void (*SayGoodbyeToFixtureCB)(RustObject, b2Fixture*);\n\n\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 
81, "score": 70159.0689404393 }, { "content": "\n\nb2ContactListener* ContactListenerLink_as_base(ContactListenerLink* self) {\n\n return static_cast<b2ContactListener*>(self);\n\n}\n\n\n\nvoid ContactListenerLink_drop(ContactListenerLink* self) {\n\n delete self;\n\n}\n\n\n\ntypedef bool (*QCReportFixtureCB)(RustObject, b2Fixture*);\n\n\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 82, "score": 70158.79889714393 }, { "content": " RustObject object,\n\n RCCReportFixtureCB rf) {\n\n self->object = object;\n\n self->report_fixture = rf;\n\n}\n\n\n\nb2RayCastCallback* RayCastCallbackLink_as_base(RayCastCallbackLink* self) {\n\n return static_cast<b2RayCastCallback*>(self);\n\n}\n\n\n\nvoid RayCastCallbackLink_drop(RayCastCallbackLink* self) {\n\n delete self;\n\n}\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 83, "score": 70155.63809281576 }, { "content": " PreSolveCB pre_solve;\n\n PostSolveCB post_solve;\n\n};\n\n\n\nContactListenerLink* ContactListenerLink_alloc() {\n\n return new ContactListenerLink();\n\n}\n\n\n\nvoid ContactListenerLink_bind(ContactListenerLink* self,\n\n RustObject o,\n\n BeginContactCB bc,\n\n EndContactCB ec,\n\n PreSolveCB pres,\n\n PostSolveCB posts) {\n\n self->object = o;\n\n self->begin_contact = bc;\n\n self->end_contact = ec;\n\n self->pre_solve = pres;\n\n self->post_solve = posts;\n\n}\n", "file_path": "frontend/dynamics/world_callbacks.cpp", "rank": 84, "score": 70155.2169693894 }, { "content": "pub use self::wheel::{WheelJoint, WheelJointDef};\n\n\n\n\n\nuse std::ops::{Deref, DerefMut};\n\nuse wrap::*;\n\nuse common::math::Vec2;\n\nuse dynamics::world::{World, BodyHandle, JointHandle};\n\nuse user_data::{UserDataTypes, UserData, RawUserData, RawUserDataMut, InternalUserData};\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone, PartialEq, Debug)]\n\npub enum JointType {\n\n Unknown,\n\n Revolute,\n\n Prismatic,\n\n Distance,\n\n Pulley,\n\n Mouse,\n\n Gear,\n\n Wheel,\n", "file_path": 
"src/dynamics/joints/mod.rs", "rank": 85, "score": 69285.0194812993 }, { "content": " JointType::Unknown\n\n }\n\n}\n\n\n\n\n\n#[doc(hidden)]\n\npub mod ffi {\n\n pub use ffi::Any;\n\n pub use dynamics::body::ffi::Body;\n\n use common::math::Vec2;\n\n use super::JointType;\n\n\n\n pub enum Joint {}\n\n\n\n extern \"C\" {\n\n pub fn Joint_get_type(slf: *const Joint) -> JointType;\n\n pub fn Joint_get_body_a(slf: *mut Joint) -> *mut Body;\n\n pub fn Joint_get_body_b(slf: *mut Joint) -> *mut Body;\n\n pub fn Joint_get_anchor_a_virtual(slf: *const Joint) -> Vec2;\n\n pub fn Joint_get_anchor_b_virtual(slf: *const Joint) -> Vec2;\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 86, "score": 69278.48042638195 }, { "content": "\n\n fn is_collide_connected(&self) -> bool {\n\n unsafe { ffi::Joint_get_collide_connected(self.base_ptr()) }\n\n }\n\n\n\n fn dump(&mut self) {\n\n unsafe { ffi::Joint_dump_virtual(self.mut_base_ptr()) }\n\n }\n\n\n\n fn shift_origin(&mut self, origin: &Vec2) {\n\n unsafe { ffi::Joint_shift_origin_virtual(self.mut_base_ptr(), origin) }\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[doc(hidden)]\n\npub struct JointEdge {\n\n pub other: *mut ffi::Body,\n\n pub joint: *mut ffi::Joint,\n\n pub prev: *mut JointEdge,\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 87, "score": 69272.54806323102 }, { "content": "macro_rules! wrap_joint {\n\n {\n\n $wrapped:ty => $wrap:ident ($joint_type:path)\n\n < $as_base:path\n\n > $base_as:path\n\n } => {\n\n wrap! 
{\n\n ffi::Joint: $wrapped => pub $wrap\n\n < $as_base\n\n > $base_as\n\n }\n\n\n\n impl Joint for $wrap {\n\n fn assumed_type() -> JointType { $joint_type }\n\n }\n\n };\n\n}\n\n\n\npub mod distance;\n\npub mod friction;\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 88, "score": 69271.29824232176 }, { "content": "pub mod gear;\n\npub mod motor;\n\npub mod mouse;\n\npub mod prismatic;\n\npub mod pulley;\n\npub mod revolute;\n\npub mod rope;\n\npub mod weld;\n\npub mod wheel;\n\n\n\npub use self::distance::{DistanceJoint, DistanceJointDef};\n\npub use self::friction::{FrictionJoint, FrictionJointDef};\n\npub use self::gear::{GearJoint, GearJointDef};\n\npub use self::motor::{MotorJoint, MotorJointDef};\n\npub use self::mouse::{MouseJoint, MouseJointDef};\n\npub use self::prismatic::{PrismaticJoint, PrismaticJointDef};\n\npub use self::pulley::{PulleyJoint, PulleyJointDef};\n\npub use self::revolute::{RevoluteJoint, RevoluteJointDef};\n\npub use self::rope::{RopeJoint, RopeJointDef};\n\npub use self::weld::{WeldJoint, WeldJointDef};\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 89, "score": 69268.2095284278 }, { "content": " pub fn Joint_get_reaction_force_virtual(slf: *const Joint) -> Vec2;\n\n pub fn Joint_get_reaction_torque_virtual(slf: *const Joint) -> f32;\n\n // pub fn Joint_get_next(slf: *mut Joint) -> *mut Joint;\n\n // pub fn Joint_get_next_const(slf: *const Joint) -> *const Joint;\n\n pub fn Joint_is_active(slf: *const Joint) -> bool;\n\n pub fn Joint_get_collide_connected(slf: *const Joint) -> bool;\n\n pub fn Joint_dump_virtual(slf: *mut Joint);\n\n pub fn Joint_shift_origin_virtual(slf: *mut Joint, origin: *const Vec2);\n\n }\n\n}\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 90, "score": 69265.3077695704 }, { "content": " }),\n\n };\n\n j.mut_base_ptr().set_internal_user_data(&mut *j.user_data);\n\n j\n\n }\n\n}\n\n\n\nimpl<U: UserDataTypes> UserData<U::JointData> for MetaJoint<U> {\n\n fn user_data(&self) -> &U::JointData 
{\n\n &self.user_data.custom\n\n }\n\n\n\n fn user_data_mut(&mut self) -> &mut U::JointData {\n\n &mut self.user_data.custom\n\n }\n\n}\n\n\n\nimpl<U: UserDataTypes> Deref for MetaJoint<U> {\n\n type Target = UnknownJoint;\n\n\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 91, "score": 69264.17055314398 }, { "content": "\n\n fn anchor_a(&self) -> Vec2 {\n\n unsafe { ffi::Joint_get_anchor_a_virtual(self.base_ptr()) }\n\n }\n\n\n\n fn anchor_b(&self) -> Vec2 {\n\n unsafe { ffi::Joint_get_anchor_b_virtual(self.base_ptr()) }\n\n }\n\n\n\n fn reaction_force(&self) -> Vec2 {\n\n unsafe { ffi::Joint_get_reaction_force_virtual(self.base_ptr()) }\n\n }\n\n\n\n fn reaction_torque(&self) -> f32 {\n\n unsafe { ffi::Joint_get_reaction_torque_virtual(self.base_ptr()) }\n\n }\n\n\n\n fn is_active(&self) -> bool {\n\n unsafe { ffi::Joint_is_active(self.base_ptr()) }\n\n }\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 92, "score": 69264.06217828019 }, { "content": " use self::UnknownJoint::*;\n\n match self {\n\n &Distance(ref x) => x.base_ptr(),\n\n &Friction(ref x) => x.base_ptr(),\n\n &Gear(ref x) => x.base_ptr(),\n\n &Motor(ref x) => x.base_ptr(),\n\n &Mouse(ref x) => x.base_ptr(),\n\n &Prismatic(ref x) => x.base_ptr(),\n\n &Pulley(ref x) => x.base_ptr(),\n\n &Revolute(ref x) => x.base_ptr(),\n\n &Rope(ref x) => x.base_ptr(),\n\n &Weld(ref x) => x.base_ptr(),\n\n &Wheel(ref x) => x.base_ptr(),\n\n _ => panic!(\"Truly unknown joint\"),\n\n }\n\n }\n\n\n\n unsafe fn mut_base_ptr(&mut self) -> *mut ffi::Joint {\n\n use self::UnknownJoint::*;\n\n match self {\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 93, "score": 69262.54707036058 }, { "content": " pub next: *mut JointEdge,\n\n}\n\n\n\npub enum UnknownJoint {\n\n Unknown,\n\n Revolute(RevoluteJoint),\n\n Prismatic(PrismaticJoint),\n\n Distance(DistanceJoint),\n\n Pulley(PulleyJoint),\n\n Mouse(MouseJoint),\n\n Gear(GearJoint),\n\n Wheel(WheelJoint),\n\n Weld(WeldJoint),\n\n Friction(FrictionJoint),\n\n 
Rope(RopeJoint),\n\n Motor(MotorJoint),\n\n}\n\n\n\nimpl WrappedBase<ffi::Joint> for UnknownJoint {\n\n unsafe fn base_ptr(&self) -> *const ffi::Joint {\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 94, "score": 69262.32683451124 }, { "content": " &mut Distance(ref mut x) => x.mut_base_ptr(),\n\n &mut Friction(ref mut x) => x.mut_base_ptr(),\n\n &mut Gear(ref mut x) => x.mut_base_ptr(),\n\n &mut Motor(ref mut x) => x.mut_base_ptr(),\n\n &mut Mouse(ref mut x) => x.mut_base_ptr(),\n\n &mut Prismatic(ref mut x) => x.mut_base_ptr(),\n\n &mut Pulley(ref mut x) => x.mut_base_ptr(),\n\n &mut Revolute(ref mut x) => x.mut_base_ptr(),\n\n &mut Rope(ref mut x) => x.mut_base_ptr(),\n\n &mut Weld(ref mut x) => x.mut_base_ptr(),\n\n &mut Wheel(ref mut x) => x.mut_base_ptr(),\n\n _ => panic!(\"Truly unknown joint\"),\n\n }\n\n }\n\n}\n\n\n\nimpl FromFFI<ffi::Joint> for UnknownJoint {\n\n unsafe fn from_ffi(ptr: *mut ffi::Joint) -> UnknownJoint {\n\n use self::UnknownJoint::*;\n\n assert!(!ptr.is_null());\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 95, "score": 69261.83890154911 }, { "content": " fn deref(&self) -> &UnknownJoint {\n\n &self.joint\n\n }\n\n}\n\n\n\nimpl<U: UserDataTypes> DerefMut for MetaJoint<U> {\n\n fn deref_mut(&mut self) -> &mut UnknownJoint {\n\n &mut self.joint\n\n }\n\n}\n\n\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 96, "score": 69260.38278878953 }, { "content": " let joint_type = ffi::Joint_get_type(ptr as *const ffi::Joint);\n\n match joint_type {\n\n JointType::Revolute => Revolute(RevoluteJoint::from_ffi(ptr)),\n\n JointType::Prismatic => Prismatic(PrismaticJoint::from_ffi(ptr)),\n\n JointType::Distance => Distance(DistanceJoint::from_ffi(ptr)),\n\n JointType::Pulley => Pulley(PulleyJoint::from_ffi(ptr)),\n\n JointType::Mouse => Mouse(MouseJoint::from_ffi(ptr)),\n\n JointType::Gear => Gear(GearJoint::from_ffi(ptr)),\n\n JointType::Wheel => Wheel(WheelJoint::from_ffi(ptr)),\n\n JointType::Weld => 
Weld(WeldJoint::from_ffi(ptr)),\n\n JointType::Friction => Friction(FrictionJoint::from_ffi(ptr)),\n\n JointType::Rope => Rope(RopeJoint::from_ffi(ptr)),\n\n JointType::Motor => Motor(MotorJoint::from_ffi(ptr)),\n\n _ => Unknown,\n\n }\n\n }\n\n}\n\n\n\nimpl Joint for UnknownJoint {\n\n fn assumed_type() -> JointType {\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 97, "score": 69259.09324791277 }, { "content": " Weld,\n\n Friction,\n\n Rope,\n\n Motor,\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone, PartialEq, Debug)]\n\npub enum LimitState {\n\n Inactive,\n\n Lower,\n\n Upper,\n\n Equal,\n\n}\n\n\n", "file_path": "src/dynamics/joints/mod.rs", "rank": 98, "score": 69250.71796917007 }, { "content": "fn main() {\n\n println!(\"cargo:rustc-link-lib=static=Box2D\");\n\n if let Some(path) = std::env::var(\"BOX2D_LIB_DIR\").ok() {\n\n println!(\"cargo:rustc-link-search=native={}\", path);\n\n } else {\n\n let box2d_install_prefix = cmake::Config::new(\"Box2D/Box2D\")\n\n .define(\"BOX2D_BUILD_STATIC\", \"ON\")\n\n .define(\"BOX2D_INSTALL\", \"ON\")\n\n .define(\"BOX2D_BUILD_SHARED\", \"OFF\")\n\n .define(\"BOX2D_BUILD_EXAMPLES\", \"OFF\")\n\n .define(\"BOX2D_INSTALL_DOC\", \"OFF\")\n\n .build();\n\n println!(\"cargo:rustc-link-search=native={}/lib\", box2d_install_prefix.display());\n\n };\n\n\n\n cc::Build::new()\n\n .cpp(true)\n\n .include(\"Box2D/Box2D\")\n\n .file(\"frontend/lib.cpp\")\n\n .compile(\"libbox2d_frontend.a\");\n\n}\n", "file_path": "build.rs", "rank": 99, "score": 64614.48151198066 } ]
Rust
applications/vote-purge-stress/main.rs
pratikfegade/noria
0460dd90ff8950cf1262bd66b58fd03620221b85
#![allow(clippy::many_single_char_names)] use clap::{value_t_or_exit, App, Arg}; use hdrhistogram::Histogram; use noria::{Builder, DurabilityMode, FrontierStrategy, PersistenceParameters}; use std::time::{Duration, Instant}; const RECIPE: &str = "# base tables CREATE TABLE Article (id int, title varchar(255), PRIMARY KEY(id)); CREATE TABLE Vote (article_id int, user int); # read queries CREATE VIEW VoteCount AS \ SELECT Vote.article_id, COUNT(user) AS votes FROM Vote GROUP BY Vote.article_id; QUERY ArticleWithVoteCount: SELECT Article.id, title, VoteCount.votes AS votes \ FROM Article \ LEFT JOIN VoteCount \ ON (Article.id = VoteCount.article_id) WHERE Article.id = ?;"; #[tokio::main] async fn main() { let args = App::new("purge-stress") .about("Benchmarks the latency of full replays in a user-curated news aggregator") .arg( Arg::with_name("flush-timeout") .long("flush-timeout") .takes_value(true) .default_value("100000") .help("Time to wait before processing a merged packet, in nanoseconds."), ) .arg( Arg::with_name("replay-timeout") .long("replay-timeout") .takes_value(true) .default_value("100000") .help("Time to batch replay requests for, in nanoseconds."), ) .arg( Arg::with_name("time") .short("t") .takes_value(true) .default_value("10") .help("Time to run benchmark for, in seconds."), ) .arg( Arg::with_name("purge") .long("purge") .takes_value(true) .possible_values(&["none", "reader", "all"]) .default_value("none") .help("Disable purging"), ) .arg(Arg::with_name("verbose").long("verbose").short("v")) .get_matches(); let runtime = value_t_or_exit!(args, "time", u64); let mut builder = Builder::default(); if args.is_present("verbose") { builder.log_with(noria::logger_pls()); } builder.set_persistence(PersistenceParameters { mode: DurabilityMode::MemoryOnly, flush_timeout: Duration::new(0, value_t_or_exit!(args, "flush-timeout", u32)), ..Default::default() }); builder.set_sharding(None); builder.set_partial_replay_batch_timeout(Duration::new( 0, 
value_t_or_exit!(args, "replay-timeout", u32), )); match args.value_of("purge").unwrap() { "all" => { builder.set_frontier_strategy(FrontierStrategy::AllPartial); } "reader" => { builder.set_frontier_strategy(FrontierStrategy::Readers); } "none" => {} _ => unreachable!(), } let (mut g, done) = builder.start_local().await.unwrap(); { g.ready().await.unwrap(); g.install_recipe(RECIPE).await.unwrap(); let mut a = g.table("Article").await.unwrap(); let mut v = g.table("Vote").await.unwrap(); let mut r = g.view("ArticleWithVoteCount").await.unwrap(); a.insert(vec![1.into(), "Hello world #1".into()]) .await .unwrap(); a.insert(vec![2.into(), "Hello world #2".into()]) .await .unwrap(); v.insert(vec![1.into(), "a".into()]).await.unwrap(); v.insert(vec![2.into(), "a".into()]).await.unwrap(); v.insert(vec![1.into(), "b".into()]).await.unwrap(); v.insert(vec![2.into(), "c".into()]).await.unwrap(); v.insert(vec![2.into(), "d".into()]).await.unwrap(); let one = 1.into(); let two = 2.into(); assert_eq!( r.lookup(&[one], true).await.unwrap(), vec![vec![1.into(), "Hello world #1".into(), 2.into()]] ); assert_eq!( r.lookup(&[two], true).await.unwrap(), vec![vec![2.into(), "Hello world #2".into(), 3.into()]] ); let mut n = 0; let start = Instant::now(); let mut stats = Histogram::<u64>::new_with_bounds(1, 60_000_000, 3).unwrap(); while start.elapsed() < Duration::from_secs(runtime) { for &id in &[1, 2] { let start = Instant::now(); r.lookup(&[id.into()], true).await.unwrap(); stats.saturating_record(start.elapsed().as_micros() as u64); n += 1; std::thread::sleep(Duration::from_millis(50)); } } println!("# purge mode: {}", args.value_of("purge").unwrap()); println!( "# replays/s: {:.2}", f64::from(n) / start.elapsed().as_secs_f64() ); println!("# op\tpct\ttime"); println!("replay\t50\t{:.2}\tµs", stats.value_at_quantile(0.5)); println!("replay\t95\t{:.2}\tµs", stats.value_at_quantile(0.95)); println!("replay\t99\t{:.2}\tµs", stats.value_at_quantile(0.99)); 
println!("replay\t100\t{:.2}\tµs", stats.max()); } drop(g); done.await; }
#![allow(clippy::many_single_char_names)] use clap::{value_t_or_exit, App, Arg}; use hdrhistogram::Histogram; use noria::{Builder, DurabilityMode, FrontierStrategy, PersistenceParameters}; use std::time::{Duration, Instant}; const RECIPE: &str = "# base tables CREATE TABLE Article (id int, title varchar(255), PRIMARY KEY(id)); CREATE TABLE Vote (article_id int, user int); # read queries CREATE VIEW VoteCount AS \ SELECT Vote.article_id, COUNT(user) AS votes FROM Vote GROUP BY Vote.article_id; QUERY ArticleWithVoteCount: SELECT Article.id, title, VoteCount.votes AS votes \ FROM Article \ LEFT JOIN VoteCount \ ON (Article.id = VoteCount.article_id) WHERE Article.id = ?;"; #[tokio::main] async fn main() { let args = App::new("purge-stress") .about("Benchmarks the latency of full replays in a user-curated news aggregator") .arg( Arg::with_name("flush-timeout") .long("flush-timeout") .takes_value(true) .default_value("100000") .help("Time to wait before processing a merged packet, in nanoseconds."), ) .arg( Arg::with_name("replay-timeout") .long("replay-timeout") .takes_value(true) .default_value("100000") .help("Time to batch replay requests for, in nanoseconds."), ) .arg( Arg::with_name("time") .short("t") .takes_value(true) .default_value("10") .help("Time to run benchmark for, in seconds."), ) .arg( Arg::with_name("purge") .long("purge") .takes_value(true) .possible_values(&["none", "reader", "all"]) .default_value("none") .help("Disable purging"), ) .arg(Arg::with_name("verbose").long("verbose").short("v")) .get_matches(); let runtime = value_t_or_exit!(args, "time", u64); let mut builder = Builder::default(); if args.is_present("verbose") { builder.log_with(noria::logger_pls()); } builder.set_persistence(PersistenceParameters { mode: DurabilityMode::MemoryOnly, flush_timeout: Duration::new(0, value_t_or_exit!(args, "flush-timeout", u32)), ..Default::default() }); builder.set_sharding(None); builder.set_partial_replay_batch_timeout(Duration::new( 0, 
value_t_or_exit!(args, "replay-timeout", u32), )); match args.value_of("purge").unwrap() { "all" => { builder.set_frontier_strategy(FrontierStrategy::AllPartial); } "reader" => { builder.set_frontier_strategy(FrontierStrategy::Readers); } "none" => {} _ => unreachable!(), } let (mut g, done) = builder.start_local().await.unwrap(); { g.ready().await.unwrap(); g.install_recipe(RECIPE).await.unwrap(); let mut a = g.table("Article").await.unwrap(); let mut v = g.table("Vote").await.unwrap(); let mut r = g.view("ArticleWithVoteCount").await.unwrap(); a.insert(vec![1.into(), "Hello world #1".into()]) .await .unwrap(); a.insert(vec![2.into(), "Hello world #2".into()]) .await .unwrap(); v.insert(vec![1.into(), "a".into()]).await.unwrap(); v.insert(vec![2.into(), "a".into()]).await.unwrap(); v.insert(vec![1.into(), "b".into()]).await.unwrap(); v.insert(vec![2.into(), "c".into()]).await.unwrap(); v.insert(vec![2.into(), "d".into()]).await.unwrap(); let one = 1.into(); let two = 2.into(); assert_eq!( r.lookup(&[one], true).await.unwrap(), vec![vec![1.into(), "Hello world #1".into(), 2.into()]] ); assert_eq!( r.lookup(&[two], true).await.unwrap(), vec![vec![2.into(), "Hello world #2".into(), 3.into()]] ); let mut n = 0; let start = Instant::now(); let mut stats = Histogram::<u64>::new_with_bounds(1, 60_000_000, 3).unwrap(); while start.elapsed() < Duration::from_secs(runtime) { for &id in &[1, 2] { let start = Instant::now(); r.lookup(&[id.into()], true).await.unwrap(); stats.saturating_record(start.elapsed().as_micros() as u64); n += 1; std::thread::sleep(Duration::from_milli
s(50)); } } println!("# purge mode: {}", args.value_of("purge").unwrap()); println!( "# replays/s: {:.2}", f64::from(n) / start.elapsed().as_secs_f64() ); println!("# op\tpct\ttime"); println!("replay\t50\t{:.2}\tµs", stats.value_at_quantile(0.5)); println!("replay\t95\t{:.2}\tµs", stats.value_at_quantile(0.95)); println!("replay\t99\t{:.2}\tµs", stats.value_at_quantile(0.99)); println!("replay\t100\t{:.2}\tµs", stats.max()); } drop(g); done.await; }
function_block-function_prefixed
[ { "content": "CREATE TABLE Vote (article_id int, user int);\n\n\n", "file_path": "applications/vote/clients/localsoup/graph.rs", "rank": 2, "score": 473892.9226471856 }, { "content": "CREATE VIEW user_stats AS SELECT users.id, user_comments.comments, user_stories.stories FROM users LEFT JOIN user_comments ON (users.id = user_comments.id) LEFT JOIN user_stories ON (users.id = user_stories.id);\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 3, "score": 449410.76450037723 }, { "content": "CREATE VIEW `good_comments` AS SELECT comments.id, comments.created_at, comments.story_id, comments.user_id, comments.parent_comment_id, FULL_comment_upvotes.votes - FULL_comment_downvotes.votes AS score FROM comments LEFT JOIN FULL_comment_upvotes ON (comments.id = FULL_comment_upvotes.id) LEFT JOIN FULL_comment_downvotes ON (comments.id = FULL_comment_downvotes.id) WHERE comments.is_deleted = 0 AND comments.is_moderated = 0;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 4, "score": 433609.88591636065 }, { "content": "CREATE VIEW user_stats AS SELECT users.id, user_comments.comments, user_stories.stories FROM users LEFT JOIN user_comments ON (users.id = user_comments.id) LEFT JOIN user_stories ON (users.id = user_stories.id);\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 5, "score": 425510.94038462685 }, { "content": "CREATE VIEW `good_comments` AS SELECT comments.id, comments.created_at, comments.story_id, comments.user_id, comments.parent_comment_id, FULL_comment_upvotes.votes - FULL_comment_downvotes.votes AS score FROM comments LEFT JOIN FULL_comment_upvotes ON (comments.id = FULL_comment_upvotes.id) LEFT JOIN FULL_comment_downvotes ON (comments.id = FULL_comment_downvotes.id) WHERE comments.is_deleted = 0 AND comments.is_moderated = 0;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 6, "score": 415719.1505953223 }, { "content": "CREATE VIEW 
user_story_karma AS SELECT story_with_votes.user_id AS id, SUM(story_with_votes.score) AS karma FROM story_with_votes GROUP BY story_with_votes.user_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 7, "score": 415061.9906515461 }, { "content": "CREATE VIEW user_comment_karma AS SELECT comment_with_votes.user_id AS id, SUM(comment_with_votes.score) AS karma FROM comment_with_votes GROUP BY comment_with_votes.user_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 8, "score": 415061.9906515461 }, { "content": "-- Useful intermediate views\n\nCREATE VIEW `comment_with_votes` AS SELECT comments.*, FULL_comment_upvotes.votes AS upvotes, FULL_comment_downvotes.votes AS downvotes, FULL_comment_upvotes.votes - FULL_comment_downvotes.votes AS score FROM comments LEFT JOIN FULL_comment_upvotes ON (comments.id = FULL_comment_upvotes.id) LEFT JOIN FULL_comment_downvotes ON (comments.id = FULL_comment_downvotes.id);\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 9, "score": 408973.73226896924 }, { "content": "CREATE VIEW `story_with_votes` AS SELECT stories.*, FULL_story_upvotes.votes AS upvotes, FULL_story_downvotes.votes AS downvotes, FULL_story_upvotes.votes - FULL_story_downvotes.votes AS score FROM stories LEFT JOIN FULL_story_upvotes ON (stories.id = FULL_story_upvotes.id) LEFT JOIN FULL_story_downvotes ON (stories.id = FULL_story_downvotes.id);\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 10, "score": 408961.90986158815 }, { "content": "CREATE TABLE Article (id int, title varchar(255), PRIMARY KEY(id));\n", "file_path": "applications/vote/clients/localsoup/graph.rs", "rank": 11, "score": 407019.84814575216 }, { "content": "CREATE VIEW FULL_merged_story_score AS SELECT stories.merged_story_id AS id, FULL_story_score.score FROM FULL_story_score JOIN stories ON (FULL_story_score.id = stories.merged_story_id);\n\n\n", "file_path": 
"server/tests/filter-aggregate-lobsters-schema.sql", "rank": 12, "score": 401576.39738558565 }, { "content": "CREATE VIEW user_karma AS SELECT users.id, user_comment_karma.karma + user_story_karma.karma AS karma FROM users LEFT JOIN user_comment_karma ON (users.id = user_comment_karma.id) LEFT JOIN user_story_karma ON (users.id = user_story_karma.id);\n\n\n\n-----------------------------------------------------\n\n-- Original:\n\n-- CREATE VIEW `replying_comments` AS select `read_ribbons`.`user_id` AS `user_id`,`comments`.`id` AS `comment_id`,`read_ribbons`.`story_id` AS `story_id`,`comments`.`parent_comment_id` AS `parent_comment_id`,`comments`.`created_at` AS `comment_created_at`,`parent_comments`.`user_id` AS `parent_comment_author_id`,`comments`.`user_id` AS `comment_author_id`,`stories`.`user_id` AS `story_author_id`,(`read_ribbons`.`updated_at` < `comments`.`created_at`) AS `is_unread`,(select `votes`.`vote` from `votes` where ((`votes`.`user_id` = `read_ribbons`.`user_id`) and (`votes`.`comment_id` = `comments`.`id`))) AS `current_vote_vote`,(select `votes`.`reason` from `votes` where ((`votes`.`user_id` = `read_ribbons`.`user_id`) and (`votes`.`comment_id` = `comments`.`id`))) AS `current_vote_reason` from (((`read_ribbons` join `comments` on((`comments`.`story_id` = `read_ribbons`.`story_id`))) join `stories` on((`stories`.`id` = `comments`.`story_id`))) left join `comments` `parent_comments` on((`parent_comments`.`id` = `comments`.`parent_comment_id`))) where ((`read_ribbons`.`is_following` = 1) and (`comments`.`user_id` <> `read_ribbons`.`user_id`) and (`comments`.`is_deleted` = 0) and (`comments`.`is_moderated` = 0) and ((`parent_comments`.`user_id` = `read_ribbons`.`user_id`) or (isnull(`parent_comments`.`user_id`) and (`stories`.`user_id` = `read_ribbons`.`user_id`))) and ((`comments`.`upvotes` - `comments`.`downvotes`) >= 0) and (isnull(`parent_comments`.`id`) or ((`parent_comments`.`upvotes` - `parent_comments`.`downvotes`) >= 0)));\n\n\n", 
"file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 13, "score": 398035.3176070647 }, { "content": "-- Accessor views\n\nCREATE VIEW story_with_hotness AS SELECT stories.*, FULL_story_hotness.hotness FROM stories LEFT JOIN FULL_story_hotness ON (stories.id = FULL_story_hotness.id);\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 14, "score": 392339.81502614246 }, { "content": "CREATE VIEW FULL_comment_upvotes AS SELECT all_comment_votes.comment_id AS id, COUNT(CASE WHEN all_comment_votes.vote = 1 THEN all_comment_votes.vote END) as votes FROM all_comment_votes GROUP BY all_comment_votes.comment_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 15, "score": 390945.7942899746 }, { "content": "CREATE VIEW FULL_story_upvotes AS SELECT all_story_votes.story_id AS id, COUNT(CASE WHEN all_story_votes.vote = 1 THEN all_story_votes.vote END) as votes FROM all_story_votes GROUP BY all_story_votes.story_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 16, "score": 390945.79428997455 }, { "content": "CREATE VIEW FULL_comment_downvotes AS SELECT all_comment_votes.comment_id AS id, COUNT(CASE WHEN all_comment_votes.vote = 0 THEN all_comment_votes.vote END) as votes FROM all_comment_votes GROUP BY all_comment_votes.comment_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 17, "score": 390940.4122723736 }, { "content": "CREATE VIEW FULL_story_downvotes AS SELECT all_story_votes.story_id AS id, COUNT(CASE WHEN all_story_votes.vote = 0 THEN all_story_votes.vote END) as votes FROM all_story_votes GROUP BY all_story_votes.story_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 18, "score": 390940.4122723736 }, { "content": "-- Useful intermediate views\n\nCREATE VIEW `comment_with_votes` AS SELECT comments.*, FULL_comment_upvotes.votes AS upvotes, FULL_comment_downvotes.votes AS 
downvotes, FULL_comment_upvotes.votes - FULL_comment_downvotes.votes AS score FROM comments LEFT JOIN FULL_comment_upvotes ON (comments.id = FULL_comment_upvotes.id) LEFT JOIN FULL_comment_downvotes ON (comments.id = FULL_comment_downvotes.id);\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 19, "score": 389400.36812005204 }, { "content": "CREATE VIEW `story_with_votes` AS SELECT stories.*, FULL_story_upvotes.votes AS upvotes, FULL_story_downvotes.votes AS downvotes, FULL_story_upvotes.votes - FULL_story_downvotes.votes AS score FROM stories LEFT JOIN FULL_story_upvotes ON (stories.id = FULL_story_upvotes.id) LEFT JOIN FULL_story_downvotes ON (stories.id = FULL_story_downvotes.id);\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 20, "score": 389388.545712671 }, { "content": "-- Other derived stats\n\nCREATE VIEW story_comments AS SELECT stories.id, COUNT(comments.id) as comments FROM stories LEFT JOIN comments ON (stories.id = comments.story_id) GROUP BY stories.id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 21, "score": 388172.29207207105 }, { "content": "CREATE VIEW user_comment_karma AS SELECT comment_with_votes.user_id AS id, SUM(comment_with_votes.score) AS karma FROM comment_with_votes GROUP BY comment_with_votes.user_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 22, "score": 387564.3564535702 }, { "content": "CREATE VIEW user_story_karma AS SELECT story_with_votes.user_id AS id, SUM(story_with_votes.score) AS karma FROM story_with_votes GROUP BY story_with_votes.user_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 23, "score": 387564.3564535702 }, { "content": "CREATE VIEW FULL_story_upvotes AS SELECT votes.story_id AS id, COUNT(*) as votes FROM votes WHERE votes.comment_id IS NULL AND votes.vote = 1 GROUP BY votes.story_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 24, 
"score": 385159.69500783464 }, { "content": "CREATE VIEW FULL_comment_upvotes AS SELECT votes.comment_id AS id, COUNT(*) as votes FROM votes WHERE votes.story_id IS NULL AND votes.vote = 1 GROUP BY votes.comment_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 25, "score": 385159.69500783464 }, { "content": "CREATE VIEW FULL_comment_downvotes AS SELECT votes.comment_id AS id, COUNT(*) as votes FROM votes WHERE votes.story_id IS NULL AND votes.vote = 0 GROUP BY votes.comment_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 26, "score": 385154.3129902337 }, { "content": "CREATE VIEW FULL_story_downvotes AS SELECT votes.story_id AS id, COUNT(*) as votes FROM votes WHERE votes.comment_id IS NULL AND votes.vote = 0 GROUP BY votes.story_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 27, "score": 385154.3129902337 }, { "content": "-- Original:\n", "file_path": "applications/lobsters/mysql/db-schema/original.sql", "rank": 28, "score": 384416.4522528297 }, { "content": "-- Original:\n\n-- CREATE VIEW `replying_comments` AS select `read_ribbons`.`user_id` AS `user_id`,`comments`.`id` AS `comment_id`,`read_ribbons`.`story_id` AS `story_id`,`comments`.`parent_comment_id` AS `parent_comment_id`,`comments`.`created_at` AS `comment_created_at`,`parent_comments`.`user_id` AS `parent_comment_author_id`,`comments`.`user_id` AS `comment_author_id`,`stories`.`user_id` AS `story_author_id`,(`read_ribbons`.`updated_at` < `comments`.`created_at`) AS `is_unread`,(select `votes`.`vote` from `votes` where ((`votes`.`user_id` = `read_ribbons`.`user_id`) and (`votes`.`comment_id` = `comments`.`id`))) AS `current_vote_vote`,(select `votes`.`reason` from `votes` where ((`votes`.`user_id` = `read_ribbons`.`user_id`) and (`votes`.`comment_id` = `comments`.`id`))) AS `current_vote_reason` from (((`read_ribbons` join `comments` on((`comments`.`story_id` = `read_ribbons`.`story_id`))) join `stories` 
on((`stories`.`id` = `comments`.`story_id`))) left join `comments` `parent_comments` on((`parent_comments`.`id` = `comments`.`parent_comment_id`))) where ((`read_ribbons`.`is_following` = 1) and (`comments`.`user_id` <> `read_ribbons`.`user_id`) and (`comments`.`is_deleted` = 0) and (`comments`.`is_moderated` = 0) and ((`parent_comments`.`user_id` = `read_ribbons`.`user_id`) or (isnull(`parent_comments`.`user_id`) and (`stories`.`user_id` = `read_ribbons`.`user_id`))) and ((`comments`.`upvotes` - `comments`.`downvotes`) >= 0) and (isnull(`parent_comments`.`id`) or ((`parent_comments`.`upvotes` - `parent_comments`.`downvotes`) >= 0)));\n\n--\n\n-- Modified:\n\n-- CREATE VIEW `BOUNDARY_replying_comments_for_count` AS\n\n-- \tSELECT `read_ribbons`.`user_id`, `read_ribbons`.`story_id`, `comments`.`id`,\n\n-- \t `comments`.`upvotes` - `comments`.`downvotes` AS saldo,\n\n-- \t `parent_comments`.`upvotes` - `parent_comments`.`downvotes` AS psaldo\n\n-- \tFROM `read_ribbons`\n\n-- \tJOIN `stories` ON (`stories`.`id` = `read_ribbons`.`story_id`)\n\n-- \tJOIN `comments` ON (`comments`.`story_id` = `read_ribbons`.`story_id`)\n\n-- \tLEFT JOIN `parent_comments`\n\n-- \tON (`comments`.`parent_comment_id` = `parent_comments`.`id`)\n\n-- \tWHERE `read_ribbons`.`is_following` = 1\n\n-- \tAND `comments`.`user_id` <> `read_ribbons`.`user_id`\n\n-- \tAND `comments`.`is_deleted` = 0\n\n-- \tAND `comments`.`is_moderated` = 0\n\n-- \tAND saldo >= 0\n\n-- \tAND `read_ribbons`.`updated_at` < `comments`.`created_at`\n\n-- \tAND (\n\n-- (\n", "file_path": "applications/lobsters/mysql/db-schema/noria.sql", "rank": 29, "score": 384416.4522528297 }, { "content": "CREATE VIEW user_stories AS SELECT stories.user_id AS id, COUNT(stories.id) AS stories FROM stories GROUP BY stories.user_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 30, "score": 381750.0991019559 }, { "content": "CREATE VIEW user_comments AS SELECT comments.user_id AS id, COUNT(comments.id) 
AS comments FROM comments GROUP BY comments.user_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 31, "score": 381750.0991019558 }, { "content": "CREATE VIEW FULL_story_score AS SELECT story_votes.id, SUM(story_votes.score) as score FROM story_votes GROUP BY story_votes.id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 32, "score": 379251.3133848188 }, { "content": "CREATE VIEW FULL_comment_score AS SELECT comment_votes.id, SUM(comment_votes.score) as score FROM comment_votes GROUP BY comment_votes.id;\n\n\n\n-- Story score tracking\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 33, "score": 379251.3133848188 }, { "content": "CREATE VIEW FULL_merged_story_score AS SELECT stories.merged_story_id AS id, FULL_story_score.score FROM FULL_story_score JOIN stories ON (FULL_story_score.id = stories.merged_story_id);\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 34, "score": 376379.1158433369 }, { "content": "CREATE VIEW user_karma AS SELECT users.id, user_comment_karma.karma + user_story_karma.karma AS karma FROM users LEFT JOIN user_comment_karma ON (users.id = user_comment_karma.id) LEFT JOIN user_story_karma ON (users.id = user_story_karma.id);\n\n\n\n-----------------------------------------------------\n\n-- Original:\n\n-- CREATE VIEW `replying_comments` AS select `read_ribbons`.`user_id` AS `user_id`,`comments`.`id` AS `comment_id`,`read_ribbons`.`story_id` AS `story_id`,`comments`.`parent_comment_id` AS `parent_comment_id`,`comments`.`created_at` AS `comment_created_at`,`parent_comments`.`user_id` AS `parent_comment_author_id`,`comments`.`user_id` AS `comment_author_id`,`stories`.`user_id` AS `story_author_id`,(`read_ribbons`.`updated_at` < `comments`.`created_at`) AS `is_unread`,(select `votes`.`vote` from `votes` where ((`votes`.`user_id` = `read_ribbons`.`user_id`) and (`votes`.`comment_id` = `comments`.`id`))) AS 
`current_vote_vote`,(select `votes`.`reason` from `votes` where ((`votes`.`user_id` = `read_ribbons`.`user_id`) and (`votes`.`comment_id` = `comments`.`id`))) AS `current_vote_reason` from (((`read_ribbons` join `comments` on((`comments`.`story_id` = `read_ribbons`.`story_id`))) join `stories` on((`stories`.`id` = `comments`.`story_id`))) left join `comments` `parent_comments` on((`parent_comments`.`id` = `comments`.`parent_comment_id`))) where ((`read_ribbons`.`is_following` = 1) and (`comments`.`user_id` <> `read_ribbons`.`user_id`) and (`comments`.`is_deleted` = 0) and (`comments`.`is_moderated` = 0) and ((`parent_comments`.`user_id` = `read_ribbons`.`user_id`) or (isnull(`parent_comments`.`user_id`) and (`stories`.`user_id` = `read_ribbons`.`user_id`))) and ((`comments`.`upvotes` - `comments`.`downvotes`) >= 0) and (isnull(`parent_comments`.`id`) or ((`parent_comments`.`upvotes` - `parent_comments`.`downvotes`) >= 0)));\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 35, "score": 375861.7165628212 }, { "content": "CREATE VIEW `BOUNDARY_replying_comments_for_count` AS SELECT `read_ribbons`.`user_id`, tails.created_at FROM `read_ribbons` JOIN `tails` ON (`tails`.`story_id` = `read_ribbons`.`story_id`) WHERE `read_ribbons`.`is_following` = 1 AND `tails`.`user_id` <> `read_ribbons`.`user_id` AND `tails`.`created_at` > `read_ribbons`.`updated_at`;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 36, "score": 372528.9758231569 }, { "content": "-----------------------------------------------------\n\n-- Make views for all the computed columns\n\nCREATE VIEW `FULL_story_tag_score` AS SELECT taggings.story_id AS id, SUM(tags.hotness_mod) AS score FROM taggings JOIN tags ON (tags.id = taggings.tag_id) GROUP BY taggings.story_id;\n\n\n\n-- Comment score tracking\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 37, "score": 365635.4166049232 }, { "content": "-- Accessor 
views\n\nCREATE VIEW story_with_hotness AS SELECT stories.*, FULL_story_hotness.hotness FROM stories LEFT JOIN FULL_story_hotness ON (stories.id = FULL_story_hotness.id);\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 38, "score": 364551.04901819583 }, { "content": "-- Original:\n\n-- CREATE VIEW `replying_comments` AS select `read_ribbons`.`user_id` AS `user_id`,`comments`.`id` AS `comment_id`,`read_ribbons`.`story_id` AS `story_id`,`comments`.`parent_comment_id` AS `parent_comment_id`,`comments`.`created_at` AS `comment_created_at`,`parent_comments`.`user_id` AS `parent_comment_author_id`,`comments`.`user_id` AS `comment_author_id`,`stories`.`user_id` AS `story_author_id`,(`read_ribbons`.`updated_at` < `comments`.`created_at`) AS `is_unread`,(select `votes`.`vote` from `votes` where ((`votes`.`user_id` = `read_ribbons`.`user_id`) and (`votes`.`comment_id` = `comments`.`id`))) AS `current_vote_vote`,(select `votes`.`reason` from `votes` where ((`votes`.`user_id` = `read_ribbons`.`user_id`) and (`votes`.`comment_id` = `comments`.`id`))) AS `current_vote_reason` from (((`read_ribbons` join `comments` on((`comments`.`story_id` = `read_ribbons`.`story_id`))) join `stories` on((`stories`.`id` = `comments`.`story_id`))) left join `comments` `parent_comments` on((`parent_comments`.`id` = `comments`.`parent_comment_id`))) where ((`read_ribbons`.`is_following` = 1) and (`comments`.`user_id` <> `read_ribbons`.`user_id`) and (`comments`.`is_deleted` = 0) and (`comments`.`is_moderated` = 0) and ((`parent_comments`.`user_id` = `read_ribbons`.`user_id`) or (isnull(`parent_comments`.`user_id`) and (`stories`.`user_id` = `read_ribbons`.`user_id`))) and ((`comments`.`upvotes` - `comments`.`downvotes`) >= 0) and (isnull(`parent_comments`.`id`) or ((`parent_comments`.`upvotes` - `parent_comments`.`downvotes`) >= 0)));\n\n--\n\n-- Modified:\n\n-- CREATE VIEW `replying_comments_for_count` AS\n\n-- \tSELECT `read_ribbons`.`user_id`, 
`read_ribbons`.`story_id`, `comments`.`id`\n\n-- \tFROM `read_ribbons`\n\n-- \tJOIN `stories` ON (`stories`.`id` = `read_ribbons`.`story_id`)\n\n-- \tJOIN `comments` ON (`comments`.`story_id` = `read_ribbons`.`story_id`)\n\n-- \tLEFT JOIN `comments` AS `parent_comments`\n\n-- \tON (`parent_comments`.`id` = `comments`.`parent_comment_id`)\n\n-- \tWHERE `read_ribbons`.`is_following` = 1\n\n-- \tAND `comments`.`user_id` <> `read_ribbons`.`user_id`\n\n-- \tAND `comments`.`is_deleted` = 0\n\n-- \tAND `comments`.`is_moderated` = 0\n\n-- \tAND ( `comments`.`upvotes` - `comments`.`downvotes` ) >= 0\n\n-- \tAND `read_ribbons`.`updated_at` < `comments`.`created_at`\n\n-- \tAND (\n\n-- (\n\n-- `parent_comments`.`user_id` = `read_ribbons`.`user_id`\n\n-- AND\n\n-- ( `parent_comments`.`upvotes` - `parent_comments`.`downvotes` ) >= 0\n\n-- )\n\n-- OR\n\n-- (\n\n-- `parent_comments`.`id` IS NULL\n\n-- AND\n\n-- `stories`.`user_id` = `read_ribbons`.`user_id`\n\n-- )\n\n-- );\n\n--\n\n-- Without newlines:\n\nCREATE VIEW `replying_comments_for_count` AS SELECT `read_ribbons`.`user_id`, `read_ribbons`.`story_id`, `comments`.`id` FROM `read_ribbons` JOIN `stories` ON (`stories`.`id` = `read_ribbons`.`story_id`) JOIN `comments` ON (`comments`.`story_id` = `read_ribbons`.`story_id`) LEFT JOIN `comments` AS `parent_comments` ON (`parent_comments`.`id` = `comments`.`parent_comment_id`) WHERE `read_ribbons`.`is_following` = 1 AND `comments`.`user_id` <> `read_ribbons`.`user_id` AND `comments`.`is_deleted` = 0 AND `comments`.`is_moderated` = 0 AND ( `comments`.`upvotes` - `comments`.`downvotes` ) >= 0 AND `read_ribbons`.`updated_at` < `comments`.`created_at` AND ( ( `parent_comments`.`user_id` = `read_ribbons`.`user_id` AND ( `parent_comments`.`upvotes` - `parent_comments`.`downvotes` ) >= 0) OR ( `parent_comments`.`id` IS NULL AND `stories`.`user_id` = `read_ribbons`.`user_id`));\n\nINSERT INTO `tags` (`tag`) VALUES ('test');\n", "file_path": 
"applications/lobsters/mysql/db-schema/original.sql", "rank": 39, "score": 361034.7064976401 }, { "content": "CREATE TABLE TableRow (id int, c1 int, c2 int, c3 int, c4 int, c5 int, c6 int, c7 int, c8 int, c9 int, PRIMARY KEY(id));\n\nQUERY ReadRow: SELECT * FROM TableRow WHERE id = ?;\n\nQUERY query_c1: SELECT * FROM TableRow WHERE c1 = ?;\n\nQUERY query_c2: SELECT * FROM TableRow WHERE c2 = ?;\n\nQUERY query_c3: SELECT * FROM TableRow WHERE c3 = ?;\n\nQUERY query_c4: SELECT * FROM TableRow WHERE c4 = ?;\n\nQUERY query_c5: SELECT * FROM TableRow WHERE c5 = ?;\n\nQUERY query_c6: SELECT * FROM TableRow WHERE c6 = ?;\n\nQUERY query_c7: SELECT * FROM TableRow WHERE c7 = ?;\n\nQUERY query_c8: SELECT * FROM TableRow WHERE c8 = ?;\n\nQUERY query_c9: SELECT * FROM TableRow WHERE c9 = ?;\n\n\";\n\n\n\nasync fn build_graph(\n\n authority: Arc<ZookeeperAuthority>,\n\n persistence: PersistenceParameters,\n\n verbose: bool,\n\n) -> (Handle<ZookeeperAuthority>, impl Future<Output = ()>) {\n\n let mut builder = Builder::default();\n\n if verbose {\n", "file_path": "applications/replay/main.rs", "rank": 40, "score": 360715.30180541286 }, { "content": "-- Other derived stats\n\nCREATE VIEW story_comments AS SELECT stories.id, COUNT(comments.id) as comments FROM stories LEFT JOIN comments ON (stories.id = comments.story_id) GROUP BY stories.id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 41, "score": 360677.72619696957 }, { "content": "CREATE VIEW FULL_story_comment_score AS SELECT FULL_non_author_comments.story_id AS id, SUM(FULL_comment_score.score) AS score FROM FULL_non_author_comments JOIN FULL_comment_score ON (FULL_comment_score.id = FULL_non_author_comments.id) GROUP BY FULL_non_author_comments.story_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 42, "score": 359158.8241532521 }, { "content": "-- Hotness computation\n\n-- XXX: bah.. 
pretty sad that this join will end up full...\n\nCREATE VIEW FULL_non_author_comments AS SELECT comments.id, comments.story_id FROM comments JOIN stories ON (comments.story_id = stories.id) WHERE comments.user_id <> stories.user_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 43, "score": 356467.31459031056 }, { "content": "CREATE VIEW `BOUNDARY_replying_comments_for_count` AS SELECT `read_ribbons`.`user_id`, `read_ribbons`.`story_id`, `comments`.`id`, `comments`.`upvotes` - `comments`.`downvotes` AS saldo, `parent_comments`.`upvotes` - `parent_comments`.`downvotes` AS psaldo FROM `read_ribbons` JOIN `stories` ON (`stories`.`id` = `read_ribbons`.`story_id`) JOIN `comments` ON (`comments`.`story_id` = `read_ribbons`.`story_id`) LEFT JOIN `parent_comments` ON (`comments`.`parent_comment_id` = `parent_comments`.`id`) WHERE `read_ribbons`.`is_following` = 1 AND `comments`.`user_id` <> `read_ribbons`.`user_id` AND `comments`.`is_deleted` = 0 AND `comments`.`is_moderated` = 0 AND saldo >= 0 AND `read_ribbons`.`updated_at` < `comments`.`created_at` AND ( ( `parent_comments`.`user_id` = `read_ribbons`.`user_id` AND psaldo >= 0) OR ( `parent_comments`.`id` IS NULL AND `stories`.`user_id` = `read_ribbons`.`user_id`));\n\n\n", "file_path": "applications/lobsters/mysql/db-schema/noria.sql", "rank": 44, "score": 353495.6569427714 }, { "content": "CREATE VIEW comment_votes AS (SELECT FULL_comment_upvotes.id, FULL_comment_upvotes.votes AS score FROM FULL_comment_upvotes) UNION (SELECT FULL_comment_downvotes.id, 0 - FULL_comment_downvotes.votes AS score FROM FULL_comment_downvotes);\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 45, "score": 351559.36024489504 }, { "content": "CREATE VIEW story_votes AS (SELECT FULL_story_upvotes.id, FULL_story_upvotes.votes AS score FROM FULL_story_upvotes) UNION (SELECT FULL_story_downvotes.id, 0 - FULL_story_downvotes.votes AS score FROM FULL_story_downvotes);\n\n\n", 
"file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 46, "score": 351559.36024489504 }, { "content": "CREATE VIEW user_stories AS SELECT stories.user_id AS id, COUNT(stories.id) AS stories FROM stories GROUP BY stories.user_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 47, "score": 351389.2737127573 }, { "content": "CREATE VIEW user_comments AS SELECT comments.user_id AS id, COUNT(comments.id) AS comments FROM comments GROUP BY comments.user_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 48, "score": 351389.2737127573 }, { "content": "CREATE VIEW FULL_story_score AS SELECT story_votes.id, SUM(story_votes.score) as score FROM story_votes GROUP BY story_votes.id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 49, "score": 350525.47031298926 }, { "content": "CREATE VIEW FULL_comment_score AS SELECT comment_votes.id, SUM(comment_votes.score) as score FROM comment_votes GROUP BY comment_votes.id;\n\n\n\n-- Story score tracking\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 50, "score": 350525.47031298926 }, { "content": "CREATE VIEW `BOUNDARY_replying_comments_for_count` AS SELECT `read_ribbons`.`user_id`, tails.created_at FROM `read_ribbons` JOIN `tails` ON (`tails`.`story_id` = `read_ribbons`.`story_id`) WHERE `read_ribbons`.`is_following` = 1 AND `tails`.`user_id` <> `read_ribbons`.`user_id` AND `tails`.`created_at` > `read_ribbons`.`updated_at`;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 51, "score": 350434.7711477622 }, { "content": "CREATE VIEW all_story_votes AS SELECT votes.story_id, votes.user_id, votes.vote FROM votes WHERE votes.comment_id IS NULL;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 52, "score": 345189.43540634 }, { "content": "CREATE VIEW all_comment_votes AS SELECT votes.comment_id, votes.user_id, votes.vote FROM votes WHERE votes.story_id 
IS NULL;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 53, "score": 345189.43540634005 }, { "content": "fn run<C>(global_args: &clap::ArgMatches, local_args: &clap::ArgMatches)\n\nwhere\n\n C: VoteClient + Unpin + 'static,\n\n C: Service<ReadRequest, Response = (), Error = failure::Error> + Clone + Send,\n\n C: Service<WriteRequest, Response = (), Error = failure::Error> + Clone + Send,\n\n <C as Service<ReadRequest>>::Future: Send,\n\n <C as Service<ReadRequest>>::Response: Send,\n\n <C as Service<WriteRequest>>::Future: Send,\n\n <C as Service<WriteRequest>>::Response: Send,\n\n{\n\n // zipf takes ~66ns to generate a random number depending on the CPU,\n\n // so each load generator cannot reasonably generate much more than ~1M reqs/s.\n\n let per_generator = 3_000_000;\n\n let mut target = value_t_or_exit!(global_args, \"ops\", f64);\n\n let ngen = (target as usize + per_generator - 1) / per_generator; // rounded up\n\n target /= ngen as f64;\n\n\n\n let articles = value_t_or_exit!(global_args, \"articles\", usize);\n\n\n\n let params = Parameters {\n", "file_path": "applications/vote/main.rs", "rank": 54, "score": 341489.09447985125 }, { "content": "-----------------------------------------------------\n\n-- Make views for all the computed columns\n\nCREATE VIEW `FULL_story_tag_score` AS SELECT taggings.story_id AS id, SUM(tags.hotness_mod) AS score FROM taggings JOIN tags ON (tags.id = taggings.tag_id) GROUP BY taggings.story_id;\n\n\n\n-- Comment score tracking\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 55, "score": 340049.15089595143 }, { "content": "CREATE VIEW FULL_story_comment_score AS SELECT FULL_non_author_comments.story_id AS id, SUM(FULL_comment_score.score) AS score FROM FULL_non_author_comments JOIN FULL_comment_score ON (FULL_comment_score.id = FULL_non_author_comments.id) GROUP BY FULL_non_author_comments.story_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", 
"rank": 56, "score": 338207.03328926093 }, { "content": "CREATE TABLE `votes` (`id` bigint unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `user_id` int unsigned NOT NULL, `story_id` int unsigned NOT NULL, `comment_id` int unsigned, `vote` tinyint NOT NULL, `reason` varchar(1), INDEX `index_votes_on_comment_id` (`comment_id`), INDEX `user_id_comment_id` (`user_id`, `comment_id`), INDEX `user_id_story_id` (`user_id`, `story_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 57, "score": 333776.1936782595 }, { "content": "CREATE TABLE `moderations` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `moderator_user_id` int, `story_id` int, `comment_id` int, `user_id` int, `action` mediumtext, `reason` mediumtext, `is_from_suggestions` tinyint(1) DEFAULT 0, INDEX `index_moderations_on_created_at` (`created_at`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 58, "score": 333227.58648868126 }, { "content": "-- Story score tracking\n\nCREATE VIEW story_upvotes AS SELECT votes.story_id, votes.user_id FROM votes WHERE votes.comment_id IS NULL AND votes.vote = 1;\n", "file_path": "applications/lobsters/mysql/db-schema/natural.sql", "rank": 59, "score": 331402.596132514 }, { "content": "-- Comment score tracking\n\nCREATE VIEW comment_upvotes AS SELECT votes.comment_id, votes.user_id FROM votes WHERE votes.story_id IS NULL AND votes.vote = 1;\n", "file_path": "applications/lobsters/mysql/db-schema/natural.sql", "rank": 60, "score": 331402.596132514 }, { "content": "CREATE TABLE `messages` (`id` int unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `author_user_id` int unsigned, `recipient_user_id` int unsigned, `has_been_read` tinyint(1) DEFAULT 0, `subject` varchar(100), `body` mediumtext, `short_id` varchar(30), `deleted_by_author` tinyint(1) DEFAULT 0, 
`deleted_by_recipient` tinyint(1) DEFAULT 0, UNIQUE INDEX `random_hash` (`short_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 61, "score": 331398.70510839706 }, { "content": "CREATE VIEW comment_downvotes AS SELECT votes.comment_id, votes.user_id FROM votes WHERE votes.story_id IS NULL AND votes.vote = 0;\n\n\n", "file_path": "applications/lobsters/mysql/db-schema/natural.sql", "rank": 62, "score": 331396.0657486245 }, { "content": "CREATE VIEW story_downvotes AS SELECT votes.story_id, votes.user_id FROM votes WHERE votes.comment_id IS NULL AND votes.vote = 0;\n\n\n", "file_path": "applications/lobsters/mysql/db-schema/natural.sql", "rank": 63, "score": 331396.0657486245 }, { "content": "-- Hotness computation\n\n-- XXX: bah.. pretty sad that this join will end up full...\n\nCREATE VIEW FULL_non_author_comments AS SELECT comments.id, comments.story_id FROM comments JOIN stories ON (comments.story_id = stories.id) WHERE comments.user_id <> stories.user_id;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 64, "score": 330149.106179663 }, { "content": "CREATE TABLE `suggested_titles` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `story_id` int, `user_id` int, `title` varchar(150) COLLATE utf8mb4_general_ci DEFAULT '' NOT NULL) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 65, "score": 329328.3228129637 }, { "content": "CREATE TABLE `hats` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `updated_at` datetime, `user_id` int, `granted_by_user_id` int, `hat` varchar(255) NOT NULL, `link` varchar(255) COLLATE utf8mb4_general_ci, `modlog_use` tinyint(1) DEFAULT 0, `doffed_at` datetime) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 66, "score": 329215.60693846946 }, { "content": "CREATE VIEW story_votes AS (SELECT 
FULL_story_upvotes.id, FULL_story_upvotes.votes AS score FROM FULL_story_upvotes) UNION (SELECT FULL_story_downvotes.id, 0 - FULL_story_downvotes.votes AS score FROM FULL_story_downvotes);\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 67, "score": 326676.95926031296 }, { "content": "CREATE VIEW comment_votes AS (SELECT FULL_comment_upvotes.id, FULL_comment_upvotes.votes AS score FROM FULL_comment_upvotes) UNION (SELECT FULL_comment_downvotes.id, 0 - FULL_comment_downvotes.votes AS score FROM FULL_comment_downvotes);\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 68, "score": 326676.95926031296 }, { "content": "CREATE TABLE `suggested_taggings` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `story_id` int, `tag_id` int, `user_id` int) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 70, "score": 324580.0462369231 }, { "content": "CREATE VIEW FULL_story_hotness AS SELECT all_hotness_components.id, SUM(all_hotness_components.score) as hotness FROM all_hotness_components GROUP BY all_hotness_components.id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 71, "score": 320207.5215513768 }, { "content": "fn run_generator<C, R>(\n\n mut handle: C,\n\n errd: &'static atomic::AtomicBool,\n\n ex: tokio::runtime::Handle,\n\n id_rng: R,\n\n target: f64,\n\n global_args: clap::ArgMatches,\n\n) -> (f64, f64, time::Duration)\n\nwhere\n\n C: VoteClient + Unpin + 'static,\n\n R: rand::distributions::Distribution<usize>,\n\n C: Service<ReadRequest, Response = (), Error = failure::Error> + Clone + Send,\n\n C: Service<WriteRequest, Response = (), Error = failure::Error> + Clone + Send,\n\n <C as Service<ReadRequest>>::Future: Send,\n\n <C as Service<ReadRequest>>::Response: Send,\n\n <C as Service<WriteRequest>>::Future: Send,\n\n <C as Service<WriteRequest>>::Response: Send,\n\n{\n\n let runtime = 
time::Duration::from_secs(value_t_or_exit!(global_args, \"runtime\", u64));\n\n let every = value_t_or_exit!(global_args, \"ratio\", u32);\n", "file_path": "applications/vote/main.rs", "rank": 72, "score": 319670.1673004021 }, { "content": "CREATE VIEW BOUNDARY_notifications AS SELECT BOUNDARY_replying_comments_for_count.user_id, COUNT(*) AS notifications, FROM `BOUNDARY_replying_comments_for_count` GROUP BY `BOUNDARY_replying_comments_for_count`.`user_id`;\n\n\n\n-- Runtime queries\n\n\n\nVIEW q_0: SELECT 1 AS one FROM users WHERE users.username = ?;\n\nVIEW q_1: SELECT tags.id, tags.tag, tags.description, tags.privileged, tags.is_media, tags.inactive, tags.hotness_mod FROM tags WHERE tags.inactive = 0 AND tags.tag = ?;\n\nVIEW q_2: SELECT 1 AS one FROM stories WHERE stories.short_id = ?;\n\nVIEW q_3: SELECT votes.id, votes.user_id, votes.story_id, votes.comment_id, votes.vote, votes.reason FROM votes WHERE votes.user_id = ? AND votes.story_id = ? AND votes.comment_id = NULL;\n\nVIEW q_4: SELECT stories.id, stories.always_null, stories.created_at, stories.user_id, stories.url, stories.title, stories.description, stories.short_id, stories.is_expired, stories.is_moderated, stories.markeddown_description, stories.story_cache, stories.merged_story_id, stories.unavailable_at, stories.twitter_id, stories.user_is_author FROM stories WHERE stories.short_id = ?;\n\nVIEW q_5: SELECT users.id, users.username, users.email, users.password_digest, users.created_at, users.is_admin, users.password_reset_token, users.session_token, users.about, users.invited_by_user_id, users.is_moderator, users.pushover_mentions, users.rss_token, users.mailing_list_token, users.mailing_list_mode, users.karma, users.banned_at, users.banned_by_user_id, users.banned_reason, users.deleted_at, users.disabled_invite_at, users.disabled_invite_by_user_id, users.disabled_invite_reason, users.settings FROM users WHERE users.id = ?;\n\nVIEW q_6: SELECT 1 AS one FROM comments WHERE comments.short_id = 
?;\n\nVIEW q_7: SELECT votes.id, votes.user_id, votes.story_id, votes.comment_id, votes.vote, votes.reason FROM votes WHERE votes.user_id = ? AND votes.story_id = ? AND votes.comment_id = ?;\n\nVIEW q_8: SELECT comments.id, comments.created_at, comments.updated_at, comments.short_id, comments.story_id, comments.user_id, comments.parent_comment_id, comments.thread_id, comments.comment, comments.markeddown_comment, comments.is_deleted, comments.is_moderated, comments.is_from_email, comments.hat_id FROM comments WHERE comments.story_id = ? AND comments.short_id = ?;\n\nVIEW q_9: SELECT frontpage_ids.id FROM frontpage_ids;\n\nVIEW q_10: SELECT story_with_votes.id, story_with_votes.always_null, story_with_votes.created_at, story_with_votes.user_id, story_with_votes.url, story_with_votes.title, story_with_votes.description, story_with_votes.short_id, story_with_votes.is_expired, story_with_votes.is_moderated, story_with_votes.markeddown_description, story_with_votes.story_cache, story_with_votes.merged_story_id, story_with_votes.unavailable_at, story_with_votes.twitter_id, story_with_votes.user_is_author, story_with_votes.upvotes, story_with_votes.downvotes, story_with_votes.score FROM story_with_votes WHERE story_with_votes.id = ? 
AND story_with_votes.merged_story_id = NULL AND story_with_votes.is_expired = 0 AND story_with_votes.score >= 0;\n\nVIEW q_11: SELECT story_with_votes.id, story_with_votes.always_null, story_with_votes.created_at, story_with_votes.user_id, story_with_votes.url, story_with_votes.title, story_with_votes.description, story_with_votes.short_id, story_with_votes.is_expired, story_with_votes.is_moderated, story_with_votes.markeddown_description, story_with_votes.story_cache, story_with_votes.merged_story_id, story_with_votes.unavailable_at, story_with_votes.twitter_id, story_with_votes.user_is_author, story_with_votes.upvotes, story_with_votes.downvotes, story_with_votes.score FROM story_with_votes WHERE story_with_votes.short_id = ?;\n\nVIEW q_12: SELECT tag_filters.id, tag_filters.created_at, tag_filters.updated_at, tag_filters.user_id, tag_filters.tag_id FROM tag_filters WHERE tag_filters.user_id = ?;\n\nVIEW q_13: SELECT read_ribbons.id, read_ribbons.is_following, read_ribbons.created_at, read_ribbons.updated_at, read_ribbons.user_id, read_ribbons.story_id FROM read_ribbons WHERE read_ribbons.user_id = ? 
AND read_ribbons.story_id = ?;\n\nVIEW q_14: SELECT taggings.story_id FROM taggings WHERE taggings.story_id = ?;\n\nVIEW q_15: SELECT stories.id FROM stories WHERE stories.merged_story_id = ?;\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 73, "score": 318736.1238447124 }, { "content": "CREATE TABLE `votes` (`id` bigint unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `user_id` int unsigned NOT NULL, `story_id` int unsigned NOT NULL, `comment_id` int unsigned, `vote` tinyint NOT NULL, `reason` varchar(1), INDEX `index_votes_on_comment_id` (`comment_id`), INDEX `user_id_comment_id` (`user_id`, `comment_id`), INDEX `user_id_story_id` (`user_id`, `story_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n\n\n\n-----------------------------------------------------\n\n-- Make views for all the computed columns\n\nFULL_story_tag_score: \n", "file_path": "applications/lobsters/noria/schema.sql", "rank": 74, "score": 317966.50587764266 }, { "content": "CREATE TABLE `votes` (`id` bigint unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `user_id` int unsigned NOT NULL, `story_id` int unsigned NOT NULL, `comment_id` int unsigned, `vote` tinyint NOT NULL, `reason` varchar(1), INDEX `index_votes_on_comment_id` (`comment_id`), INDEX `user_id_comment_id` (`user_id`, `comment_id`), INDEX `user_id_story_id` (`user_id`, `story_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n\n\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 75, "score": 316378.78437990067 }, { "content": "CREATE TABLE `votes` (`id` bigint unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `user_id` int unsigned NOT NULL, `story_id` int unsigned NOT NULL, `comment_id` int unsigned, `vote` tinyint NOT NULL, `reason` varchar(1), INDEX `index_votes_on_comment_id` (`comment_id`), INDEX `user_id_comment_id` (`user_id`, `comment_id`), INDEX `user_id_story_id` (`user_id`, `story_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "applications/lobsters/mysql/db-schema/original.sql", 
"rank": 76, "score": 314814.83019984403 }, { "content": "CREATE TABLE `votes` (`id` bigint unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `user_id` int unsigned NOT NULL, `story_id` int unsigned NOT NULL, `comment_id` int unsigned, `vote` tinyint NOT NULL, `reason` varchar(1), INDEX `index_votes_on_comment_id` (`comment_id`), INDEX `user_id_comment_id` (`user_id`, `comment_id`), INDEX `user_id_story_id` (`user_id`, `story_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n\n\n", "file_path": "applications/lobsters/mysql/db-schema/natural.sql", "rank": 77, "score": 314814.8301998441 }, { "content": "CREATE TABLE `votes` (`id` bigint unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `user_id` int unsigned NOT NULL, `story_id` int unsigned NOT NULL, `comment_id` int unsigned, `vote` tinyint NOT NULL, `reason` varchar(1), INDEX `index_votes_on_comment_id` (`comment_id`), INDEX `user_id_comment_id` (`user_id`, `comment_id`), INDEX `user_id_story_id` (`user_id`, `story_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "applications/lobsters/mysql/db-schema/noria.sql", "rank": 78, "score": 314814.8301998441 }, { "content": "CREATE TABLE `moderations` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `moderator_user_id` int, `story_id` int, `comment_id` int, `user_id` int, `action` mediumtext, `reason` mediumtext, `is_from_suggestions` tinyint(1) DEFAULT 0, INDEX `index_moderations_on_created_at` (`created_at`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "applications/lobsters/noria/schema.sql", "rank": 79, "score": 314716.43751707213 }, { "content": "CREATE TABLE `messages` (`id` int unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `author_user_id` int unsigned, `recipient_user_id` int unsigned, `has_been_read` tinyint(1) DEFAULT 0, `subject` varchar(100), `body` mediumtext, `short_id` varchar(30), `deleted_by_author` tinyint(1) DEFAULT 0, `deleted_by_recipient` tinyint(1) DEFAULT 0, 
UNIQUE INDEX `random_hash` (`short_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "applications/lobsters/noria/schema.sql", "rank": 80, "score": 314542.29341939435 }, { "content": "CREATE TABLE `hidden_stories` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `user_id` int, `story_id` int, UNIQUE INDEX `index_hidden_stories_on_user_id_and_story_id` (`user_id`, `story_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 81, "score": 314024.2184768183 }, { "content": "CREATE TABLE `tag_filters` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `user_id` int, `tag_id` int, INDEX `user_tag_idx` (`user_id`, `tag_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 82, "score": 312928.6434491285 }, { "content": "CREATE TABLE `moderations` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `moderator_user_id` int, `story_id` int, `comment_id` int, `user_id` int, `action` mediumtext, `reason` mediumtext, `is_from_suggestions` tinyint(1) DEFAULT 0, INDEX `index_moderations_on_created_at` (`created_at`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 83, "score": 312884.06362282974 }, { "content": "CREATE TABLE `messages` (`id` int unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `author_user_id` int unsigned, `recipient_user_id` int unsigned, `has_been_read` tinyint(1) DEFAULT 0, `subject` varchar(100), `body` mediumtext, `short_id` varchar(30), `deleted_by_author` tinyint(1) DEFAULT 0, `deleted_by_recipient` tinyint(1) DEFAULT 0, UNIQUE INDEX `random_hash` (`short_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 84, "score": 312623.98110410135 }, { "content": "-- 
Frontpage\n\nCREATE VIEW frontpage_ids AS SELECT FULL_story_hotness.id FROM FULL_story_hotness ORDER BY FULL_story_hotness.hotness LIMIT 51 OFFSET 0;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 85, "score": 312598.9214057973 }, { "content": "CREATE TABLE `moderations` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `moderator_user_id` int, `story_id` int, `comment_id` int, `user_id` int, `action` mediumtext, `reason` mediumtext, `is_from_suggestions` tinyint(1) DEFAULT 0, INDEX `index_moderations_on_created_at` (`created_at`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "applications/lobsters/mysql/db-schema/natural.sql", "rank": 86, "score": 311082.6596531307 }, { "content": "CREATE TABLE `moderations` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `moderator_user_id` int, `story_id` int, `comment_id` int, `user_id` int, `action` mediumtext, `reason` mediumtext, `is_from_suggestions` tinyint(1) DEFAULT 0, INDEX `index_moderations_on_created_at` (`created_at`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "applications/lobsters/mysql/db-schema/noria.sql", "rank": 87, "score": 311082.6596531307 }, { "content": "CREATE TABLE `moderations` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime NOT NULL, `updated_at` datetime NOT NULL, `moderator_user_id` int, `story_id` int, `comment_id` int, `user_id` int, `action` mediumtext, `reason` mediumtext, `is_from_suggestions` tinyint(1) DEFAULT 0, INDEX `index_moderations_on_created_at` (`created_at`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "applications/lobsters/mysql/db-schema/original.sql", "rank": 88, "score": 311082.6596531307 }, { "content": "CREATE TABLE `messages` (`id` int unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `author_user_id` int unsigned, `recipient_user_id` int 
unsigned, `has_been_read` tinyint(1) DEFAULT 0, `subject` varchar(100), `body` mediumtext, `short_id` varchar(30), `deleted_by_author` tinyint(1) DEFAULT 0, `deleted_by_recipient` tinyint(1) DEFAULT 0, UNIQUE INDEX `random_hash` (`short_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "applications/lobsters/mysql/db-schema/natural.sql", "rank": 89, "score": 310740.2766169208 }, { "content": "CREATE TABLE `messages` (`id` int unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `author_user_id` int unsigned, `recipient_user_id` int unsigned, `has_been_read` tinyint(1) DEFAULT 0, `subject` varchar(100), `body` mediumtext, `short_id` varchar(30), `deleted_by_author` tinyint(1) DEFAULT 0, `deleted_by_recipient` tinyint(1) DEFAULT 0, UNIQUE INDEX `random_hash` (`short_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "applications/lobsters/mysql/db-schema/original.sql", "rank": 90, "score": 310740.2766169208 }, { "content": "CREATE TABLE `messages` (`id` int unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `author_user_id` int unsigned, `recipient_user_id` int unsigned, `has_been_read` tinyint(1) DEFAULT 0, `subject` varchar(100), `body` mediumtext, `short_id` varchar(30), `deleted_by_author` tinyint(1) DEFAULT 0, `deleted_by_recipient` tinyint(1) DEFAULT 0, UNIQUE INDEX `random_hash` (`short_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n", "file_path": "applications/lobsters/mysql/db-schema/noria.sql", "rank": 91, "score": 310740.2766169208 }, { "content": "CREATE TABLE `hats` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `updated_at` datetime, `user_id` int, `granted_by_user_id` int, `hat` varchar(255) NOT NULL, `link` varchar(255) COLLATE utf8mb4_general_ci, `modlog_use` tinyint(1) DEFAULT 0, `doffed_at` datetime) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "applications/lobsters/noria/schema.sql", "rank": 92, "score": 310443.320417479 }, { "content": "CREATE TABLE `hats` 
(`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `updated_at` datetime, `user_id` int, `granted_by_user_id` int, `hat` varchar(255) NOT NULL, `link` varchar(255) COLLATE utf8mb4_general_ci, `modlog_use` tinyint(1) DEFAULT 0, `doffed_at` datetime) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "server/tests/mergeable-lobsters-schema.sql", "rank": 93, "score": 308025.43625232775 }, { "content": "CREATE TABLE `suggested_titles` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `story_id` int, `user_id` int, `title` varchar(150) COLLATE utf8mb4_general_ci DEFAULT '' NOT NULL) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "applications/lobsters/noria/schema.sql", "rank": 94, "score": 307969.1023499641 }, { "content": "CREATE VIEW tails AS SELECT heads.user_id, heads.story_id, good_comments.created_at FROM heads JOIN good_comments ON (good_comments.story_id = heads.story_id) WHERE heads.pid = good_comments.parent_comment_id;\n\n\n", "file_path": "server/tests/filter-aggregate-lobsters-schema.sql", "rank": 95, "score": 306704.37999456265 }, { "content": "CREATE TABLE `hats` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `updated_at` datetime, `user_id` int, `granted_by_user_id` int, `hat` varchar(255) NOT NULL, `link` varchar(255) COLLATE utf8mb4_general_ci, `modlog_use` tinyint(1) DEFAULT 0, `doffed_at` datetime) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "applications/lobsters/mysql/db-schema/original.sql", "rank": 96, "score": 305658.0925762167 }, { "content": "CREATE TABLE `hats` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `updated_at` datetime, `user_id` int, `granted_by_user_id` int, `hat` varchar(255) NOT NULL, `link` varchar(255) COLLATE utf8mb4_general_ci, `modlog_use` tinyint(1) DEFAULT 0, `doffed_at` datetime) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "applications/lobsters/mysql/db-schema/noria.sql", "rank": 97, "score": 305658.0925762167 }, { "content": 
"CREATE TABLE `hats` (`id` int NOT NULL AUTO_INCREMENT PRIMARY KEY, `created_at` datetime, `updated_at` datetime, `user_id` int, `granted_by_user_id` int, `hat` varchar(255) NOT NULL, `link` varchar(255) COLLATE utf8mb4_general_ci, `modlog_use` tinyint(1) DEFAULT 0, `doffed_at` datetime) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n", "file_path": "applications/lobsters/mysql/db-schema/natural.sql", "rank": 98, "score": 305658.0925762167 }, { "content": " // Retrieve the result of the count query:\n\n let result = getter.lookup(&[\"Volvo\".into()], true).await.unwrap();\n\n assert_eq!(result.len(), 1);\n\n assert_eq!(result[0][0], 2.into());\n\n}\n\n\n\n#[tokio::test(threaded_scheduler)]\n\nasync fn it_works_with_vote() {\n\n let mut g = start_simple(\"it_works_with_vote\").await;\n\n let sql = \"\n\n # base tables\n\n CREATE TABLE Article (id int, title varchar(255), PRIMARY KEY(id));\n\n CREATE TABLE Vote (article_id int, user int);\n\n\n\n # read queries\n\n QUERY ArticleWithVoteCount: SELECT Article.id, title, VoteCount.votes AS votes \\\n\n FROM Article \\\n\n LEFT JOIN (SELECT Vote.article_id, COUNT(user) AS votes \\\n\n FROM Vote GROUP BY Vote.article_id) AS VoteCount \\\n\n ON (Article.id = VoteCount.article_id) WHERE Article.id = ?;\n", "file_path": "server/src/integration.rs", "rank": 99, "score": 95.0404593222076 } ]
Rust
src/game.rs
afarinetti/gameoflife-rs
4d664f9122178727d2b3216876aa9409b9b7b97b
use std::fmt; #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Cell { Dead, Alive, } impl fmt::Display for Cell { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Cell::Alive => write!(f, "ALIVE"), Cell::Dead => write!(f, "DEAD"), } } } pub struct Grid { num_rows: u32, num_cols: u32, grid: Vec<Cell>, } impl Grid { pub fn new(num_rows: u32, num_cols: u32) -> Grid { Grid { num_rows: num_rows, num_cols: num_cols, grid: vec![Cell::Dead; (num_rows * num_cols) as usize], } } pub fn set_cells(&mut self, cells: &[(u32, u32)]) { for (row, col) in cells.iter().cloned() { let idx = self.cell_to_index(row, col); self.grid[idx] = Cell::Alive; } } fn cell_to_index(&self, row: u32, col: u32) -> usize { ((row * self.num_cols) + col) as usize } pub fn get(&self, row: u32, col: u32) -> Cell { let index = self.cell_to_index(row, col); self.grid[index] } fn set(&mut self, row: u32, col: u32, state: Cell) { let index = self.cell_to_index(row, col); self.grid[index] = state } } impl fmt::Display for Grid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for line in self.grid.as_slice().chunks(self.num_cols as usize) { for &cell in line { let smybol = if cell == Cell::Dead { '◻' } else { '◼' }; write!(f, "{}", smybol)?; } write!(f, "\n")?; } Ok(()) } } struct Operation { row: u32, col: u32, state: Cell } impl Operation { fn new(row: u32, col: u32, state: Cell) -> Operation { Operation { row, col, state } } } impl fmt::Display for Operation { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Operation[row: {}, col: {}, state: {}]", self.row, self.col, self.state) } } pub struct ConwaySim { grid: Grid, generation: u32, } impl ConwaySim { pub fn new(num_rows: u32, num_cols: u32) -> ConwaySim { ConwaySim { grid: Grid::new(num_rows, num_cols), generation: 0, } } #[allow(dead_code)] pub fn new_with_grid(grid: Grid) -> ConwaySim { ConwaySim { grid, generation: 0 } } pub fn get_generation(&self) -> u32 { self.generation } pub fn 
is_cell_alive(&self, row: u32, col: u32) -> bool { self.grid.get(row, col) == Cell::Alive } pub fn is_any_cell_alive(&self) -> bool { let mut alive = false; for &cell in self.grid.grid.iter() { if cell == Cell::Alive { alive = true; break; } } return alive; } pub fn get_neighbor_count(&self, row: u32, col: u32) -> u8 { let mut count: u8 = 0; let mut new_row: u32; let mut new_col: u32; if (row > 0) && (col > 0) { new_row = row - 1; new_col = col - 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if row > 0 { new_row = row - 1; new_col = col; if self.is_cell_alive(new_row, new_col) { count += 1; } } if (row > 0) && ((col + 1) < self.grid.num_cols) { new_row = row - 1; new_col = col + 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if col > 0 { new_row = row; new_col = col - 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if (col + 1) < self.grid.num_cols { new_row = row; new_col = col + 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if ((row + 1) < self.grid.num_rows) && (col > 0) { new_row = row + 1; new_col = col - 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if (row + 1) < self.grid.num_rows { new_row = row + 1; new_col = col; if self.is_cell_alive(new_row, new_col) { count += 1; } } if ((row + 1) < self.grid.num_rows) && ((col + 1) < self.grid.num_cols) { new_row = row + 1; new_col = col + 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } return count; } pub fn set_cells(&mut self, cells: &[(u32, u32)]) { self.grid.set_cells(cells); } fn apply_rules(&self, row: u32, col: u32) -> Vec<Operation> { let mut operations: Vec<Operation> = Vec::new(); let neighbor_count = self.get_neighbor_count(row, col); let alive = self.is_cell_alive(row, col); if alive { if neighbor_count < 2 { operations.push(Operation::new(row, col, Cell::Dead)); } else if neighbor_count <= 3 { } else { operations.push(Operation::new(row, col, Cell::Dead)); } } else { if neighbor_count == 3 { 
operations.push(Operation::new(row, col, Cell::Alive)); } } return operations; } pub fn step(&mut self) { let mut operations: Vec<Operation> = Vec::new(); self.generation += 1; for row in 0..self.grid.num_rows { for col in 0..self.grid.num_cols { let results = self.apply_rules(row, col); operations.extend(results); } } for operation in operations { self.grid.set(operation.row, operation.col, operation.state) } } } impl fmt::Display for ConwaySim { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.grid.fmt(f) } }
use std::fmt; #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Cell { Dead, Alive, } impl fmt::Display for Cell { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Cell::Alive => write!(f, "ALIVE"), Cell::Dead => write!(f, "DEAD"), } } } pub struct Grid { num_rows: u32, num_cols: u32, grid: Vec<Cell>, } impl Grid { pub fn new(num_rows: u32, num_cols: u32) -> Grid { Grid { num_rows: num_rows, num_cols: num_cols, grid: vec![Cell::Dead; (num_rows * num_cols) as usize], } } pub fn set_cells(&mut self, cells: &[(u32, u32)]) { for (row, col) in cells.iter().cloned() { let idx = self.cell_to_index(row, col); self.grid[idx] = Cell::Alive; } } fn cell_to_index(&self, row: u32, col: u32) -> usize { ((row * self.num_cols) + col) as usize } pub fn get(&self, row: u32, col: u32) -> Cell { let index = self.cell_to_index(row, col); self.grid[index] } fn set(&mut self, row: u32, col: u32, state: Cell) { let index = self.cell_to_index(row, col); self.grid[index] = state } } impl fmt::Display for Grid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for line in self.grid.as_slice().chunks(self.num_cols as usize) { for &cell in line { let smybol = if cell == Cell::Dead { '◻' } else { '◼' }; write!(f, "{}", smybol)?; } write!(f, "\n")?; } Ok(()) } } struct Operation { row: u32, col: u32, state: Cell } impl Operation { fn new(row: u32, col: u32, state: Cell) -> Operation { Operation { row, col, state } } } impl fmt::Display for Operation { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Operation[row: {}, col: {}, state: {}]", self.row, self.col, self.state) } } pub struct ConwaySim { grid: Grid, generation: u32, } impl ConwaySim { pub fn new(num_rows: u32, nu
#[allow(dead_code)] pub fn new_with_grid(grid: Grid) -> ConwaySim { ConwaySim { grid, generation: 0 } } pub fn get_generation(&self) -> u32 { self.generation } pub fn is_cell_alive(&self, row: u32, col: u32) -> bool { self.grid.get(row, col) == Cell::Alive } pub fn is_any_cell_alive(&self) -> bool { let mut alive = false; for &cell in self.grid.grid.iter() { if cell == Cell::Alive { alive = true; break; } } return alive; } pub fn get_neighbor_count(&self, row: u32, col: u32) -> u8 { let mut count: u8 = 0; let mut new_row: u32; let mut new_col: u32; if (row > 0) && (col > 0) { new_row = row - 1; new_col = col - 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if row > 0 { new_row = row - 1; new_col = col; if self.is_cell_alive(new_row, new_col) { count += 1; } } if (row > 0) && ((col + 1) < self.grid.num_cols) { new_row = row - 1; new_col = col + 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if col > 0 { new_row = row; new_col = col - 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if (col + 1) < self.grid.num_cols { new_row = row; new_col = col + 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if ((row + 1) < self.grid.num_rows) && (col > 0) { new_row = row + 1; new_col = col - 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } if (row + 1) < self.grid.num_rows { new_row = row + 1; new_col = col; if self.is_cell_alive(new_row, new_col) { count += 1; } } if ((row + 1) < self.grid.num_rows) && ((col + 1) < self.grid.num_cols) { new_row = row + 1; new_col = col + 1; if self.is_cell_alive(new_row, new_col) { count += 1; } } return count; } pub fn set_cells(&mut self, cells: &[(u32, u32)]) { self.grid.set_cells(cells); } fn apply_rules(&self, row: u32, col: u32) -> Vec<Operation> { let mut operations: Vec<Operation> = Vec::new(); let neighbor_count = self.get_neighbor_count(row, col); let alive = self.is_cell_alive(row, col); if alive { if neighbor_count < 2 { operations.push(Operation::new(row, col, 
Cell::Dead)); } else if neighbor_count <= 3 { } else { operations.push(Operation::new(row, col, Cell::Dead)); } } else { if neighbor_count == 3 { operations.push(Operation::new(row, col, Cell::Alive)); } } return operations; } pub fn step(&mut self) { let mut operations: Vec<Operation> = Vec::new(); self.generation += 1; for row in 0..self.grid.num_rows { for col in 0..self.grid.num_cols { let results = self.apply_rules(row, col); operations.extend(results); } } for operation in operations { self.grid.set(operation.row, operation.col, operation.state) } } } impl fmt::Display for ConwaySim { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.grid.fmt(f) } }
m_cols: u32) -> ConwaySim { ConwaySim { grid: Grid::new(num_rows, num_cols), generation: 0, } }
function_block-function_prefixed
[ { "content": "fn main() {\n\n let mut sim = game::ConwaySim::new(5, 5);\n\n\n\n sim.set_cells(&[\n\n (2, 1),\n\n (2, 2),\n\n (2, 3)\n\n ]);\n\n\n\n for _i in 0..105 {\n\n sim.step();\n\n \n\n println!(\"Generation: {}\", sim.get_generation());\n\n print!(\"{}\", sim);\n\n println!(\"Any cell alive? {}\", sim.is_any_cell_alive());\n\n println!();\n\n\n\n if !sim.is_any_cell_alive() {\n\n break;\n\n }\n\n\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 1, "score": 18755.100981066353 } ]
Rust
fly-query-rs/src/current_search.rs
HeroicosHM/Hackathon2020
1960fded5de849dca6b2eebdfcd4082d8f7c62f7
use crate::BEARER_AUTH; use reqwest::Client; use serde::{de::IgnoredAny, Deserialize, Serialize}; use wasm_bindgen::prelude::*; const AIRPORT_QUERY: &'static str = " query findAirports($query: String!) { airports(query: $query) { edges { node { ...Airport } } } } fragment Airport on AirportSuggestion { iataCode title selectedText subSuggestions { iataCode title selectedText } } "; #[wasm_bindgen] extern "C" { #[wasm_bindgen(js_namespace = console)] pub fn log(s: &str); } #[derive(Serialize)] struct ApQueryVariables<'a> { query: &'a str, } #[derive(Serialize)] struct ApQuery<'a> { query: &'static str, variables: ApQueryVariables<'a>, } impl<'a> ApQuery<'a> { fn new(search: &'a str) -> Self { ApQuery { query: AIRPORT_QUERY, variables: ApQueryVariables { query: search }, } } } #[derive(Deserialize)] struct ApQueryResponseNode { #[serde(rename(deserialize = "iataCode"))] _iata_code: IgnoredAny, #[serde(rename(deserialize = "title"))] _title: IgnoredAny, #[serde(rename(deserialize = "selectedText"))] selected_text: String, } #[derive(Deserialize)] struct ApQueryResponseNodeMG { #[serde(rename(deserialize = "iataCode"), skip_deserializing)] _iata_code: IgnoredAny, #[serde(rename(deserialize = "title"), skip_deserializing)] _title: IgnoredAny, #[serde(rename(deserialize = "selectedText"))] selected_text: String, #[serde(rename(serialize = "subSuggestions"), default)] subsuggestions: Option<Vec<ApQueryResponseNode>>, } impl ApQueryResponseNodeMG { fn flatten(self, collection: &mut Vec<String>) { if let Some(inner) = self.subsuggestions { for item in inner { collection.push(item.selected_text); } } else { collection.push(self.selected_text); } } } #[derive(Deserialize)] struct ApQueryResponseNodeMGWrapper { node: ApQueryResponseNodeMG, } #[derive(Deserialize)] struct ApQueryResponseAirports { edges: Vec<ApQueryResponseNodeMGWrapper>, } #[derive(Deserialize)] struct ApQueryResponseData { airports: ApQueryResponseAirports, } #[derive(Deserialize)] struct ApQueryResponse { 
data: ApQueryResponseData, } #[derive(Serialize)] struct ApQueryOut { data: Vec<String>, } impl ApQueryOut { fn new(data: Vec<String>) -> Self { ApQueryOut { data } } fn build_from(api_response: String) -> String { let deserialized = match serde_json::from_str::<ApQueryResponse>(&api_response) { Ok(deserialized) => deserialized, Err(e) => return format!("Error (2): {:?}", e), }; let data = deserialized.data.airports.edges; if data.len() == 0 { log("empty response"); String::new() } else { let mut collection = Vec::with_capacity( data.iter() .map(|mg| { mg.node .subsuggestions .as_ref() .map_or_else(|| 1, |inner| inner.len()) }) .sum(), ); data.into_iter() .for_each(|item| item.node.flatten(&mut collection)); let out = ApQueryOut::new(collection); match serde_json::to_string(&out) { Ok(serialized_out) => serialized_out, Err(e) => format!("Error (3): {:?}", e), } } } } #[wasm_bindgen] pub async fn query_current_search(search: String) -> String { console_error_panic_hook::set_once(); let response = match Client::new() .post("http://localhost:3000/api/graphql") .header("authorization", BEARER_AUTH) .header("content-type", "application/json") .body(serde_json::to_string(&ApQuery::new(&search)).unwrap()) .send() .await { Ok(response) => match response.text().await { Ok(response) => response, Err(e) => format!("Error: {}", e), }, Err(e) => format!("Error: {}", e), }; ApQueryOut::build_from(response) }
use crate::BEARER_AUTH; use reqwest::Client; use serde::{de::IgnoredAny, Deserialize, Serialize}; use wasm_bindgen::prelude::*; const AIRPORT_QUERY: &'static str = " query findAirports($query: String!) { airports(query: $query) { edges { node { ...Airport } } } } fragment Airport on AirportSuggestion { iataCode title selectedText subSuggestions { iataCode title selectedText } } "; #[wasm_bindgen] extern "C" { #[wasm_bindgen(js_namespace = console)] pub fn log(s: &str); } #[derive(Serialize)] struct ApQueryVariables<'a> { query: &'a str, } #[derive(Serialize)] struct ApQuery<'a> { query: &'static str, variables: ApQueryVariables<'a>, } impl<'a> ApQuery<'a> { fn new(search: &'a str) -> Self { ApQuery { query: AIRPORT_QUERY, variables: ApQueryVariables { query: search }, } } } #[derive(Deserialize)] struct ApQueryResponseNode { #[serde(rename(deserialize = "iataCode"))] _iata_code: IgnoredAny, #[serde(rename(deserialize = "title"))] _title: IgnoredAny, #[serde(rename(deserialize = "selectedText"))] selected_text: String, } #[derive(Deserialize)] struct ApQueryResponseNodeMG { #[serde(rename(deserialize = "iataCode"), skip_deserializing)] _iata_code: IgnoredAny, #[serde(rename(deserialize = "title"), skip_deserializing)] _title: IgnoredAny, #[serde(rename(deserialize = "selectedText"))] selected_text: String, #[serde(rename(serialize = "subSuggestions"), default)] subsuggestions: Option<Vec<ApQueryResponseNode>>, } impl ApQueryResponseNodeMG { fn flatten(self, collection: &mut Vec<String>) { if let Some(inner) = self.subsuggestions { for item in inner { collection.push(item.selected_text); } } else { collection.push(self.selected_text); } } } #[derive(Deserialize)] struct ApQueryResponseNodeMGWrapper { node: ApQueryResponseNodeMG, } #[derive(Deserialize)] struct ApQueryResponseAirports { edges: Vec<ApQueryResponseNodeMGWrapper>, } #[derive(Deserialize)] struct ApQueryResponseData { airports: ApQueryResponseAirports, } #[derive(Deserialize)] struct ApQueryResponse { 
data: ApQueryResponseData, } #[derive(Serialize)] struct ApQueryOut { data: Vec<String>, } impl ApQueryOut { fn new(data: Vec<String>) -> Self { ApQueryOut { data } } fn build_from(api_response: String) -> String { let deserialized = match serde_json::from_str::<ApQueryResponse>(&api_response) { Ok(deserialized) => deserialized, Err(e) => return format!("Error (2): {:?}", e), }; let data = deserialized.data.airports.edges; if data.len() == 0 { log("empty response"); String::new() } else { let mut collection = Vec::with_capacity( data.iter() .map(|mg| { mg.node .subsuggestions .as_ref() .map_or_else(|| 1, |inner| inner.len()) }) .sum(), ); data.into_iter() .for_each(|item| item.node.flatten(&mut collection)); let out = ApQueryOut::new(collection); match serde_json::to_string(&out) { Ok(serialized_out) => serialized_out, Err(e) => format!("Error (3): {:?}", e), } } } } #[wasm_bindgen]
pub async fn query_current_search(search: String) -> String { console_error_panic_hook::set_once(); let response = match Client::new() .post("http://localhost:3000/api/graphql") .header("authorization", BEARER_AUTH) .header("content-type", "application/json") .body(serde_json::to_string(&ApQuery::new(&search)).unwrap()) .send() .await { Ok(response) => match response.text().await { Ok(response) => response, Err(e) => format!("Error: {}", e), }, Err(e) => format!("Error: {}", e), }; ApQueryOut::build_from(response) }
function_block-full_function
[ { "content": "#[derive(Serialize)]\n\nstruct FlightQueryVariables<'a> {\n\n ap_from: &'a str,\n\n ap_to: &'a str,\n\n depart_date: &'a str,\n\n return_date: Option<&'a str>,\n\n round_trip: bool,\n\n}\n\n\n\nimpl<'a> FlightQueryVariables<'a> {\n\n fn new(\n\n ap_from: &'a str,\n\n ap_to: &'a str,\n\n depart_date: &'a str,\n\n return_date: Option<&'a str>,\n\n round_trip: bool,\n\n ) -> Self {\n\n FlightQueryVariables {\n\n ap_from,\n\n ap_to,\n\n depart_date,\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 8, "score": 115339.84587616571 }, { "content": "const encodeString = (typeof cachedTextEncoder.encodeInto === 'function'\n\n ? function (arg, view) {\n\n return cachedTextEncoder.encodeInto(arg, view);\n\n}\n\n : function (arg, view) {\n\n const buf = cachedTextEncoder.encode(arg);\n\n view.set(buf);\n\n return {\n\n read: arg.length,\n\n written: buf.length\n\n };\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 10, "score": 88018.03882851487 }, { "content": "export const __wbindgen_json_serialize = function(arg0, arg1) {\n\n const obj = getObject(arg1);\n\n var ret = JSON.stringify(obj === undefined ? 
null : obj);\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 11, "score": 87078.54796564463 }, { "content": "export const __wbg_self_00b0599bca667294 = function() {\n\n try {\n\n var ret = self.self;\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 12, "score": 87078.54796564463 }, { "content": "export const __wbg_log_1aca18ad930cbd85 = function(arg0, arg1) {\n\n console.log(getStringFromWasm0(arg0, arg1));\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 13, "score": 87078.54796564463 }, { "content": "export const __wbindgen_string_get = function(arg0, arg1) {\n\n const obj = getObject(arg1);\n\n var ret = typeof(obj) === 'string' ? obj : undefined;\n\n var ptr0 = isLikeNone(ret) ? 
0 : passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 14, "score": 86875.24959645535 }, { "content": "export const __wbindgen_string_new = function(arg0, arg1) {\n\n var ret = getStringFromWasm0(arg0, arg1);\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 15, "score": 86875.24959645535 }, { "content": "export const __wbindgen_debug_string = function(arg0, arg1) {\n\n var ret = debugString(getObject(arg1));\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 16, "score": 86875.24959645535 }, { "content": "export const __widl_f_new_with_str_and_init_Request = function(arg0, arg1, arg2) {\n\n try {\n\n var ret = new Request(getStringFromWasm0(arg0, arg1), getObject(arg2));\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 17, "score": 84874.59578871151 }, { "content": "const encodeString = (typeof cachedTextEncoder.encodeInto === 'function'\n\n ? 
function (arg, view) {\n\n return cachedTextEncoder.encodeInto(arg, view);\n\n}\n\n : function (arg, view) {\n\n const buf = cachedTextEncoder.encode(arg);\n\n view.set(buf);\n\n return {\n\n read: arg.length,\n\n written: buf.length\n\n };\n", "file_path": "fly-query-rs/pkg/fly_query_rs.js", "rank": 18, "score": 62231.47314350474 }, { "content": "export const __wbg_self_00b0599bca667294 = function() {\n\n try {\n\n var ret = self.self;\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "fly-query-rs/pkg/fly_query_rs.js", "rank": 23, "score": 61780.08549657525 }, { "content": "export const __wbindgen_json_serialize = function(arg0, arg1) {\n\n const obj = getObject(arg1);\n\n var ret = JSON.stringify(obj === undefined ? null : obj);\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n", "file_path": "fly-query-rs/pkg/fly_query_rs.js", "rank": 24, "score": 61780.08549657525 }, { "content": "export const __wbg_log_1aca18ad930cbd85 = function(arg0, arg1) {\n\n console.log(getStringFromWasm0(arg0, arg1));\n", "file_path": "fly-query-rs/pkg/fly_query_rs.js", "rank": 25, "score": 61780.08549657525 }, { "content": "export const __wbindgen_string_get = function(arg0, arg1) {\n\n const obj = getObject(arg1);\n\n var ret = typeof(obj) === 'string' ? obj : undefined;\n\n var ptr0 = isLikeNone(ret) ? 
0 : passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n", "file_path": "fly-query-rs/pkg/fly_query_rs.js", "rank": 26, "score": 61579.3928365551 }, { "content": "export const __wbindgen_string_new = function(arg0, arg1) {\n\n var ret = getStringFromWasm0(arg0, arg1);\n\n return addHeapObject(ret);\n", "file_path": "fly-query-rs/pkg/fly_query_rs.js", "rank": 27, "score": 61579.3928365551 }, { "content": "export const __wbindgen_debug_string = function(arg0, arg1) {\n\n var ret = debugString(getObject(arg1));\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n", "file_path": "fly-query-rs/pkg/fly_query_rs.js", "rank": 28, "score": 61579.3928365551 }, { "content": "export const __widl_f_new_with_str_and_init_Request = function(arg0, arg1, arg2) {\n\n try {\n\n var ret = new Request(getStringFromWasm0(arg0, arg1), getObject(arg2));\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "fly-query-rs/pkg/fly_query_rs.js", "rank": 29, "score": 60513.13609965653 }, { "content": "const data = [\n\n createData(\"00:00\", 0),\n\n createData(\"03:00\", 300),\n\n createData(\"06:00\", 600),\n\n createData(\"09:00\", 800),\n\n createData(\"12:00\", 1500),\n\n createData(\"15:00\", 2000),\n\n createData(\"18:00\", 2400),\n\n createData(\"21:00\", 2400),\n\n createData(\"24:00\", undefined)\n", "file_path": "react_website/src/pages/flights/Chart.js", "rank": 30, "score": 57832.77732629811 }, { "content": "const heap = new Array(32);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 31, "score": 56025.73156675828 }, { "content": "export const __wbindgen_is_object = function(arg0) {\n\n const val = getObject(arg0);\n\n var ret = 
typeof(val) === 'object' && val !== null;\n\n return ret;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 32, "score": 55288.74832872266 }, { "content": "export const __wbg_then_7d828a330efec051 = function(arg0, arg1, arg2) {\n\n var ret = getObject(arg0).then(getObject(arg1), getObject(arg2));\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 33, "score": 55288.74832872266 }, { "content": "export const __wbindgen_is_function = function(arg0) {\n\n var ret = typeof(getObject(arg0)) === 'function';\n\n return ret;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 34, "score": 55288.74832872266 }, { "content": "let heap_next = heap.length;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 35, "score": 55288.74832872266 }, { "content": "export const __wbindgen_is_undefined = function(arg0) {\n\n var ret = getObject(arg0) === undefined;\n\n return ret;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 36, "score": 55288.74832872266 }, { "content": "export const __wbindgen_throw = function(arg0, arg1) {\n\n throw new Error(getStringFromWasm0(arg0, arg1));\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 37, "score": 55288.74832872266 }, { "content": "export const __wbindgen_memory = function() {\n\n var ret = wasm.memory;\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 38, "score": 55288.74832872266 }, { "content": "export const __wbg_then_b6fef331fde5cf0a = function(arg0, arg1) {\n\n var ret = getObject(arg0).then(getObject(arg1));\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 39, "score": 55288.74832872266 }, { "content": "let WASM_VECTOR_LEN = 0;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 40, "score": 54570.90245206988 }, { "content": "export const __widl_f_new_Headers = function() {\n\n try {\n\n var ret = 
new Headers();\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 41, "score": 54570.90245206988 }, { "content": "export const __wbg_new_59cb74e423758ede = function() {\n\n var ret = new Error();\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 42, "score": 54570.90245206988 }, { "content": "export const __wbg_value_1b88544311a72cbf = function(arg0) {\n\n var ret = getObject(arg0).value;\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 43, "score": 54570.90245206988 }, { "content": "export const __widl_instanceof_Response = function(arg0) {\n\n var ret = getObject(arg0) instanceof Response;\n\n return ret;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 44, "score": 54570.90245206988 }, { "content": "export const __wbg_call_ce7cf17fc6380443 = function(arg0, arg1, arg2) {\n\n try {\n\n var ret = getObject(arg0).call(getObject(arg1), getObject(arg2));\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 45, "score": 54570.90245206988 }, { "content": "export const __wbg_globalThis_22e06d4bea0084e3 = function() {\n\n try {\n\n var ret = globalThis.globalThis;\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 46, "score": 54570.90245206988 }, { "content": "export const __wbg_stack_558ba5917b466edd = function(arg0, arg1) {\n\n var ret = getObject(arg1).stack;\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 47, "score": 54570.90245206988 }, { "content": "let 
cachedTextEncoder = new TextEncoder('utf-8');\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 48, "score": 54570.90245206988 }, { "content": "export const __wbindgen_cb_drop = function(arg0) {\n\n const obj = takeObject(arg0).original;\n\n if (obj.cnt-- == 1) {\n\n obj.a = 0;\n\n return true;\n\n }\n\n var ret = false;\n\n return ret;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 49, "score": 54570.90245206988 }, { "content": "export const __wbg_newwithbyteoffsetandlength_6b93e5ed7d4086de = function(arg0, arg1, arg2) {\n\n var ret = new Uint8Array(getObject(arg0), arg1 >>> 0, arg2 >>> 0);\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 50, "score": 54570.90245206988 }, { "content": "export const __widl_f_text_Response = function(arg0) {\n\n try {\n\n var ret = getObject(arg0).text();\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 51, "score": 54570.90245206988 }, { "content": "export const __wbg_get_b086a3091905ea8f = function(arg0, arg1) {\n\n try {\n\n var ret = Reflect.get(getObject(arg0), getObject(arg1));\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 52, "score": 54570.90245206988 }, { "content": "export const __wbg_next_1806e9c639e7c94e = function(arg0) {\n\n var ret = getObject(arg0).next;\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 53, "score": 54570.90245206988 }, { "content": "export const __wbg_newnoargs_c4b2cbbd30e2d057 = function(arg0, arg1) {\n\n var ret = new Function(getStringFromWasm0(arg0, arg1));\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 54, "score": 54570.90245206988 }, { "content": "export const __wbg_window_aa795c5aad79b8ac = function() {\n\n try {\n\n var ret = 
window.window;\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 55, "score": 54570.90245206988 }, { "content": "export const __wbg_new_d3eff62d5c013634 = function(arg0, arg1) {\n\n try {\n\n var state0 = {a: arg0, b: arg1};\n\n var cb0 = (arg0, arg1) => {\n\n const a = state0.a;\n\n state0.a = 0;\n\n try {\n\n return __wbg_adapter_74(a, state0.b, arg0, arg1);\n\n } finally {\n\n state0.a = a;\n\n }\n\n };\n\n var ret = new Promise(cb0);\n\n return addHeapObject(ret);\n\n } finally {\n\n state0.a = state0.b = 0;\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 56, "score": 54570.90245206988 }, { "content": "export const __widl_instanceof_Window = function(arg0) {\n\n var ret = getObject(arg0) instanceof Window;\n\n return ret;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 57, "score": 54570.90245206988 }, { "content": "let cachedTextDecoder = new TextDecoder('utf-8', { ignoreBOM: true, fatal: true });\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 58, "score": 54570.90245206988 }, { "content": "let cachegetUint8Memory0 = null;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 59, "score": 54570.90245206988 }, { "content": "export const __widl_f_status_Response = function(arg0) {\n\n var ret = getObject(arg0).status;\n\n return ret;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 60, "score": 54570.90245206988 }, { "content": "export const __wbg_resolve_6885947099a907d3 = function(arg0) {\n\n var ret = Promise.resolve(getObject(arg0));\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 61, "score": 54570.90245206988 }, { "content": "let cachegetInt32Memory0 = null;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 62, "score": 54570.90245206988 }, { "content": "export const __wbg_error_4bb6c2a97407129a = 
function(arg0, arg1) {\n\n try {\n\n console.error(getStringFromWasm0(arg0, arg1));\n\n } finally {\n\n wasm.__wbindgen_free(arg0, arg1);\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 63, "score": 54570.90245206988 }, { "content": "export const __wbindgen_closure_wrapper327 = function(arg0, arg1, arg2) {\n\n\n\n const state = { a: arg0, b: arg1, cnt: 1 };\n\n const real = (arg0) => {\n\n state.cnt++;\n\n const a = state.a;\n\n state.a = 0;\n\n try {\n\n return __wbg_adapter_26(a, state.b, arg0);\n\n } finally {\n\n if (--state.cnt === 0) wasm.__wbindgen_export_2.get(96)(a, state.b);\n\n else state.a = a;\n\n }\n\n }\n\n ;\n\n real.original = state;\n\n var ret = real;\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 64, "score": 54570.90245206988 }, { "content": "export const __widl_f_headers_Response = function(arg0) {\n\n var ret = getObject(arg0).headers;\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 65, "score": 54570.90245206988 }, { "content": "export const __wbg_set_8d5fd23e838df6b0 = function(arg0, arg1, arg2) {\n\n try {\n\n var ret = Reflect.set(getObject(arg0), getObject(arg1), getObject(arg2));\n\n return ret;\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 66, "score": 54570.90245206988 }, { "content": "export const __wbg_next_070429384a9059a5 = function(arg0) {\n\n try {\n\n var ret = getObject(arg0).next();\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 67, "score": 54570.90245206988 }, { "content": "export const __wbg_iterator_23e543eb74670550 = function() {\n\n var ret = Symbol.iterator;\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 68, "score": 54570.90245206988 }, { "content": "export const 
__wbg_done_24ef91fda5bda381 = function(arg0) {\n\n var ret = getObject(arg0).done;\n\n return ret;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 69, "score": 54570.90245206988 }, { "content": "export const __wbg_new_dca22b33e64c73c1 = function(arg0) {\n\n var ret = new Uint8Array(getObject(arg0));\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 70, "score": 54570.90245206988 }, { "content": "export const __widl_f_url_Response = function(arg0, arg1) {\n\n var ret = getObject(arg1).url;\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 71, "score": 54570.90245206988 }, { "content": "export const __widl_f_append_Headers = function(arg0, arg1, arg2, arg3, arg4) {\n\n try {\n\n getObject(arg0).append(getStringFromWasm0(arg1, arg2), getStringFromWasm0(arg3, arg4));\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 72, "score": 54570.90245206988 }, { "content": "export const __wbg_new_7dd9b384a913884d = function() {\n\n var ret = new Object();\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 73, "score": 54570.90245206988 }, { "content": "export const __wbg_buffer_1bb127df6348017b = function(arg0) {\n\n var ret = getObject(arg0).buffer;\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 74, "score": 54570.90245206988 }, { "content": "export const __wbg_global_cc239dc2303f417c = function() {\n\n try {\n\n var ret = global.global;\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 75, "score": 54570.90245206988 }, { "content": "export const 
__wbg_call_12b949cfc461d154 = function(arg0, arg1) {\n\n try {\n\n var ret = getObject(arg0).call(getObject(arg1));\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 76, "score": 54570.90245206988 }, { "content": "export const __wbindgen_object_clone_ref = function(arg0) {\n\n var ret = getObject(arg0);\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 77, "score": 53871.458076523835 }, { "content": "export const __wbindgen_object_drop_ref = function(arg0) {\n\n takeObject(arg0);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 78, "score": 53871.458076523835 }, { "content": "export const __widl_f_fetch_with_request_Window = function(arg0, arg1) {\n\n var ret = getObject(arg0).fetch(getObject(arg1));\n\n return addHeapObject(ret);\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 79, "score": 53871.458076523835 }, { "content": "import * as wasm from './fly_query_rs_bg.wasm';\n\n\n\nlet cachedTextDecoder = new TextDecoder('utf-8', { ignoreBOM: true, fatal: true });\n\n\n\ncachedTextDecoder.decode();\n\n\n\nlet cachegetUint8Memory0 = null;\n\nfunction getUint8Memory0() {\n\n if (cachegetUint8Memory0 === null || cachegetUint8Memory0.buffer !== wasm.memory.buffer) {\n\n cachegetUint8Memory0 = new Uint8Array(wasm.memory.buffer);\n\n }\n\n return cachegetUint8Memory0;\n\n}\n\n\n\nfunction getStringFromWasm0(ptr, len) {\n\n return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len));\n\n}\n\n\n\nconst heap = new Array(32);\n\n\n\nheap.fill(undefined);\n\n\n\nheap.push(undefined, null, true, false);\n\n\n\nlet heap_next = heap.length;\n\n\n\nfunction addHeapObject(obj) {\n\n if (heap_next === heap.length) heap.push(heap.length + 1);\n\n const idx = heap_next;\n\n heap_next = heap[idx];\n\n\n\n heap[idx] = obj;\n\n return idx;\n\n}\n\n\n\nfunction getObject(idx) { return heap[idx]; 
}\n\n\n\nlet WASM_VECTOR_LEN = 0;\n\n\n\nlet cachedTextEncoder = new TextEncoder('utf-8');\n\n\n\nconst encodeString = (typeof cachedTextEncoder.encodeInto === 'function'\n\n ? function (arg, view) {\n\n return cachedTextEncoder.encodeInto(arg, view);\n\n}\n\n : function (arg, view) {\n\n const buf = cachedTextEncoder.encode(arg);\n\n view.set(buf);\n\n return {\n\n read: arg.length,\n\n written: buf.length\n\n };\n\n});\n\n\n\nfunction passStringToWasm0(arg, malloc, realloc) {\n\n\n\n if (realloc === undefined) {\n\n const buf = cachedTextEncoder.encode(arg);\n\n const ptr = malloc(buf.length);\n\n getUint8Memory0().subarray(ptr, ptr + buf.length).set(buf);\n\n WASM_VECTOR_LEN = buf.length;\n\n return ptr;\n\n }\n\n\n\n let len = arg.length;\n\n let ptr = malloc(len);\n\n\n\n const mem = getUint8Memory0();\n\n\n\n let offset = 0;\n\n\n\n for (; offset < len; offset++) {\n\n const code = arg.charCodeAt(offset);\n\n if (code > 0x7F) break;\n\n mem[ptr + offset] = code;\n\n }\n\n\n\n if (offset !== len) {\n\n if (offset !== 0) {\n\n arg = arg.slice(offset);\n\n }\n\n ptr = realloc(ptr, len, len = offset + arg.length * 3);\n\n const view = getUint8Memory0().subarray(ptr + offset, ptr + len);\n\n const ret = encodeString(arg, view);\n\n\n\n offset += ret.written;\n\n }\n\n\n\n WASM_VECTOR_LEN = offset;\n\n return ptr;\n\n}\n\n\n\nlet cachegetInt32Memory0 = null;\n\nfunction getInt32Memory0() {\n\n if (cachegetInt32Memory0 === null || cachegetInt32Memory0.buffer !== wasm.memory.buffer) {\n\n cachegetInt32Memory0 = new Int32Array(wasm.memory.buffer);\n\n }\n\n return cachegetInt32Memory0;\n\n}\n\n\n\nfunction dropObject(idx) {\n\n if (idx < 36) return;\n\n heap[idx] = heap_next;\n\n heap_next = idx;\n\n}\n\n\n\nfunction takeObject(idx) {\n\n const ret = getObject(idx);\n\n dropObject(idx);\n\n return ret;\n\n}\n\n\n\nfunction isLikeNone(x) {\n\n return x === undefined || x === null;\n\n}\n\n\n\nfunction debugString(val) {\n\n // primitive types\n\n const type = typeof 
val;\n\n if (type == 'number' || type == 'boolean' || val == null) {\n\n return `${val}`;\n\n }\n\n if (type == 'string') {\n\n return `\"${val}\"`;\n\n }\n\n if (type == 'symbol') {\n\n const description = val.description;\n\n if (description == null) {\n\n return 'Symbol';\n\n } else {\n\n return `Symbol(${description})`;\n\n }\n\n }\n\n if (type == 'function') {\n\n const name = val.name;\n\n if (typeof name == 'string' && name.length > 0) {\n\n return `Function(${name})`;\n\n } else {\n\n return 'Function';\n\n }\n\n }\n\n // objects\n\n if (Array.isArray(val)) {\n\n const length = val.length;\n\n let debug = '[';\n\n if (length > 0) {\n\n debug += debugString(val[0]);\n\n }\n\n for(let i = 1; i < length; i++) {\n\n debug += ', ' + debugString(val[i]);\n\n }\n\n debug += ']';\n\n return debug;\n\n }\n\n // Test for built-in\n\n const builtInMatches = /\\[object ([^\\]]+)\\]/.exec(toString.call(val));\n\n let className;\n\n if (builtInMatches.length > 1) {\n\n className = builtInMatches[1];\n\n } else {\n\n // Failed to match the standard '[object ClassName]'\n\n return toString.call(val);\n\n }\n\n if (className == 'Object') {\n\n // we're a user defined class or Object\n\n // JSON.stringify avoids problems with cycles, and is generally much\n\n // easier than looping through ownProperties of `val`.\n\n try {\n\n return 'Object(' + JSON.stringify(val) + ')';\n\n } catch (_) {\n\n return 'Object';\n\n }\n\n }\n\n // errors\n\n if (val instanceof Error) {\n\n return `${val.name}: ${val.message}\\n${val.stack}`;\n\n }\n\n // TODO we could test for more things here, like `Set`s and `Map`s.\n\n return className;\n\n}\n\nfunction __wbg_adapter_26(arg0, arg1, arg2) {\n\n wasm._dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__h59bc6e54e2a18c88(arg0, arg1, addHeapObject(arg2));\n\n}\n\n\n\n/**\n\n* @param {string} search\n\n* @returns {any}\n\n*/\n\nexport function query_current_search(search) {\n\n var ptr0 = 
passStringToWasm0(search, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n var ret = wasm.query_current_search(ptr0, len0);\n\n return takeObject(ret);\n\n}\n\n\n\nfunction handleError(e) {\n\n wasm.__wbindgen_exn_store(addHeapObject(e));\n\n}\n\nfunction __wbg_adapter_74(arg0, arg1, arg2, arg3) {\n\n wasm.wasm_bindgen__convert__closures__invoke2_mut__h0077ed2153b1f11c(arg0, arg1, addHeapObject(arg2), addHeapObject(arg3));\n\n}\n\n\n\nexport const __wbindgen_string_new = function(arg0, arg1) {\n\n var ret = getStringFromWasm0(arg0, arg1);\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbindgen_json_serialize = function(arg0, arg1) {\n\n const obj = getObject(arg1);\n\n var ret = JSON.stringify(obj === undefined ? null : obj);\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n\n};\n\n\n\nexport const __wbindgen_object_drop_ref = function(arg0) {\n\n takeObject(arg0);\n\n};\n\n\n\nexport const __wbindgen_cb_drop = function(arg0) {\n\n const obj = takeObject(arg0).original;\n\n if (obj.cnt-- == 1) {\n\n obj.a = 0;\n\n return true;\n\n }\n\n var ret = false;\n\n return ret;\n\n};\n\n\n\nexport const __wbg_log_1aca18ad930cbd85 = function(arg0, arg1) {\n\n console.log(getStringFromWasm0(arg0, arg1));\n\n};\n\n\n\nexport const __wbg_new_59cb74e423758ede = function() {\n\n var ret = new Error();\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_stack_558ba5917b466edd = function(arg0, arg1) {\n\n var ret = getObject(arg1).stack;\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n\n};\n\n\n\nexport const __wbg_error_4bb6c2a97407129a = function(arg0, arg1) {\n\n try {\n\n 
console.error(getStringFromWasm0(arg0, arg1));\n\n } finally {\n\n wasm.__wbindgen_free(arg0, arg1);\n\n }\n\n};\n\n\n\nexport const __widl_instanceof_Window = function(arg0) {\n\n var ret = getObject(arg0) instanceof Window;\n\n return ret;\n\n};\n\n\n\nexport const __widl_f_new_Headers = function() {\n\n try {\n\n var ret = new Headers();\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __widl_f_append_Headers = function(arg0, arg1, arg2, arg3, arg4) {\n\n try {\n\n getObject(arg0).append(getStringFromWasm0(arg1, arg2), getStringFromWasm0(arg3, arg4));\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __widl_f_new_with_str_and_init_Request = function(arg0, arg1, arg2) {\n\n try {\n\n var ret = new Request(getStringFromWasm0(arg0, arg1), getObject(arg2));\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __widl_instanceof_Response = function(arg0) {\n\n var ret = getObject(arg0) instanceof Response;\n\n return ret;\n\n};\n\n\n\nexport const __widl_f_url_Response = function(arg0, arg1) {\n\n var ret = getObject(arg1).url;\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n\n};\n\n\n\nexport const __widl_f_status_Response = function(arg0) {\n\n var ret = getObject(arg0).status;\n\n return ret;\n\n};\n\n\n\nexport const __widl_f_headers_Response = function(arg0) {\n\n var ret = getObject(arg0).headers;\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __widl_f_text_Response = function(arg0) {\n\n try {\n\n var ret = getObject(arg0).text();\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __widl_f_fetch_with_request_Window = function(arg0, arg1) {\n\n var ret = getObject(arg0).fetch(getObject(arg1));\n\n return 
addHeapObject(ret);\n\n};\n\n\n\nexport const __wbindgen_object_clone_ref = function(arg0) {\n\n var ret = getObject(arg0);\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbindgen_is_function = function(arg0) {\n\n var ret = typeof(getObject(arg0)) === 'function';\n\n return ret;\n\n};\n\n\n\nexport const __wbindgen_is_object = function(arg0) {\n\n const val = getObject(arg0);\n\n var ret = typeof(val) === 'object' && val !== null;\n\n return ret;\n\n};\n\n\n\nexport const __wbg_next_1806e9c639e7c94e = function(arg0) {\n\n var ret = getObject(arg0).next;\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_next_070429384a9059a5 = function(arg0) {\n\n try {\n\n var ret = getObject(arg0).next();\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __wbg_done_24ef91fda5bda381 = function(arg0) {\n\n var ret = getObject(arg0).done;\n\n return ret;\n\n};\n\n\n\nexport const __wbg_value_1b88544311a72cbf = function(arg0) {\n\n var ret = getObject(arg0).value;\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_iterator_23e543eb74670550 = function() {\n\n var ret = Symbol.iterator;\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_get_b086a3091905ea8f = function(arg0, arg1) {\n\n try {\n\n var ret = Reflect.get(getObject(arg0), getObject(arg1));\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __wbg_call_12b949cfc461d154 = function(arg0, arg1) {\n\n try {\n\n var ret = getObject(arg0).call(getObject(arg1));\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __wbg_newnoargs_c4b2cbbd30e2d057 = function(arg0, arg1) {\n\n var ret = new Function(getStringFromWasm0(arg0, arg1));\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_call_ce7cf17fc6380443 = function(arg0, arg1, arg2) {\n\n try {\n\n var ret = getObject(arg0).call(getObject(arg1), getObject(arg2));\n\n return 
addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __wbg_new_7dd9b384a913884d = function() {\n\n var ret = new Object();\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_new_d3eff62d5c013634 = function(arg0, arg1) {\n\n try {\n\n var state0 = {a: arg0, b: arg1};\n\n var cb0 = (arg0, arg1) => {\n\n const a = state0.a;\n\n state0.a = 0;\n\n try {\n\n return __wbg_adapter_74(a, state0.b, arg0, arg1);\n\n } finally {\n\n state0.a = a;\n\n }\n\n };\n\n var ret = new Promise(cb0);\n\n return addHeapObject(ret);\n\n } finally {\n\n state0.a = state0.b = 0;\n\n }\n\n};\n\n\n\nexport const __wbg_resolve_6885947099a907d3 = function(arg0) {\n\n var ret = Promise.resolve(getObject(arg0));\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_then_b6fef331fde5cf0a = function(arg0, arg1) {\n\n var ret = getObject(arg0).then(getObject(arg1));\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_then_7d828a330efec051 = function(arg0, arg1, arg2) {\n\n var ret = getObject(arg0).then(getObject(arg1), getObject(arg2));\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_globalThis_22e06d4bea0084e3 = function() {\n\n try {\n\n var ret = globalThis.globalThis;\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __wbg_self_00b0599bca667294 = function() {\n\n try {\n\n var ret = self.self;\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __wbg_window_aa795c5aad79b8ac = function() {\n\n try {\n\n var ret = window.window;\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __wbg_global_cc239dc2303f417c = function() {\n\n try {\n\n var ret = global.global;\n\n return addHeapObject(ret);\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __wbindgen_is_undefined = function(arg0) {\n\n var ret = getObject(arg0) === undefined;\n\n return 
ret;\n\n};\n\n\n\nexport const __wbg_buffer_1bb127df6348017b = function(arg0) {\n\n var ret = getObject(arg0).buffer;\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_newwithbyteoffsetandlength_6b93e5ed7d4086de = function(arg0, arg1, arg2) {\n\n var ret = new Uint8Array(getObject(arg0), arg1 >>> 0, arg2 >>> 0);\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_new_dca22b33e64c73c1 = function(arg0) {\n\n var ret = new Uint8Array(getObject(arg0));\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbg_set_8d5fd23e838df6b0 = function(arg0, arg1, arg2) {\n\n try {\n\n var ret = Reflect.set(getObject(arg0), getObject(arg1), getObject(arg2));\n\n return ret;\n\n } catch (e) {\n\n handleError(e)\n\n }\n\n};\n\n\n\nexport const __wbindgen_string_get = function(arg0, arg1) {\n\n const obj = getObject(arg1);\n\n var ret = typeof(obj) === 'string' ? obj : undefined;\n\n var ptr0 = isLikeNone(ret) ? 0 : passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n\n};\n\n\n\nexport const __wbindgen_debug_string = function(arg0, arg1) {\n\n var ret = debugString(getObject(arg1));\n\n var ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);\n\n var len0 = WASM_VECTOR_LEN;\n\n getInt32Memory0()[arg0 / 4 + 1] = len0;\n\n getInt32Memory0()[arg0 / 4 + 0] = ptr0;\n\n};\n\n\n\nexport const __wbindgen_throw = function(arg0, arg1) {\n\n throw new Error(getStringFromWasm0(arg0, arg1));\n\n};\n\n\n\nexport const __wbindgen_memory = function() {\n\n var ret = wasm.memory;\n\n return addHeapObject(ret);\n\n};\n\n\n\nexport const __wbindgen_closure_wrapper327 = function(arg0, arg1, arg2) {\n\n\n\n const state = { a: arg0, b: arg1, cnt: 1 };\n\n const real = (arg0) => {\n\n state.cnt++;\n\n const a = state.a;\n\n state.a = 0;\n\n try {\n\n return __wbg_adapter_26(a, state.b, arg0);\n\n } finally {\n\n 
if (--state.cnt === 0) wasm.__wbindgen_export_2.get(96)(a, state.b);\n\n else state.a = a;\n\n }\n\n }\n\n ;\n\n real.original = state;\n\n var ret = real;\n\n return addHeapObject(ret);\n\n};\n\n\n", "file_path": "react_website/external/fly_query_rs.js", "rank": 80, "score": 51244.23441482306 }, { "content": "export const mainListItems = (\n\n <div>\n\n <NavLink to=\"/flights\">\n\n <ListItem button>\n\n <ListItemIcon>\n\n <FlightIcon />\n\n </ListItemIcon>\n\n <ListItemText primary=\"Flights\" />\n\n </ListItem>\n\n </NavLink>\n\n\n\n <NavLink to=\"/hotels\">\n\n <ListItem button>\n\n <ListItemIcon>\n\n <HotelIcon />\n\n </ListItemIcon>\n\n <ListItemText primary=\"Hotels\" />\n\n </ListItem>\n\n </NavLink>\n\n </div>\n", "file_path": "react_website/src/components/listItems.js", "rank": 81, "score": 40147.26363481091 }, { "content": "const useStyles = makeStyles(theme => ({\n\n root: {\n\n margin: theme.spacing(6, 0, 3),\n\n },\n\n lightBulb: {\n\n verticalAlign: 'middle',\n\n marginRight: theme.spacing(1),\n\n },\n", "file_path": "react_website/src/ProTip.js", "rank": 82, "score": 33338.83663121037 }, { "content": "const useStyles = makeStyles(theme => ({\n\n seeMore: {\n\n marginTop: theme.spacing(3)\n\n }\n", "file_path": "react_website/src/pages/flights/Orders.js", "rank": 83, "score": 32900.28521767759 }, { "content": "const useStyles = makeStyles({\n\n depositContext: {\n\n flex: 1\n\n }\n", "file_path": "react_website/src/pages/flights/Deposits.js", "rank": 84, "score": 32900.28521767759 }, { "content": " def visual_data():\n\n flightDataSet = {}\n\n numTrips = 0\n\n for i in range(0, 30):\n\n originDate = (datetime.datetime.now() + datetime.timedelta(days = i)).strftime(\"%Y-%m-%d\")\n\n print(originDate)\n\n endDate = (datetime.datetime.now() + datetime.timedelta(days = tripLength + i)).strftime(\"%Y-%m-%d\")\n\n variables = {\n\n \"fromCode\": fromCode,\n\n \"toCode\": toCode,\n\n \"startDate\": originDate,\n\n \"endDate\": endDate,\n\n \"roundTrip\": 
roundTrip\n\n }\n\n s = session.post(\"https://dev.fly.me/api/graphql\", json = {'query': flight_query, 'variables': variables})\n\n print(s.status_code)\n\n if s.status_code == 200:\n\n try:\n\n data = json.loads(s.text)\n\n data = data['data']['simpleAirSearch']['select']['products']['edges']\n\n flightDataSet[originDate] = []\n\n for sec in data:\n\n flightDataSet[originDate].append({\n\n \"totalPrice\": sec['node']['fareInfo']['totalPrice'],\n\n \"currency\": sec['node']['fareInfo']['currency'],\n\n \"originDisplayTime\": sec['node']['ods'][0]['originDisplayTime'],\n\n \"formattedOriginTime\": sec['node']['ods'][0]['formattedOriginTime']\n\n })\n\n numTrips += 1\n\n except:\n\n print('error')\n\n else:\n\n print('failed')\n\n\n", "file_path": "DialogWebRequest/flaskSampleHook.py", "rank": 85, "score": 32865.95859483124 }, { "content": "def flight_search():\n\n airports = request.args.get('airports')\n\n fromCode = airports.split(',')[0]\n\n toCode = airports.split(',')[1]\n\n\n\n dates = request.args.get('dates')\n\n fromDate = dates.split(',')[0]\n\n toDate = dates.split(',')[1]\n\n\n\n if request.args.get('round-trip').lower() == \"true\":\n\n roundTrip = True\n\n else:\n\n roundTrip = False\n\n\n\n query = \"\"\"\n\n query name($roundTrip: Boolean!, $fromCode: String!, $toCode: String!, $fromDate: LocalDate!, $toDate: LocalDate!) 
{\n\n simpleAirSearch(input: {\n\n stops: [\n\n { portCode: $fromCode, earliestDate: $fromDate }\n\n { portCode: $toCode, earliestDate: $toDate }\n\n ]\n\n returnsToOrigin: $roundTrip\n\n }) {\n\n select {\n\n products(first: 10) {\n\n edges {\n\n node {\n\n type\n\n productId\n\n fareInfo {\n\n validatingCarrier, totalPrice, currency\n\n totalTax\n\n f_fare: formattedPrice(type: BASE)\n\n f_total_price: formattedPrice(type: TOTAL)\n\n f_taxes_and_fees_total: formattedPrice(type: TAXES_AND_FEES)\n\n fareFamily {\n\n seatSelectionIncluded\n\n fareFamilyDescription\n\n }\n\n }\n\n ods {\n\n id\n\n origin\n\n originDisplayTime\n\n destinationDisplayTime\n\n departureTime: formattedOriginTime(pattern: \"h:mm a\")\n\n arrivalTime: formattedDestinationTime(pattern: \"h:mm a\")\n\n formattedOriginTime(pattern: \"MMMM dd, yyyy\")\n\n formattedDestinationTime(pattern: \"MMMM dd, yyyy\")\n\n origin\n\n originDescriptive {\n\n primaryType {\n\n type\n\n name\n\n code\n\n }\n\n timeZoneCode\n\n city: ltv(types: [CITY]) {\n\n name\n\n }\n\n country: ltv(types: [COUNTRY]) {\n\n code\n\n name\n\n }\n\n state: ltv(types: [STATE]) {\n\n code\n\n }\n\n }\n\n destination\n\n destinationDescriptive {\n\n primaryType {\n\n type\n\n name\n\n code\n\n }\n\n timeZoneCode\n\n city: ltv(types: [CITY]) {\n\n name\n\n }\n\n country: ltv(types: [COUNTRY]) {\n\n code\n\n name\n\n }\n\n state: ltv(types: [STATE]) {\n\n code\n\n }\n\n }\n\n layovers {\n\n detail: arrivalPortDescriptive {\n\n city: ltv(types: [CITY]) {\n\n name\n\n }\n\n port: ltv(types: [AIRPORT]) {\n\n code\n\n }\n\n }\n\n layoverTimeMinutes\n\n }\n\n segments {\n\n flightNumber\n\n departurePort, departureTime, marketingCarrier\n\n departurePortDescriptive {\n\n primaryType {\n\n type\n\n name\n\n code\n\n }\n\n timeZoneCode\n\n }\n\n departureTerminal\n\n arrivalPort, arrivalTime\n\n arrivalPortDescriptive {\n\n primaryType {\n\n type\n\n name\n\n code\n\n }\n\n timeZoneCode\n\n }\n\n arrivalTerminal\n\n aircraft {\n\n 
code\n\n shortName\n\n longName\n\n }\n\n flightTimeMinutes\n\n layoverTimeMinutes\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n \"\"\"\n\n\n\n variables = {\n\n \"fromCode\": fromCode,\n\n \"toCode\": toCode,\n\n \"fromDate\": fromDate,\n\n \"toDate\": toDate,\n\n \"roundTrip\": roundTrip\n\n }\n\n print(variables)\n\n\n\n s = session.post(\"https://dev.fly.me/api/graphql\", json = {'query': query, 'variables': variables})\n\n print(s.status_code)\n\n print(s.text)\n\n if s.status_code == 200:\n\n #try:\n\n #data = json.loads(s.text)\n\n #data = data['data']['airports']['edges']\n\n return s.text\n\n #except:\n\n # return 'fail'\n\n else:\n", "file_path": "DialogWebRequest/flaskSampleHook.py", "rank": 86, "score": 32520.44740729492 }, { "content": "def get_visual_data():\n\n flight_query = \"\"\"\n\n query name($roundTrip: Boolean!, $fromCode: String!, $toCode: String!, $startDate: LocalDate!, $endDate: LocalDate!){\n\n simpleAirSearch(input: {\n\n stops: [\n\n { portCode: $fromCode, earliestDate: $startDate }\n\n { portCode: $toCode, earliestDate: $endDate }\n\n ]\n\n returnsToOrigin: $roundTrip\n\n }) {\n\n select {\n\n products {\n\n edges {\n\n node {\n\n fareInfo { totalPrice, currency }\n\n ods {\n\n originDisplayTime\n\n formattedOriginTime(pattern: \"MMMM dd, yyyy\")\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n \"\"\"\n\n\n\n fromCode = \"FRA\"\n\n toCode = \"FCO\"\n\n roundTrip = False\n\n tripLength = 3 # in days\n\n\n\n def visual_data():\n\n flightDataSet = {}\n\n numTrips = 0\n\n for i in range(0, 30):\n\n originDate = (datetime.datetime.now() + datetime.timedelta(days = i)).strftime(\"%Y-%m-%d\")\n\n print(originDate)\n\n endDate = (datetime.datetime.now() + datetime.timedelta(days = tripLength + i)).strftime(\"%Y-%m-%d\")\n\n variables = {\n\n \"fromCode\": fromCode,\n\n \"toCode\": toCode,\n\n \"startDate\": originDate,\n\n \"endDate\": endDate,\n\n \"roundTrip\": roundTrip\n\n }\n\n s = session.post(\"https://dev.fly.me/api/graphql\", json 
= {'query': flight_query, 'variables': variables})\n\n print(s.status_code)\n\n if s.status_code == 200:\n\n try:\n\n data = json.loads(s.text)\n\n data = data['data']['simpleAirSearch']['select']['products']['edges']\n\n flightDataSet[originDate] = []\n\n for sec in data:\n\n flightDataSet[originDate].append({\n\n \"totalPrice\": sec['node']['fareInfo']['totalPrice'],\n\n \"currency\": sec['node']['fareInfo']['currency'],\n\n \"originDisplayTime\": sec['node']['ods'][0]['originDisplayTime'],\n\n \"formattedOriginTime\": sec['node']['ods'][0]['formattedOriginTime']\n\n })\n\n numTrips += 1\n\n except:\n\n print('error')\n\n else:\n\n print('failed')\n\n\n\n yield json.dumps(flightDataSet[originDate], indent = 4)\n\n return Response(visual_data(), mimetype='application/json')\n", "file_path": "DialogWebRequest/flaskSampleHook.py", "rank": 87, "score": 32439.24079830474 }, { "content": " return_date,\n\n round_trip,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct FlightQuery<'a> {\n\n query: &'static str,\n\n variables: FlightQueryVariables<'a>,\n\n}\n\n\n\nimpl<'a> FlightQuery<'a> {\n\n pub fn new(\n\n ap_from: &'a str,\n\n ap_to: &'a str,\n\n depart_date: &'a str,\n\n return_date: Option<&'a str>,\n\n round_trip: bool,\n\n ) -> Self {\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 88, "score": 31721.947134038626 }, { "content": " }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\";\n\n\n\n#[wasm_bindgen]\n\nextern \"C\" {\n\n #[wasm_bindgen(js_namespace = console)]\n\n pub fn log(s: &str);\n\n}\n\n\n\n#[derive(Serialize)]\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 89, "score": 31721.10104759205 }, { "content": "use crate::BEARER_AUTH;\n\nuse reqwest::Client;\n\nuse serde::Serialize;\n\nuse wasm_bindgen::prelude::*;\n\n\n\nconst FLIGHT_QUERY: &'static str = \"\n\nquery name($roundTrip: Boolean!, $fromCode: String!, $toCode: String!, $fromDate: LocalDate!, $toDate: LocalDate!) 
{\n\n simpleAirSearch(input: {\n\n stops: [\n\n { portCode: $fromCode, earliestDate: $fromDate }\n\n { portCode: $toCode, earliestDate: $toDate }\n\n ]\n\n returnsToOrigin: $roundTrip\n\n }) {\n\n select {\n\n products(first: 1) {\n\n edges {\n\n node {\n\n type\n\n productId\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 90, "score": 31719.359865874587 }, { "content": " FlightQuery {\n\n query: FLIGHT_QUERY,\n\n variables: FlightQueryVariables::new(\n\n ap_from,\n\n ap_to,\n\n depart_date,\n\n return_date,\n\n round_trip,\n\n ),\n\n }\n\n }\n\n}\n\n\n\n#[wasm_bindgen]\n\npub async fn flight_query(\n\n ap_from: String,\n\n ap_to: String,\n\n depart_date: String,\n\n return_date: Option<String>,\n\n round_trip: bool,\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 91, "score": 31714.917168786258 }, { "content": ") -> String {\n\n console_error_panic_hook::set_once();\n\n let response = match Client::new()\n\n .post(\"http://localhost:3000/api/graphql\")\n\n .header(\"authorization\", BEARER_AUTH)\n\n .header(\"content-type\", \"application/json\")\n\n .body(\n\n serde_json::to_string(&FlightQuery::new(\n\n &ap_from,\n\n &ap_to,\n\n &depart_date,\n\n return_date\n\n .as_ref()\n\n .map_or_else(|| None, |s| Some(s.as_str())),\n\n round_trip,\n\n ))\n\n .unwrap(),\n\n )\n\n .send()\n\n .await\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 92, "score": 31713.952803734468 }, { "content": " {\n\n Ok(response) => match response.text().await {\n\n Ok(response) => response,\n\n Err(e) => format!(\"Error: {}\", e),\n\n },\n\n Err(e) => format!(\"Error: {}\", e),\n\n };\n\n response\n\n}\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 93, "score": 31711.186685343444 }, { "content": " city: ltv(types: [CITY]) {\n\n name\n\n }\n\n port: ltv(types: [AIRPORT]) {\n\n code\n\n }\n\n }\n\n layoverTimeMinutes\n\n }\n\n segments {\n\n flightNumber\n\n departurePort, departureTime, marketingCarrier\n\n departurePortDescriptive {\n\n 
primaryType {\n\n type\n\n name\n\n code\n\n }\n\n timeZoneCode\n\n }\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 94, "score": 31708.221262011037 }, { "content": " destinationDescriptive {\n\n primaryType {\n\n type\n\n name\n\n code\n\n }\n\n timeZoneCode\n\n city: ltv(types: [CITY]) {\n\n name\n\n }\n\n country: ltv(types: [COUNTRY]) {\n\n code\n\n name\n\n }\n\n state: ltv(types: [STATE]) {\n\n code\n\n }\n\n }\n\n layovers {\n\n detail: arrivalPortDescriptive {\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 95, "score": 31706.560747222546 }, { "content": " origin\n\n originDescriptive {\n\n primaryType {\n\n type\n\n name\n\n code\n\n }\n\n timeZoneCode\n\n city: ltv(types: [CITY]) {\n\n name\n\n }\n\n country: ltv(types: [COUNTRY]) {\n\n code\n\n name\n\n }\n\n state: ltv(types: [STATE]) {\n\n code\n\n }\n\n }\n\n destination\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 96, "score": 31706.560747222546 }, { "content": " departureTerminal\n\n arrivalPort, arrivalTime\n\n arrivalPortDescriptive {\n\n primaryType {\n\n type\n\n name\n\n code\n\n }\n\n timeZoneCode\n\n }\n\n arrivalTerminal\n\n aircraft {\n\n code\n\n shortName\n\n longName\n\n }\n\n flightTimeMinutes\n\n layoverTimeMinutes\n\n }\n\n }\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 97, "score": 31706.560747222546 }, { "content": " fareInfo {\n\n validatingCarrier, totalPrice, currency\n\n totalTax\n\n f_fare: formattedPrice(type: BASE)\n\n f_total_price: formattedPrice(type: TOTAL)\n\n f_taxes_and_fees_total: formattedPrice(type: TAXES_AND_FEES)\n\n fareFamily {\n\n seatSelectionIncluded\n\n fareFamilyDescription\n\n }\n\n }\n\n ods {\n\n id\n\n origin\n\n originDisplayTime\n\n destinationDisplayTime\n\n departureTime: formattedOriginTime(pattern: \\\"h:mm a\\\")\n\n arrivalTime: formattedDestinationTime(pattern: \\\"h:mm a\\\")\n\n formattedOriginTime(pattern: \\\"MMMM dd, yyyy\\\")\n\n formattedDestinationTime(pattern: \\\"MMMM dd, 
yyyy\\\")\n", "file_path": "fly-query-rs/src/flight_query.rs", "rank": 98, "score": 31706.560747222546 }, { "content": "import React from \"react\";\n\nimport PropTypes from \"prop-types\";\n\nimport Typography from \"@material-ui/core/Typography\";\n\n\n\nexport default function Title(props) {\n\n return (\n\n <Typography component=\"h2\" variant=\"h6\" color=\"primary\" gutterBottom>\n\n {props.children}\n\n </Typography>\n\n );\n\n}\n\n\n\nTitle.propTypes = {\n\n children: PropTypes.node\n\n};\n", "file_path": "react_website/src/pages/flights/Title.js", "rank": 99, "score": 30560.52023848673 } ]
Rust
src/validation.rs
djc/rpki-validator
a133f7dad930290ad51d65dbecf862d7063099f0
use std::sync::Arc; use std::sync::RwLock; use ipnetwork::IpNetwork; use rpki::asres::AsId; use storage::{RecordStorage, Record}; pub struct RecordValidator { storage: Arc<RwLock<RecordStorage>>, } impl RecordValidator { pub fn new(storage: Arc<RwLock<RecordStorage>>) -> Self { RecordValidator { storage } } pub fn validate(&self, prefix: &IpNetwork, origin: u32) -> ValidationResult { let origin = AsId::from(origin); let records = self.storage.read().unwrap().find_records(prefix); let mut valid_records = Vec::new(); let mut invalid_length = Vec::new(); let mut invalid_origin = Vec::new(); let mut found_valid_origin = false; for record in records { let has_valid_length = record.max_length() >= prefix.prefix(); let has_valid_origin = record.origin() == origin; if has_valid_origin && has_valid_length { valid_records.push(record); } else { if !has_valid_origin { invalid_origin.push(record); } else if !has_valid_length { invalid_length.push(record); } } found_valid_origin = found_valid_origin || has_valid_origin; } let records = ValidationRecords::new( valid_records, invalid_origin, invalid_length, ); if !records.matched().is_empty() { ValidationResult::Valid(records) } else if !records.unmatched_origin().is_empty() || !records.unmatched_length().is_empty() { if records.unmatched_origin().is_empty() || found_valid_origin { ValidationResult::InvalidPrefixLength(records) } else { ValidationResult::InvalidOrigin(records) } } else { ValidationResult::NotFound } } } #[derive(Debug, Default, PartialEq)] pub struct ValidationRecords { matched : Vec<Record>, unmatched_origin : Vec<Record>, unmatched_length : Vec<Record>, } impl ValidationRecords { pub fn new(matched: Vec<Record>, unmatched_origin: Vec<Record>, unmatched_length: Vec<Record>) -> Self { ValidationRecords { matched, unmatched_origin, unmatched_length } } pub fn matched(&self) -> &Vec<Record> { &self.matched } pub fn unmatched_origin(&self) -> &Vec<Record> { &self.unmatched_origin } pub fn unmatched_length(&self) -> 
&Vec<Record> { &self.unmatched_length } } #[derive(Debug, PartialEq)] pub enum ValidationResult { Valid(ValidationRecords), InvalidOrigin(ValidationRecords), InvalidPrefixLength(ValidationRecords), NotFound, } impl ValidationResult { pub fn is_valid(&self) -> bool { match self { ValidationResult::Valid(_) => true, _ => false } } } #[cfg(test)] mod tests { use std::path::PathBuf; use storage::TrustAnchor; use super::*; fn create_records() -> Vec<Record> { vec![ Record::new(IpNetwork::V4("1.2.3.0/24".parse().unwrap()), AsId::from(1), 26), Record::new(IpNetwork::V4("1.2.2.0/23".parse().unwrap()), AsId::from(1), 24), Record::new(IpNetwork::V4("1.2.0.0/16".parse().unwrap()), AsId::from(3), 16), Record::new(IpNetwork::V4("1.2.3.0/24".parse().unwrap()), AsId::from(4), 24), Record::new(IpNetwork::V4("4.4.4.0/24".parse().unwrap()), AsId::from(1), 26), ] } fn create_verifier(records: Vec<Record>) -> RecordValidator { let mut storage = RecordStorage::new(); let trust_anchor = Arc::new(TrustAnchor::new("foo".to_string())); storage.add_records(records, PathBuf::new(), &trust_anchor); RecordValidator::new(Arc::new(RwLock::new(storage))) } #[test] fn verify_valid() { let records = create_records(); let verifier = create_verifier(records.clone()); assert_eq!( ValidationResult::Valid( ValidationRecords::new( vec![ records[0].clone(), records[1].clone() ], vec![ records[3].clone(), records[2].clone() ], vec![], ) ), verifier.validate(&IpNetwork::V4("1.2.3.0/24".parse().unwrap()), 1), ); assert_eq!( ValidationResult::Valid( ValidationRecords::new( vec![ records[0].clone() ], vec![ records[3].clone(), records[2].clone() ], vec![ records[1].clone() ], ) ), verifier.validate(&IpNetwork::V4("1.2.3.0/25".parse().unwrap()), 1), ); } #[test] fn verify_invalid_origin() { let records = create_records(); let verifier = create_verifier(records.clone()); assert_eq!( ValidationResult::InvalidOrigin( ValidationRecords::new( vec![ ], vec![ records[0].clone(), records[3].clone(), records[1].clone(), 
records[2].clone(), ], vec![ ], ) ), verifier.validate(&IpNetwork::V4("1.2.3.0/24".parse().unwrap()), 10), ); } #[test] fn verify_invalid_prefix_length() { let records = create_records(); let verifier = create_verifier(records.clone()); assert_eq!( ValidationResult::InvalidPrefixLength( ValidationRecords::new( vec![ ], vec![ ], vec![ records[4].clone() ], ) ), verifier.validate(&IpNetwork::V4("4.4.4.0/27".parse().unwrap()), 1), ); } #[test] fn verify_not_found() { let records = create_records(); let verifier = create_verifier(records.clone()); assert_eq!( ValidationResult::NotFound, verifier.validate(&IpNetwork::V4("3.3.3.0/24".parse().unwrap()), 1), ); assert_eq!( ValidationResult::NotFound, verifier.validate(&IpNetwork::V4("1.2.0.0/15".parse().unwrap()), 1), ); } }
use std::sync::Arc; use std::sync::RwLock; use ipnetwork::IpNetwork; use rpki::asres::AsId; use storage::{RecordStorage, Record}; pub struct RecordValidator { storage: Arc<RwLock<RecordStorage>>, } impl RecordValidator { pub fn new(storage: Arc<RwLock<RecordStorage>>) -> Self { RecordValidator { storage } } pub fn validate(&self, prefix: &IpNetwork, origin: u32) -> ValidationResult { let origin = AsId::from(origin); let records = self.storage.read().unwrap().find_records(prefix); let mut valid_records = Vec::new(); let mut invalid_length = Vec::new(); let mut invalid_origin = Vec::new(); let mut found_valid_origin = false; for record in records { let has_valid_length = record.max_length() >= prefix.prefix(); let has_valid_origin = record.origin() == origin; if has_valid_origin && has_valid_length { valid_records.push(record); } else { if !has_valid_origin { invalid_origin.push(record); } else if !has_valid_length { invalid_length.push(record); } } found_valid_origin = found_valid_origin || has_valid_origin; } let records = ValidationRecords::new( valid_records, invalid_origin, invalid_length, ); if !records.matched().is_empty() { ValidationResult::Valid(records) } else if !records.unmatched_origin().is_empty() || !records.unmatched_length().is_empty() { if records.unmatched_origin().is_empty() || found_valid_origin { ValidationResult::InvalidPrefixLength(records) } else { ValidationResult::InvalidOrigin(records) } } else { ValidationResult::NotFound } } } #[derive(Debug, Default, PartialEq)] pub struct ValidationRecords { matched : Vec<Record>, unmatched_origin : Vec<Record>, unmatched_length : Vec<Record>, } impl ValidationRecords {
pub fn matched(&self) -> &Vec<Record> { &self.matched } pub fn unmatched_origin(&self) -> &Vec<Record> { &self.unmatched_origin } pub fn unmatched_length(&self) -> &Vec<Record> { &self.unmatched_length } } #[derive(Debug, PartialEq)] pub enum ValidationResult { Valid(ValidationRecords), InvalidOrigin(ValidationRecords), InvalidPrefixLength(ValidationRecords), NotFound, } impl ValidationResult { pub fn is_valid(&self) -> bool { match self { ValidationResult::Valid(_) => true, _ => false } } } #[cfg(test)] mod tests { use std::path::PathBuf; use storage::TrustAnchor; use super::*; fn create_records() -> Vec<Record> { vec![ Record::new(IpNetwork::V4("1.2.3.0/24".parse().unwrap()), AsId::from(1), 26), Record::new(IpNetwork::V4("1.2.2.0/23".parse().unwrap()), AsId::from(1), 24), Record::new(IpNetwork::V4("1.2.0.0/16".parse().unwrap()), AsId::from(3), 16), Record::new(IpNetwork::V4("1.2.3.0/24".parse().unwrap()), AsId::from(4), 24), Record::new(IpNetwork::V4("4.4.4.0/24".parse().unwrap()), AsId::from(1), 26), ] } fn create_verifier(records: Vec<Record>) -> RecordValidator { let mut storage = RecordStorage::new(); let trust_anchor = Arc::new(TrustAnchor::new("foo".to_string())); storage.add_records(records, PathBuf::new(), &trust_anchor); RecordValidator::new(Arc::new(RwLock::new(storage))) } #[test] fn verify_valid() { let records = create_records(); let verifier = create_verifier(records.clone()); assert_eq!( ValidationResult::Valid( ValidationRecords::new( vec![ records[0].clone(), records[1].clone() ], vec![ records[3].clone(), records[2].clone() ], vec![], ) ), verifier.validate(&IpNetwork::V4("1.2.3.0/24".parse().unwrap()), 1), ); assert_eq!( ValidationResult::Valid( ValidationRecords::new( vec![ records[0].clone() ], vec![ records[3].clone(), records[2].clone() ], vec![ records[1].clone() ], ) ), verifier.validate(&IpNetwork::V4("1.2.3.0/25".parse().unwrap()), 1), ); } #[test] fn verify_invalid_origin() { let records = create_records(); let verifier = 
create_verifier(records.clone()); assert_eq!( ValidationResult::InvalidOrigin( ValidationRecords::new( vec![ ], vec![ records[0].clone(), records[3].clone(), records[1].clone(), records[2].clone(), ], vec![ ], ) ), verifier.validate(&IpNetwork::V4("1.2.3.0/24".parse().unwrap()), 10), ); } #[test] fn verify_invalid_prefix_length() { let records = create_records(); let verifier = create_verifier(records.clone()); assert_eq!( ValidationResult::InvalidPrefixLength( ValidationRecords::new( vec![ ], vec![ ], vec![ records[4].clone() ], ) ), verifier.validate(&IpNetwork::V4("4.4.4.0/27".parse().unwrap()), 1), ); } #[test] fn verify_not_found() { let records = create_records(); let verifier = create_verifier(records.clone()); assert_eq!( ValidationResult::NotFound, verifier.validate(&IpNetwork::V4("3.3.3.0/24".parse().unwrap()), 1), ); assert_eq!( ValidationResult::NotFound, verifier.validate(&IpNetwork::V4("1.2.0.0/15".parse().unwrap()), 1), ); } }
pub fn new(matched: Vec<Record>, unmatched_origin: Vec<Record>, unmatched_length: Vec<Record>) -> Self { ValidationRecords { matched, unmatched_origin, unmatched_length } }
function_block-full_function
[ { "content": "#[derive(Eq, Ord, PartialEq, PartialOrd)]\n\nstruct RecordMetadata {\n\n origin: AsId,\n\n max_length: u8,\n\n path: Arc<PathBuf>,\n\n trust_anchor: Arc<TrustAnchor>,\n\n}\n\n\n\nimpl RecordMetadata {\n\n fn new(origin: AsId,\n\n max_length: u8,\n\n path: Arc<PathBuf>,\n\n trust_anchor: Arc<TrustAnchor>)\n\n -> Self\n\n {\n\n RecordMetadata {\n\n origin,\n\n max_length,\n\n path,\n\n trust_anchor,\n\n }\n", "file_path": "src/storage.rs", "rank": 0, "score": 77765.65280556976 }, { "content": "fn bench_lookup_ipv4(b: &mut Bencher) {\n\n let mut storage = RecordStorage::new();\n\n let mut current_buffer : [u8; 4] = [1, 0, 0, 0];\n\n let origins = vec![\n\n AsId::from(1),\n\n AsId::from(2),\n\n AsId::from(3),\n\n ];\n\n let total_prefixes = 10000;\n\n let anchor = Arc::new(TrustAnchor::new(\"bar\".to_string()));\n\n for _ in 0..total_prefixes {\n\n let addr = IpAddr::from(current_buffer);\n\n let mut records = Vec::new();\n\n let prefix = IpNetwork::new(addr, 24).unwrap();\n\n for origin in origins.iter() {\n\n records.push(Record::new(prefix, *origin, 24));\n\n }\n\n // We need unique paths, otherwise we'll keep erasing the previous records\n\n // thinking the source file was modified\n\n storage.add_records(records, PathBuf::from(format!(\"{:?}\", prefix)), &anchor);\n", "file_path": "benches/storage.rs", "rank": 1, "score": 68153.6397363015 }, { "content": "fn bootstrap(storage: &Arc<RwLock<RecordStorage>>,\n\n executor: &mut Executor,\n\n status: &Arc<Mutex<ProcessingStatus>>,\n\n metrics: &Metrics,\n\n config: &Config) \n\n -> bool\n\n{\n\n let entries = match fs::read_dir(&config.tal.directory) {\n\n Ok(e) => e,\n\n Err(e) => {\n\n error!(\"Error processing TAL directory \\\"{}\\\": {}\", config.tal.directory, e);\n\n return false;\n\n }\n\n };\n\n for entry in entries {\n\n let entry = entry.unwrap().path();\n\n info!(\"Creating processor for file {:?}\", entry);\n\n let fetcher = RsyncFetcher::new(&config.rsync.binary);\n\n let processor = 
Processor::new(\n\n entry,\n", "file_path": "src/main.rs", "rank": 2, "score": 62730.140828788804 }, { "content": "struct ExecutorImpl {\n\n new_work_receiver: Receiver<ScheduledWork>,\n\n ready_work_sender: Sender<WorkPtr>,\n\n works: BinaryHeap<ScheduledWork>,\n\n}\n\n\n\nimpl Ord for ScheduledWork {\n\n fn cmp(&self, other: &ScheduledWork) -> Ordering {\n\n other.execution_time().cmp(&self.execution_time())\n\n }\n\n}\n\n\n\nimpl PartialOrd for ScheduledWork {\n\n fn partial_cmp(&self, other: &ScheduledWork) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl PartialEq for ScheduledWork {\n\n fn eq(&self, other: &ScheduledWork) -> bool {\n", "file_path": "src/executor.rs", "rank": 3, "score": 56411.39205767155 }, { "content": "fn default_rsync() -> String {\n\n \"rsync\".to_string()\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 4, "score": 51160.26893240647 }, { "content": "fn default_cache_path() -> String {\n\n env::var(\"CACHE_PATH\").unwrap_or(\"/tmp/rpki-validator-cache\".to_string())\n\n}\n\n\n\n// TalConfig\n\n\n\n#[derive(Deserialize)]\n\npub struct TalConfig {\n\n #[serde(default = \"default_tal_directory\")]\n\n pub directory: String,\n\n}\n\n\n\nimpl Default for TalConfig {\n\n fn default() -> Self {\n\n TalConfig {\n\n directory: default_tal_directory()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 5, "score": 48719.619162456074 }, { "content": "fn default_tal_directory() -> String {\n\n env::var(\"TAL_PATH\").unwrap_or(\"tal\".to_string())\n\n}\n\n\n\n// ValidationConfig\n\n\n\n#[derive(Deserialize)]\n\n#[serde(default)]\n\npub struct ValidationConfig {\n\n pub strict: bool,\n\n pub threads: usize,\n\n}\n\n\n\nimpl Default for ValidationConfig {\n\n fn default() -> Self {\n\n ValidationConfig {\n\n strict: false,\n\n threads: num_cpus::get()\n\n }\n\n }\n", "file_path": "src/config.rs", "rank": 6, "score": 48719.619162456074 }, { "content": "fn main() {\n\n let matches = clap::App::new(\"RPKI validator\")\n\n 
.version(\"0.1.0\")\n\n .author(\"Matias Fontanini\")\n\n .about(\"Syncs and validates RPKI records\")\n\n .arg(clap::Arg::with_name(\"config\")\n\n .short(\"c\")\n\n .long(\"config\")\n\n .value_name(\"FILE\")\n\n .help(\"The config file to use\")\n\n .takes_value(true)\n\n .required(true))\n\n .get_matches();\n\n\n\n simple_logger::init_with_level(log::Level::Info).unwrap();\n\n\n\n let config = match Config::from_path(matches.value_of(\"config\").unwrap()) {\n\n Some(c) => c,\n\n None => return,\n\n };\n", "file_path": "src/main.rs", "rank": 7, "score": 36313.01814864933 }, { "content": "struct Worker {\n\n\n\n}\n\n\n\nimpl Worker {\n\n fn start_work(work_receiver: Arc<Mutex<Receiver<WorkPtr>>>,\n\n new_work_sender: Sender<ScheduledWork>) {\n\n thread::spawn(move || {\n\n loop {\n\n let work = work_receiver.lock().unwrap().recv();\n\n let result = work.ok().map(|mut work| {\n\n if let Some(execution_time) = work.execute() {\n\n let time_from_now = execution_time - Instant::now();\n\n info!(\"Re-scheduling work for {}s from now\", time_from_now.as_secs());\n\n new_work_sender.send(ScheduledWork::new(work, execution_time)).ok()\n\n }\n\n else {\n\n Some(())\n\n }\n\n });\n\n if result.is_none() {\n\n break;\n\n }\n\n }\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/executor.rs", "rank": 8, "score": 35753.69538192815 }, { "content": "struct Api {\n\n\n\n}\n\n\n\nimpl Api {\n\n fn validate(req: &HttpRequest<AppState>) -> HttpResponse {\n\n let match_info = req.match_info();\n\n let prefix = format!(\"{}/{}\", &match_info[\"prefix\"], &match_info[\"length\"]);\n\n let prefix = match prefix.parse() {\n\n Ok(p) => p,\n\n Err(_) => return ApiResponse::Error(\"Invalid prefix\".to_string()).build(),\n\n };\n\n let asn = match match_info[\"asn\"].parse() {\n\n Ok(a) => a,\n\n Err(_) => return ApiResponse::Error(\"Invalid ASN\".to_string()).build(),\n\n };\n\n let result = req.state().validator.validate(&prefix, asn);\n\n ApiResponse::ValidationResponse(prefix, asn, 
result).build()\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 35753.69538192815 }, { "content": "struct ProcessorWork {\n\n processor: Processor,\n\n status: Arc<Mutex<ProcessingStatus>>,\n\n storage: Arc<RwLock<RecordStorage>>,\n\n metrics: Metrics,\n\n}\n\n\n\nimpl ProcessorWork {\n\n fn new(processor: Processor,\n\n status: Arc<Mutex<ProcessingStatus>>,\n\n storage: Arc<RwLock<RecordStorage>>,\n\n metrics: Metrics)\n\n -> Self\n\n {\n\n ProcessorWork {\n\n processor,\n\n status,\n\n storage,\n\n metrics,\n\n }\n", "file_path": "src/main.rs", "rank": 10, "score": 34193.33086603243 }, { "content": "struct ProcessingStatus {\n\n trust_anchors: HashMap<String, TrustAnchorStatus>,\n\n}\n\n\n\nimpl ProcessingStatus {\n\n fn new() -> Self {\n\n ProcessingStatus {\n\n trust_anchors: HashMap::new(),\n\n }\n\n }\n\n\n\n fn create_trust_anchor(&mut self, name: &str) {\n\n self.trust_anchors.insert(name.to_string(), TrustAnchorStatus::default());\n\n }\n\n\n\n fn mark_successful_run(&mut self, trust_anchor_name: &str, last_run: DateTime<Local>,\n\n last_duration: Duration) {\n\n let status = self.get_entry(trust_anchor_name, last_run, last_duration);\n\n status.successful_runs += 1;\n\n }\n", "file_path": "src/main.rs", "rank": 11, "score": 34193.33086603243 }, { "content": "struct AlwaysProcess {\n\n\n\n}\n\n\n\nimpl ModuleProcess for AlwaysProcess {\n\n fn should_process_module(&self) -> bool {\n\n true\n\n }\n\n\n\n fn should_process_file(&self, _file_path: &Path) -> bool {\n\n true\n\n }\n\n\n\n fn remove_deleted_files(&self, _storage: &mut RecordStorage) {\n\n\n\n }\n\n}\n\n\n", "file_path": "src/processor.rs", "rank": 12, "score": 34193.33086603243 }, { "content": "struct ScheduledWork {\n\n work: WorkPtr,\n\n execution_time: Instant,\n\n}\n\n\n\nimpl ScheduledWork {\n\n fn new(work: WorkPtr, execution_time: Instant) -> ScheduledWork {\n\n ScheduledWork {\n\n work,\n\n execution_time,\n\n }\n\n }\n\n\n\n fn execution_time(&self) -> &Instant {\n\n 
&self.execution_time\n\n }\n\n\n\n fn work(self) -> WorkPtr {\n\n self.work\n\n }\n", "file_path": "src/executor.rs", "rank": 13, "score": 34193.33086603243 }, { "content": "struct MetricsHandler {\n\n registry: Registry,\n\n}\n\n\n\nimpl MetricsHandler {\n\n fn new(registry: Registry) -> Self {\n\n MetricsHandler {\n\n registry\n\n }\n\n }\n\n}\n\n\n\nimpl<S> Handler<S> for MetricsHandler {\n\n type Result = HttpResponse;\n\n\n\n fn handle(&self, _: &HttpRequest<S>) -> Self::Result {\n\n let mut buffer = Vec::<u8>::new();\n\n let encoder = TextEncoder::new();\n\n let metric_familys = self.registry.gather();\n\n for mf in metric_familys {\n\n if let Err(e) = encoder.encode(&[mf], &mut buffer) {\n\n warn!(\"ignoring prometheus encoding error: {:?}\", e);\n\n }\n\n }\n\n String::from_utf8(buffer.clone()).unwrap().into()\n\n }\n\n}\n\n\n\n// Misc\n\n\n", "file_path": "src/main.rs", "rank": 14, "score": 34193.33086603243 }, { "content": "struct AppState {\n\n validator: RecordValidator,\n\n storage: Arc<RwLock<RecordStorage>>,\n\n status: Arc<Mutex<ProcessingStatus>>,\n\n}\n\n\n\nimpl AppState {\n\n fn new(validator: RecordValidator,\n\n storage: Arc<RwLock<RecordStorage>>,\n\n status: Arc<Mutex<ProcessingStatus>>)\n\n -> Self\n\n {\n\n AppState {\n\n validator,\n\n storage,\n\n status,\n\n }\n\n }\n\n}\n\n\n\n// API responses\n\n\n", "file_path": "src/main.rs", "rank": 15, "score": 34193.33086603243 }, { "content": "struct ProcessModified {\n\n new_files : HashSet<PathBuf>,\n\n modified_files : HashSet<PathBuf>,\n\n deleted_files : HashSet<PathBuf>,\n\n}\n\n\n\nimpl ProcessModified {\n\n fn new() -> ProcessModified {\n\n ProcessModified {\n\n new_files : HashSet::new(),\n\n modified_files : HashSet::new(),\n\n deleted_files : HashSet::new(),\n\n }\n\n }\n\n\n\n fn add_new_file(&mut self, path: PathBuf) {\n\n self.new_files.insert(path);\n\n }\n\n\n\n fn add_modified_file(&mut self, path: PathBuf) {\n", "file_path": "src/processor.rs", "rank": 16, "score": 
34193.33086603243 }, { "content": "#[derive(Default)]\n\nstruct TrustAnchorStatus {\n\n successful_runs: u64,\n\n error_runs: u64,\n\n last_run: Option<DateTime<Local>>,\n\n last_duration: Option<Duration>\n\n}\n\n\n\n// Work\n\n\n", "file_path": "src/main.rs", "rank": 17, "score": 32831.97182685803 }, { "content": "pub trait Work {\n\n // Returns next execution time\n\n fn execute(&mut self) -> Option<Instant>;\n\n}\n\n\n", "file_path": "src/executor.rs", "rank": 18, "score": 32175.269425609742 }, { "content": " }\n\n\n\n pub fn name(&self) -> &str {\n\n &self.name\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Record {\n\n prefix: IpNetwork,\n\n origin: AsId,\n\n max_length: u8,\n\n}\n\n\n\nimpl Record {\n\n pub fn new(prefix: IpNetwork, origin: AsId, max_length: u8) -> Self {\n\n Record {\n\n prefix,\n\n origin,\n\n max_length,\n", "file_path": "src/storage.rs", "rank": 19, "score": 24716.225061686157 }, { "content": " }\n\n}\n\n\n\npub struct RecordStorage {\n\n // Note: the record metadata entries for a given prefix is sorted\n\n // by (origin, max_length, path). 
This allows us to remove duplicates when iterating them\n\n prefix_records: HashMap<IpNetwork, Vec<RecordMetadata>>,\n\n known_paths: HashSet<Arc<PathBuf>>,\n\n trust_anchor_total_records: HashMap<Arc<TrustAnchor>, i64>,\n\n}\n\n\n\nimpl RecordStorage {\n\n const MIN_IPV4_PREFIX_LENGTH : u8 = 8;\n\n const MIN_IPV6_PREFIX_LENGTH : u8 = 19;\n\n\n\n pub fn new() -> Self {\n\n RecordStorage {\n\n prefix_records: HashMap::new(),\n\n known_paths: HashSet::new(),\n\n trust_anchor_total_records: HashMap::new(),\n", "file_path": "src/storage.rs", "rank": 20, "score": 24715.080971624528 }, { "content": " let metadata = RecordMetadata::new(\n\n record.origin,\n\n record.max_length,\n\n source_path.clone(),\n\n trust_anchor.clone(),\n\n );\n\n let records = self.prefix_records.entry(record.prefix).or_default();\n\n records.push(metadata);\n\n records.sort();\n\n }\n\n self.known_paths.insert(source_path);\n\n }\n\n\n\n pub fn remove_records<'a>(&mut self, source_paths: impl Iterator<Item=&'a PathBuf>) {\n\n let mut trust_anchor_deletions : HashMap<Arc<TrustAnchor>, i64> = HashMap::new();\n\n let source_paths = source_paths.collect::<HashSet<_>>();\n\n let mut empty_prefixes = Vec::new();\n\n for (prefix, records) in self.prefix_records.iter_mut() {\n\n records.retain(|r| {\n\n let should_retain = !source_paths.contains(&*r.path);\n", "file_path": "src/storage.rs", "rank": 21, "score": 24714.601525948718 }, { "content": " if !should_retain {\n\n *trust_anchor_deletions.entry(r.trust_anchor.clone()).or_default() += 1;\n\n }\n\n should_retain\n\n });\n\n if records.is_empty() {\n\n empty_prefixes.push(prefix.clone());\n\n }\n\n }\n\n for prefix in empty_prefixes {\n\n self.prefix_records.remove(&prefix);\n\n }\n\n for (trust_anchor, removed) in trust_anchor_deletions {\n\n self.set_total_records_diff(trust_anchor, removed * -1);\n\n } \n\n }\n\n\n\n pub fn find_records(&self, prefix: &IpNetwork) -> Vec<Record> {\n\n let (address, max_mask, min_mask) = match prefix.ip() {\n\n 
IpAddr::V4(a) => (u32::from(a) as u128, 32, Self::MIN_IPV4_PREFIX_LENGTH),\n", "file_path": "src/storage.rs", "rank": 22, "score": 24714.59980433782 }, { "content": " IpAddr::V6(a) => (u128::from(a), 128, Self::MIN_IPV6_PREFIX_LENGTH),\n\n };\n\n let mut mask_length = prefix.prefix();\n\n let mut mask : u128 = (2u128.pow(mask_length as u32) - 1) << (max_mask - mask_length);\n\n let mut output = Vec::new();\n\n while mask_length >= min_mask {\n\n let current_address = address & mask;\n\n let current_address = if prefix.is_ipv6() {\n\n IpAddr::V6(Ipv6Addr::from(current_address))\n\n } else {\n\n IpAddr::V4(Ipv4Addr::from(current_address as u32))\n\n };\n\n let current_prefix = IpNetwork::new(current_address, mask_length).unwrap();\n\n if let Some(records) = self.prefix_records.get(&current_prefix) {\n\n let mut last_tuple = None;\n\n for record in records {\n\n let current_tuple = (record.origin, record.max_length);\n\n if last_tuple != Some(current_tuple) {\n\n output.push(Record::new(current_prefix, record.origin, record.max_length));\n\n }\n", "file_path": "src/storage.rs", "rank": 23, "score": 24713.84676793161 }, { "content": " for (prefix, metadatas) in self.prefix_records.iter() {\n\n let mut last_tuple = None;\n\n for metadata in metadatas {\n\n let current_tuple = (metadata.origin, metadata.max_length);\n\n // Make sure we don't push the same record twice\n\n if last_tuple != Some(current_tuple) {\n\n output.push(Record::new(*prefix, metadata.origin, metadata.max_length));\n\n }\n\n last_tuple = Some(current_tuple);\n\n }\n\n }\n\n output\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/storage.rs", "rank": 24, "score": 24711.416453955408 }, { "content": " }\n\n }\n\n\n\n pub fn prefix(&self) -> &IpNetwork {\n\n &self.prefix\n\n }\n\n\n\n pub fn origin(&self) -> AsId {\n\n self.origin\n\n }\n\n\n\n pub fn max_length(&self) -> u8 {\n\n self.max_length\n\n }\n\n}\n\n\n\n// RecordMetadata\n\n\n\n#[derive(Eq, 
Ord, PartialEq, PartialOrd)]\n", "file_path": "src/storage.rs", "rank": 25, "score": 24711.086496932334 }, { "content": "#[macro_use] extern crate bencher;\n\nextern crate ipnetwork;\n\nextern crate rpki_validator;\n\nextern crate rpki;\n\n\n\nuse std::net::IpAddr;\n\nuse std::path::PathBuf;\n\nuse std::sync::Arc;\n\n\n\nuse bencher::Bencher;\n\n\n\nuse ipnetwork::IpNetwork;\n\n\n\nuse rpki::asres::AsId;\n\n\n\nuse rpki_validator::storage::{RecordStorage, Record, TrustAnchor};\n\n\n", "file_path": "benches/storage.rs", "rank": 26, "score": 24710.714632282456 }, { "content": "use std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::net::{IpAddr, Ipv4Addr, Ipv6Addr};\n\nuse std::sync::Arc;\n\nuse std::path::PathBuf;\n\n\n\nuse ipnetwork::IpNetwork;\n\n\n\nuse rpki::asres::AsId;\n\n\n\n#[derive(Debug, Hash, Eq, Ord, PartialEq, PartialOrd)]\n\npub struct TrustAnchor {\n\n name: String,\n\n}\n\n\n\nimpl TrustAnchor {\n\n pub fn new(name: String) -> Self {\n\n TrustAnchor {\n\n name,\n\n }\n", "file_path": "src/storage.rs", "rank": 27, "score": 24710.434881627265 }, { "content": " last_tuple = Some(current_tuple);\n\n }\n\n }\n\n mask_length -= 1;\n\n mask = mask << 1;\n\n }\n\n output\n\n }\n\n\n\n pub fn total_prefixes(&self) -> usize {\n\n self.prefix_records.len()\n\n }\n\n\n\n pub fn total_records(&self, trust_anchor_name: &str) -> i64 {\n\n let trust_anchor = TrustAnchor::new(trust_anchor_name.to_string());\n\n *self.trust_anchor_total_records.get(&trust_anchor).unwrap_or(&0) \n\n }\n\n\n\n pub fn records(&self) -> Vec<Record> {\n\n let mut output = Vec::new();\n", "file_path": "src/storage.rs", "rank": 28, "score": 24709.74207995021 }, { "content": " storage.find_records(&IpNetwork::V6(\"dead:beef::/32\".parse().unwrap())),\n\n [ records[0].clone(), records[2].clone() ]\n\n );\n\n assert_eq!(\n\n storage.find_records(&IpNetwork::V6(\"dead:be00::/24\".parse().unwrap())),\n\n [ records[2].clone() ]\n\n );\n\n assert_eq!(\n\n 
storage.find_records(&IpNetwork::V6(\"dead:be00::/32\".parse().unwrap())),\n\n [ records[2].clone() ]\n\n );\n\n assert_eq!(\n\n storage.find_records(&IpNetwork::V6(\"feed:be00::/24\".parse().unwrap())),\n\n []\n\n );\n\n }\n\n\n\n #[test]\n\n fn duplicate_records() {\n\n let mut storage = RecordStorage::new();\n", "file_path": "src/storage.rs", "rank": 29, "score": 24707.55558718447 }, { "content": " if current_buffer[2] == 255 {\n\n current_buffer[2] = 0;\n\n current_buffer[1] += 1;\n\n }\n\n else {\n\n current_buffer[2] += 1;\n\n }\n\n }\n\n assert_eq!(total_prefixes, storage.total_prefixes());\n\n\n\n let prefix = IpNetwork::V4(\"1.2.3.0/24\".parse().unwrap());\n\n b.iter(|| {\n\n storage.find_records(&prefix)\n\n });\n\n}\n\n\n\nbenchmark_group!(benches, bench_lookup_ipv4);\n\nbenchmark_main!(benches);\n", "file_path": "benches/storage.rs", "rank": 30, "score": 24707.439319052934 }, { "content": " let records = vec![\n\n Record::new(IpNetwork::V4(\"1.2.3.0/24\".parse().unwrap()), AsId::from(1), 24),\n\n ];\n\n let trust_anchor = Arc::new(TrustAnchor::new(\"foo\".to_string()));\n\n // Add the same records twice for 2 different paths\n\n storage.add_records(records.clone(), PathBuf::from(\"asd\"), &trust_anchor);\n\n storage.add_records(records.clone(), PathBuf::from(\"dsa\"), &trust_anchor);\n\n assert_eq!(\n\n storage.find_records(&IpNetwork::V4(\"1.2.3.0/24\".parse().unwrap())),\n\n records\n\n );\n\n }\n\n\n\n #[test]\n\n fn record_modification() {\n\n let mut storage = RecordStorage::new();\n\n let records1 = vec![\n\n Record::new(IpNetwork::V4(\"1.2.3.0/24\".parse().unwrap()), AsId::from(1), 24),\n\n Record::new(IpNetwork::V4(\"1.2.0.0/16\".parse().unwrap()), AsId::from(1), 24),\n\n ];\n", "file_path": "src/storage.rs", "rank": 31, "score": 24707.04369632575 }, { "content": " fn find_records_ipv4() {\n\n let mut storage = RecordStorage::new();\n\n let records = vec![\n\n Record::new(IpNetwork::V4(\"1.2.3.0/24\".parse().unwrap()), AsId::from(1), 24),\n\n 
Record::new(IpNetwork::V4(\"3.3.3.0/24\".parse().unwrap()), AsId::from(2), 23),\n\n Record::new(IpNetwork::V4(\"1.2.0.0/16\".parse().unwrap()), AsId::from(3), 16),\n\n ];\n\n let trust_anchor = Arc::new(TrustAnchor::new(\"foo\".to_string()));\n\n storage.add_records(records.clone(), PathBuf::new(), &trust_anchor);\n\n assert_eq!(\n\n storage.find_records(&IpNetwork::V4(\"1.2.3.0/24\".parse().unwrap())),\n\n [ records[0].clone(), records[2].clone() ]\n\n );\n\n assert_eq!(\n\n storage.find_records(&IpNetwork::V4(\"1.2.0.0/16\".parse().unwrap())),\n\n [ records[2].clone() ]\n\n );\n\n assert_eq!(\n\n storage.find_records(&IpNetwork::V4(\"1.2.0.0/24\".parse().unwrap())),\n\n [ records[2].clone() ]\n", "file_path": "src/storage.rs", "rank": 32, "score": 24707.04389575241 }, { "content": " let records2 = vec![\n\n Record::new(IpNetwork::V4(\"1.2.3.0/24\".parse().unwrap()), AsId::from(1), 25),\n\n ];\n\n let trust_anchor = Arc::new(TrustAnchor::new(\"foo\".to_string()));\n\n let path = PathBuf::from(\"asd\");\n\n storage.add_records(records1.clone(), path.clone(), &trust_anchor);\n\n storage.add_records(records2.clone(), path.clone(), &trust_anchor);\n\n // We should find records2 as it overrides the records in record1\n\n assert_eq!(\n\n storage.find_records(&IpNetwork::V4(\"1.2.3.0/24\".parse().unwrap())),\n\n records2\n\n );\n\n assert_eq!(1, storage.total_records(trust_anchor.name()));\n\n }\n\n\n\n #[test]\n\n fn record_counts() {\n\n let mut storage = RecordStorage::new();\n\n let records1 = vec![\n\n Record::new(IpNetwork::V4(\"1.2.0.0/16\".parse().unwrap()), AsId::from(1), 24),\n", "file_path": "src/storage.rs", "rank": 33, "score": 24706.956846024015 }, { "content": " );\n\n assert_eq!(\n\n storage.find_records(&IpNetwork::V4(\"3.3.0.0/16\".parse().unwrap())),\n\n []\n\n );\n\n\n\n assert_eq!(3, storage.total_records(trust_anchor.name()));\n\n }\n\n\n\n #[test]\n\n fn find_records_ipv6() {\n\n let mut storage = RecordStorage::new();\n\n let records = vec![\n\n 
Record::new(IpNetwork::V6(\"dead:beef::/32\".parse().unwrap()), AsId::from(1), 24),\n\n Record::new(IpNetwork::V6(\"feed:beef::/32\".parse().unwrap()), AsId::from(2), 23),\n\n Record::new(IpNetwork::V6(\"dead:be00::/24\".parse().unwrap()), AsId::from(3), 16),\n\n ];\n\n let trust_anchor = Arc::new(TrustAnchor::new(\"foo\".to_string()));\n\n storage.add_records(records.clone(), PathBuf::new(), &trust_anchor);\n\n assert_eq!(\n", "file_path": "src/storage.rs", "rank": 34, "score": 24706.936698588484 }, { "content": " }\n\n }\n\n\n\n fn set_total_records_diff(&mut self, trust_anchor: Arc<TrustAnchor>, diff: i64) {\n\n *self.trust_anchor_total_records.entry(trust_anchor).or_insert(0) += diff;\n\n }\n\n\n\n pub fn add_records(&mut self,\n\n records: Vec<Record>,\n\n source_path: PathBuf,\n\n trust_anchor: &Arc<TrustAnchor>) {\n\n let is_modified = self.known_paths.remove(&source_path);\n\n if is_modified {\n\n self.remove_records([ source_path.clone() ].iter());\n\n }\n\n // Increment the count for this TA\n\n self.set_total_records_diff(trust_anchor.clone(), records.len() as i64);\n\n\n\n let source_path = Arc::new(source_path);\n\n for record in records {\n", "file_path": "src/storage.rs", "rank": 35, "score": 24706.214666301323 }, { "content": " ];\n\n let records2 = vec![\n\n Record::new(IpNetwork::V4(\"1.2.3.0/24\".parse().unwrap()), AsId::from(1), 25),\n\n ];\n\n let trust_anchor = Arc::new(TrustAnchor::new(\"foo\".to_string()));\n\n // Add them in two runs\n\n storage.add_records(records1.clone(), PathBuf::from(\"asd\"), &trust_anchor);\n\n storage.add_records(records2.clone(), PathBuf::from(\"dsa\"), &trust_anchor);\n\n\n\n assert_eq!(2, storage.total_records(trust_anchor.name()));\n\n }\n\n}\n", "file_path": "src/storage.rs", "rank": 36, "score": 24705.03728448903 }, { "content": "use rpki::x509;\n\n\n\nuse rsync::{RsyncAction, RsyncFetcher};\n\nuse storage::{Record, RecordStorage, TrustAnchor};\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Io(io::Error),\n\n 
Tal(ReadError),\n\n Generic(&'static str),\n\n Validation,\n\n Other,\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(e: io::Error) -> Error {\n\n Error::Io(e)\n\n }\n\n}\n\n\n", "file_path": "src/processor.rs", "rank": 42, "score": 12.995315992904885 }, { "content": " storage: Arc<RwLock<RecordStorage>>,\n\n processing_interval : Duration,\n\n policy: ProcessingPolicy,\n\n}\n\n\n\nimpl Processor {\n\n pub fn new(tal_path: PathBuf,\n\n output_path: &str,\n\n strict_mode: bool,\n\n rsync_fetcher: RsyncFetcher,\n\n storage: Arc<RwLock<RecordStorage>>,\n\n processing_interval: Duration)\n\n -> Self\n\n {\n\n let name = match tal_path.file_stem().and_then(OsStr::to_str).map(str::to_string) {\n\n Some(n) => n,\n\n None => {\n\n warn!(\"Failed to get file stem for {:?}\", tal_path);\n\n tal_path.to_string_lossy().to_string()\n\n },\n", "file_path": "src/processor.rs", "rank": 43, "score": 12.1858622362031 }, { "content": " (HttpResponse::Ok(), ApiResponse::build_status_body(trust_anchors_statuses))\n\n },\n\n };\n\n response.content_type(\"application/json\")\n\n .body(body.to_string())\n\n }\n\n\n\n fn build_validation_body(prefix: IpNetwork, asn: u32, result: ValidationResult)\n\n -> serde_json::Value\n\n {\n\n let (state, reason, description, records) = match result {\n\n ValidationResult::Valid(records) => {\n\n (\"Valid\", \"\", \"VRPs cover prefix\", records)\n\n },\n\n ValidationResult::InvalidOrigin(records) => {\n\n (\"Invalid\", \"as\", \"VRPs cover prefix but the origin is different\", records)\n\n },\n\n ValidationResult::InvalidPrefixLength(records) => {\n\n (\"Invalid\", \"length\", \"VRPs cover prefix but the length is larger than allowed\",\n\n records)\n", "file_path": "src/main.rs", "rank": 44, "score": 11.945094239059976 }, { "content": " \"route\": {\n\n \"origin_asn\": format!(\"AS{}\", asn),\n\n \"prefix\": format!(\"{}\", prefix)\n\n },\n\n \"validity\": {\n\n \"state\": state,\n\n \"reason\": reason,\n\n \"description\": description,\n\n 
\"VRPs\": {\n\n \"matched\": records_to_json(records.matched()),\n\n \"unmatched_as\": records_to_json(records.unmatched_origin()),\n\n \"unmatched_length\": records_to_json(records.unmatched_length()),\n\n }\n\n }\n\n }\n\n })\n\n }\n\n\n\n fn build_export_body(records: Vec<Record>) -> serde_json::Value {\n\n json!({\n", "file_path": "src/main.rs", "rank": 45, "score": 11.83881670578248 }, { "content": "use chrono::DateTime;\n\n\n\nuse ipnetwork::IpNetwork;\n\n\n\nuse prometheus::{Encoder, Registry, TextEncoder};\n\n\n\nuse rpki_validator::executor::{Executor, Work};\n\nuse rpki_validator::metrics::Metrics;\n\nuse rpki_validator::config::Config;\n\nuse rpki_validator::processor::Processor;\n\nuse rpki_validator::rsync::RsyncFetcher;\n\nuse rpki_validator::storage::{RecordStorage, Record};\n\nuse rpki_validator::validation::{RecordValidator, ValidationRecords, ValidationResult};\n\n\n", "file_path": "src/main.rs", "rank": 46, "score": 11.281201229924825 }, { "content": " },\n\n ValidationResult::NotFound => {\n\n (\"NotFound\", \"\", \"No VRP covers the prefix\", ValidationRecords::default())\n\n },\n\n };\n\n let records_to_json = |records: &Vec<Record>| {\n\n let mut output = Vec::new();\n\n for record in records {\n\n output.push(\n\n json!({\n\n \"asn\": format!(\"{}\", record.origin()),\n\n \"prefix\": format!(\"{}\", record.prefix()),\n\n \"max_length\": record.max_length(),\n\n })\n\n );\n\n }\n\n output\n\n };\n\n json!({\n\n \"validated_route\": {\n", "file_path": "src/main.rs", "rank": 48, "score": 10.963833617887328 }, { "content": " if let Some(routes) = routes {\n\n let records = routes.iter().map(|address| {\n\n let prefix = IpNetwork::new(address.address(), address.address_length());\n\n match prefix {\n\n Ok(prefix) => {\n\n let record = Record::new(\n\n prefix,\n\n routes.as_id(),\n\n address.max_length()\n\n );\n\n Some(record)\n\n },\n\n Err(_) => None\n\n }\n\n }).filter(Option::is_some).map(Option::unwrap).collect::<Vec<_>>();\n\n 
self.storage.write().unwrap().add_records(records, path, &self.trust_anchor);\n\n }\n\n }\n\n Ok(())\n\n }\n", "file_path": "src/processor.rs", "rank": 49, "score": 10.557195313096653 }, { "content": "use std::time::Duration;\n\n\n\nuse prometheus::{HistogramOpts, HistogramVec, IntCounterVec, IntGaugeVec, Opts, Registry};\n\n\n\n#[derive(Clone)]\n\npub struct Metrics {\n\n update_time: HistogramVec,\n\n total_records: IntGaugeVec,\n\n total_runs: IntCounterVec,\n\n}\n\n\n\nimpl Metrics {\n\n pub fn new() -> Self {\n\n let update_time_buckets = vec![\n\n 1.0, 2.0, 3.0, 5.0, 7.5, 10.0, 15.0, 20.0, 40.0, 60.0, 80.0, 120.0, 160.0, 200.0\n\n ];\n\n\n\n let update_time = HistogramVec::new(\n\n HistogramOpts::new(\n\n \"update_time_duration_seconds\",\n", "file_path": "src/metrics.rs", "rank": 50, "score": 10.478248694116392 }, { "content": "}\n\n\n\n// ApiServerConfig\n\n\n\n#[derive(Deserialize)]\n\n#[serde(default)]\n\npub struct ApiServerConfig {\n\n pub endpoint: String\n\n}\n\n\n\nimpl Default for ApiServerConfig {\n\n fn default() -> Self {\n\n ApiServerConfig {\n\n endpoint: env::var(\"API_ENDPOINT\").unwrap_or(\"127.0.0.1:8080\".to_string())\n\n }\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 51, "score": 10.161987182485785 }, { "content": "extern crate bcder;\n\nextern crate bytes;\n\nextern crate ipnetwork;\n\n#[macro_use] extern crate log;\n\nextern crate num_cpus;\n\nextern crate prometheus;\n\nextern crate rpki;\n\n#[macro_use] extern crate serde_derive;\n\nextern crate toml;\n\n\n\npub mod config;\n\npub mod executor;\n\npub mod metrics;\n\npub mod processor;\n\npub mod rsync;\n\npub mod storage;\n\npub mod validation;\n", "file_path": "src/lib.rs", "rank": 53, "score": 10.038307583328107 }, { "content": "use std::io;\n\nuse std::io::BufRead;\n\nuse std::path::{Path, PathBuf};\n\nuse std::process::{Command, Stdio, Output};\n\n\n\nuse rpki::uri;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum RsyncAction {\n\n CreateFile(PathBuf),\n\n 
ModifyFile(PathBuf),\n\n DeleteFile(PathBuf),\n\n}\n\n\n\nimpl RsyncAction {\n\n pub fn path(&self) -> &Path {\n\n match self {\n\n RsyncAction::CreateFile(p) |\n\n RsyncAction::ModifyFile(p) |\n\n RsyncAction::DeleteFile(p) => p\n", "file_path": "src/rsync.rs", "rank": 54, "score": 9.355296081603989 }, { "content": "use std::env;\n\nuse std::io::Read;\n\nuse std::fs::File;\n\n\n\nuse num_cpus;\n\nuse toml;\n\n\n\n#[derive(Deserialize)]\n\npub struct Config {\n\n pub rsync: RsyncConfig,\n\n #[serde(default)]\n\n pub tal: TalConfig,\n\n #[serde(default)]\n\n pub validation: ValidationConfig,\n\n #[serde(default)]\n\n pub api_server: ApiServerConfig\n\n}\n\n\n\nimpl Config {\n\n pub fn from_path(path: &str) -> Option<Config> {\n", "file_path": "src/config.rs", "rank": 55, "score": 8.95815845689262 }, { "content": "impl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::Io(e) => e.fmt(f),\n\n Error::Tal(e) => e.fmt(f),\n\n Error::Generic(e) => write!(f, \"{}\", e),\n\n Error::Validation => write!(f, \"Failed to validate record\"),\n\n Error::Other => write!(f, \"Unknown error\"),\n\n }\n\n }\n\n}\n\n\n\n// ProcessingPolicy\n\n\n", "file_path": "src/processor.rs", "rank": 56, "score": 8.761025837220338 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fmt;\n\n\n\n #[derive(Clone)]\n\n struct DummyWork {\n\n\n\n }\n\n\n\n impl Work for DummyWork {\n\n fn execute(&mut self) -> Option<Instant> {\n\n None\n\n }\n\n }\n\n\n\n impl fmt::Debug for ScheduledWork {\n", "file_path": "src/executor.rs", "rank": 57, "score": 8.527608766871065 }, { "content": " storage.remove_records(self.deleted_files.iter());\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub enum ProcessingPolicyType {\n\n DownloadAndProcess,\n\n ProcessExisting,\n\n ProcessChanged,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ProcessingPolicy {\n\n policy_type: ProcessingPolicyType,\n\n next_processing_offset: 
Duration,\n\n cached_module_processors: HashMap<PathBuf, Arc<ModuleProcess + Sync + Send>>,\n\n}\n\n\n\nimpl ProcessingPolicy {\n", "file_path": "src/processor.rs", "rank": 58, "score": 8.45041120929056 }, { "content": "# API\n\n\n\nThe exposed _JSON_ API has only one endpoint to query for the validity of an advertisement:\n\n\n\n```\n\nhttp://endpoint/api/v1/validity/AS<number>/<prefix>\n\n```\n\n\n\nFor example, using `curl` and `jq` to pretty-print the JSON output:\n\n\n\n```bash\n\n$ curl http://127.0.0.1:8080/api/v1/validity/AS13335/1.1.1.0/24 2>/dev/null | jq\n\n{\n\n \"validated_route\": {\n\n \"route\": {\n\n \"origin_asn\": \"AS13335\",\n\n \"prefix\": \"1.1.1.0/24\"\n\n },\n\n \"validity\": {\n\n \"VRPs\": {\n\n \"matched\": [\n\n {\n\n \"asn\": \"AS13335\",\n\n \"max_length\": 24,\n\n \"prefix\": \"1.1.1.0/24\"\n\n }\n\n ],\n\n \"unmatched_as\": [],\n\n \"unmatched_length\": []\n\n },\n\n \"description\": \"VRPs cover prefix\",\n\n \"reason\": \"\",\n\n \"state\": \"Valid\"\n\n }\n\n }\n\n}\n\n```\n\n\n\n# Metrics\n\n\n\nA [prometheus](https://prometheus.io/) endpoint is exposed at `/mgmt/metrics` so you can\n\nset up alerts, dashboards to make sure everything's running smoothly.\n\n\n\n# Docker image\n\n\n\nThe provided `Dockerfile` will build a _docker_ image using any `tal` files inside the _tal_\n\ndirectory.\n\n\n\n# ARIN tal\n\n\n\nThe _ARIN_ TAL file is not provided by default because they have a pretty strict policy on the\n\nuser of it. You can go ahead and download it from [this link](https://www.arin.net/resources/rpki/tal.html)\n\nand place it in the `tal` directory. 
Make sure to download the version using the *RFC 7730* format.\n", "file_path": "README.md", "rank": 59, "score": 8.405485210268413 }, { "content": " self.modified_files.insert(path);\n\n }\n\n\n\n fn add_deleted_file(&mut self, path: PathBuf) {\n\n self.deleted_files.insert(path);\n\n }\n\n}\n\n\n\nimpl ModuleProcess for ProcessModified {\n\n fn should_process_module(&self) -> bool {\n\n !self.new_files.is_empty() || !self.modified_files.is_empty()\n\n }\n\n\n\n fn should_process_file(&self, file_path: &Path) -> bool {\n\n self.new_files.contains(file_path) || self.modified_files.contains(file_path)\n\n }\n\n\n\n fn remove_deleted_files(&self, storage: &mut RecordStorage) {\n\n if !self.deleted_files.is_empty() {\n\n info!(\"Removing state for {} deleted files\", self.deleted_files.len());\n", "file_path": "src/processor.rs", "rank": 60, "score": 8.133445780337775 }, { "content": " pub fn observe_update_time(&mut self, trust_anchor: &str, elapsed: Duration) {\n\n // Convert to seconds\n\n let elapsed = elapsed.as_secs() as f64 + elapsed.subsec_millis() as f64 * 0.001;\n\n self.update_time\n\n .with_label_values(&[&trust_anchor])\n\n .observe(elapsed);\n\n }\n\n\n\n pub fn set_total_records(&mut self, trust_anchor: &str, record_count: i64) {\n\n // Convert to seconds\n\n self.total_records\n\n .with_label_values(&[&trust_anchor])\n\n .set(record_count);\n\n }\n\n\n\n pub fn increase_total_runs(&mut self, trust_anchor: &str, successful: bool) {\n\n let successful_str = if successful { \"yes\" } else { \"no\" };\n\n // Convert to seconds\n\n self.total_runs\n\n .with_label_values(&[&trust_anchor, &successful_str])\n\n .inc_by(1);\n\n }\n\n}\n", "file_path": "src/metrics.rs", "rank": 61, "score": 8.121431776715133 }, { "content": " info!(\"Using config file {}\", path);\n\n let mut file = match File::open(path) {\n\n Ok(f) => f,\n\n Err(e) => {\n\n error!(\"Failed to open config file: {:}\", e);\n\n return None;\n\n }\n\n };\n\n let mut contents = String::new();\n\n 
match file.read_to_string(&mut contents) {\n\n Ok(_) => (),\n\n Err(e) => {\n\n error!(\"Failed to read config file: {:}\", e);\n\n return None;\n\n }\n\n };\n\n match toml::from_str(&contents) {\n\n Ok(c) => Some(c),\n\n Err(e) => {\n\n error!(\"Failed to parse config file: {:}\", e);\n", "file_path": "src/config.rs", "rank": 62, "score": 7.916987658971529 }, { "content": "\n\npub struct RsyncFetcher {\n\n rsync_path: String,\n\n}\n\n\n\nimpl RsyncFetcher {\n\n pub fn new(rsync_path: &str) -> Self {\n\n RsyncFetcher {\n\n rsync_path: rsync_path.to_string(),\n\n }\n\n }\n\n\n\n pub fn fetch(&self, uri: &uri::Rsync, output_dir: &Path) -> io::Result<RsyncOutput> {\n\n let mut base_output = PathBuf::new();\n\n base_output.push(output_dir);\n\n\n\n let rsync_output = self.spawn_rsync(uri, output_dir)?;\n\n let cursor = io::Cursor::new(rsync_output.stdout);\n\n let actions = cursor.lines()\n\n .map(move |line| {\n", "file_path": "src/rsync.rs", "rank": 63, "score": 7.89274682934883 }, { "content": "}\n\n\n\npub struct Executor {\n\n new_work_sender: Sender<ScheduledWork>,\n\n}\n\n\n\nimpl Executor {\n\n pub fn new(num_workers: usize) -> Self {\n\n let (new_work_tx, new_work_rx) = channel();\n\n let (ready_work_tx, ready_work_rx) = channel();\n\n let ready_work_rx = Arc::new(Mutex::new(ready_work_rx));\n\n for _ in 0..num_workers {\n\n Worker::start_work(ready_work_rx.clone(), new_work_tx.clone());\n\n }\n\n thread::spawn(move || {\n\n let executor_impl = ExecutorImpl::new(new_work_rx, ready_work_tx);\n\n executor_impl.work();\n\n });\n\n Executor {\n\n new_work_sender: new_work_tx,\n\n }\n\n }\n\n\n\n pub fn add_work(&mut self, work: WorkPtr, execution_time: Instant) {\n\n let new_work = ScheduledWork::new(work, execution_time);\n\n self.new_work_sender.send(new_work).expect(\"Failed to write work into channel\");\n\n }\n\n}\n\n\n", "file_path": "src/executor.rs", "rank": 64, "score": 7.728020840832668 }, { "content": "\n\n if let Err(e) = 
RsyncFetcher::new(&config.rsync.binary).check_rsync_binary() {\n\n error!(\"Failed to execute rsync binary: {}\", e);\n\n return;\n\n }\n\n\n\n // Setup prometheus metrics\n\n let metrics = Metrics::new();\n\n let mut registry = Registry::new();\n\n metrics.register(&mut registry);\n\n\n\n // Setup our processing blocks\n\n let storage = Arc::new(RwLock::new(RecordStorage::new()));\n\n let mut executor = Executor::new(config.validation.threads);\n\n let status = Arc::new(Mutex::new(ProcessingStatus::new()));\n\n\n\n // Bootstrap our processing\n\n if !bootstrap(&storage, &mut executor, &status, &metrics, &config) {\n\n return;\n\n }\n", "file_path": "src/main.rs", "rank": 65, "score": 7.389885346953047 }, { "content": "\n\n // Start the API\n\n server::new(\n\n move || {\n\n let registry = registry.clone();\n\n vec![\n\n // API handler\n\n App::with_state(AppState::new(RecordValidator::new(storage.clone()),\n\n storage.clone(),\n\n status.clone()))\n\n .prefix(\"/api/v1\")\n\n .resource(\n\n \"/validity/AS{asn}/{prefix}/{length}\",\n\n |r| r.method(http::Method::GET).f(Api::validate)\n\n )\n\n .resource(\n\n \"/status\",\n\n |r| r.method(http::Method::GET).f(Api::status)\n\n )\n\n .resource(\n", "file_path": "src/main.rs", "rank": 66, "score": 7.23776791086585 }, { "content": "use std::collections::{HashMap, HashSet};\n\nuse std::io;\n\nuse std::io::Read;\n\nuse std::ffi::OsStr;\n\nuse std::fmt;\n\nuse std::fs::{File, create_dir_all};\n\nuse std::path::{PathBuf, Path};\n\nuse std::sync::{Arc, RwLock};\n\nuse std::time::{Duration, Instant};\n\n\n\nuse bytes::Bytes;\n\n\n\nuse ipnetwork::IpNetwork;\n\n\n\nuse rpki::cert::{Cert, ResourceCert};\n\nuse rpki::crl::{Crl, CrlStore};\n\nuse rpki::manifest::{Manifest, ManifestContent, ManifestHash};\n\nuse rpki::roa::{Roa, RouteOriginAttestation};\n\nuse rpki::tal::{Tal, ReadError};\n\nuse rpki::uri;\n", "file_path": "src/processor.rs", "rank": 67, "score": 6.949293538086488 }, { "content": "\n\n Metrics {\n\n 
update_time,\n\n total_records,\n\n total_runs,\n\n }\n\n }\n\n\n\n pub fn register(&self, registry: &mut Registry) {\n\n registry\n\n .register(Box::new(self.update_time.clone()))\n\n .unwrap();\n\n registry\n\n .register(Box::new(self.total_records.clone()))\n\n .unwrap();\n\n registry\n\n .register(Box::new(self.total_runs.clone()))\n\n .unwrap();\n\n }\n\n\n", "file_path": "src/metrics.rs", "rank": 68, "score": 6.907148681576116 }, { "content": "\n\n pub fn update(&mut self) -> Result<(), Error> {\n\n let mut file = match File::open(&*self.tal_path.to_string_lossy()) {\n\n Ok(file) => file,\n\n Err(e) => {\n\n error!(\"Failed to open TAL file {:?}: {}\", self.tal_path, e);\n\n return Err(Error::Io(e));\n\n }\n\n };\n\n info!(\"Processing tal file {:?}\", self.tal_path);\n\n let tal = match Tal::read(&self.tal_path, &mut file) {\n\n Ok(tal) => tal,\n\n Err(e) => {\n\n error!(\"Failed to parse TAL file: {}\", e);\n\n return Err(Error::Tal(e));\n\n }\n\n };\n\n // Process this TAL file\n\n let output = self.process_tal(tal);\n\n // Process any file deletions\n", "file_path": "src/processor.rs", "rank": 69, "score": 6.802804167604765 }, { "content": " _ => panic!(\"\"),\n\n }\n\n }\n\n\n\n\n\n fn load_module(&mut self,\n\n uri: &uri::Rsync,\n\n output_path: &Path,\n\n rsync_fetcher: &RsyncFetcher)\n\n -> Result<Arc<ModuleProcess>, Error>\n\n {\n\n if let Some(cached_entry) = self.cached_module_processors.get(output_path) {\n\n return Ok(cached_entry.clone());\n\n }\n\n let output = self.do_load_module(uri, output_path, rsync_fetcher)?;\n\n self.cached_module_processors.insert(output_path.to_path_buf(), output.clone());\n\n Ok(output)\n\n }\n\n\n\n fn remove_deleted_files(&self, storage: &mut RecordStorage) {\n", "file_path": "src/processor.rs", "rank": 70, "score": 6.591482997475368 }, { "content": " }\n\n Err(Error::Validation)\n\n }\n\n\n\n fn process_object(&mut self,\n\n uri: &uri::Rsync,\n\n hash: ManifestHash,\n\n issuer_cert: &ResourceCert,\n\n store: &mut 
CrlStore)\n\n -> Result<Option<RouteOriginAttestation>, Error>\n\n {\n\n if uri.ends_with(\".cer\") {\n\n let bytes = match self.load_file(&uri)? {\n\n Some(b) => b,\n\n None => return Ok(None),\n\n };\n\n if hash.verify(&bytes).is_err() {\n\n info!(\"Verification of file {:?} failed\", self.get_path(uri));\n\n return Ok(None)\n\n }\n", "file_path": "src/processor.rs", "rank": 73, "score": 6.390070080689158 }, { "content": " \"roas\": records.into_iter().map(|r| {\n\n json!({\n\n \"prefix\": format!(\"{}\", r.prefix()),\n\n \"asn\": format!(\"{}\", r.origin()),\n\n \"maxLength\": r.max_length(),\n\n })\n\n }).collect::<Vec<_>>()\n\n })\n\n }\n\n\n\n fn build_status_body(trust_anchors_statuses: &HashMap<String, TrustAnchorStatus>)\n\n -> serde_json::Value\n\n {\n\n let updated_anchor_count = trust_anchors_statuses.iter()\n\n .filter(|(_, status)| status.successful_runs > 0)\n\n .count();\n\n let service_status = if updated_anchor_count == trust_anchors_statuses.len() {\n\n \"Ready\"\n\n } else {\n\n \"Updating\"\n", "file_path": "src/main.rs", "rank": 74, "score": 6.165152336851285 }, { "content": " None\n\n }\n\n }\n\n }\n\n}\n\n\n\n// RsyncConfig\n\n\n\n#[derive(Deserialize)]\n\npub struct RsyncConfig {\n\n #[serde(default = \"default_rsync\")]\n\n pub binary: String,\n\n pub interval: u32,\n\n #[serde(default = \"default_cache_path\")]\n\n pub cache_path: String,\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 75, "score": 6.104763030621254 }, { "content": " self.policy.remove_deleted_files(&mut self.storage.write().unwrap());\n\n // Now change our policy and then return\n\n self.policy = self.policy.next(self.processing_interval);\n\n output\n\n }\n\n\n\n fn process_tal(&mut self, tal: Tal) -> Result<(), Error> {\n\n for uri in tal.uris() {\n\n let output_path = self.get_module_path(uri);\n\n self.policy.load_module(\n\n uri,\n\n &output_path,\n\n &self.rsync_fetcher\n\n )?;\n\n let bytes = match self.load_file(uri)? 
{\n\n Some(b) => b,\n\n None => continue,\n\n };\n\n let path = self.get_path(uri);\n\n info!(\"Processing TAL file {:?}\", path);\n", "file_path": "src/processor.rs", "rank": 76, "score": 5.97143382130958 }, { "content": " }\n\n\n\n fn work(mut self) {\n\n loop {\n\n // Process any works which are ready\n\n self.process_scheduled_works();\n\n\n\n let timeout = match self.works.peek() {\n\n Some(new_work) => new_work.execution_time - Instant::now(),\n\n None => Self::MAX_TIMEOUT,\n\n };\n\n match self.new_work_receiver.recv_timeout(timeout) {\n\n Ok(new_work) => self.works.push(new_work),\n\n Err(RecvTimeoutError::Disconnected) => {\n\n info!(\"Executor failed to read from channel, stopping it\");\n\n break;\n\n },\n\n // We simply hit a timeout. Keep retrying\n\n Err(RecvTimeoutError::Timeout) => (),\n\n }\n", "file_path": "src/executor.rs", "rank": 77, "score": 5.894351173771488 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RsyncOutput {\n\n actions: Vec<RsyncAction>,\n\n}\n\n\n\nimpl RsyncOutput {\n\n pub fn new(actions: Vec<RsyncAction>) -> RsyncOutput {\n\n RsyncOutput {\n\n actions\n\n }\n\n }\n\n\n\n pub fn actions(self) -> Vec<RsyncAction> {\n\n self.actions\n\n }\n\n}\n", "file_path": "src/rsync.rs", "rank": 78, "score": 5.724755993936782 }, { "content": " self.execution_time() == other.execution_time()\n\n }\n\n}\n\n\n\nimpl Eq for ScheduledWork {\n\n\n\n}\n\n\n\nimpl ExecutorImpl {\n\n const MAX_TIMEOUT: Duration = Duration::from_secs(1);\n\n\n\n fn new(new_work_receiver: Receiver<ScheduledWork>,\n\n ready_work_sender: Sender<WorkPtr>)\n\n -> Self \n\n {\n\n ExecutorImpl {\n\n new_work_receiver,\n\n ready_work_sender,\n\n works: BinaryHeap::new(),\n\n }\n", "file_path": "src/executor.rs", "rank": 79, "score": 5.592338715966968 }, { "content": " error!(\"Failed to update {} trust anchor: {}\", trust_anchor, e);\n\n }\n\n }\n\n }\n\n\n\n let total_records = self.storage.read().unwrap().total_records(trust_anchor);\n\n 
self.metrics.set_total_records(trust_anchor, total_records as i64);\n\n self.metrics.observe_update_time(trust_anchor, elapsed);\n\n Some(self.processor.next_update_time())\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 80, "score": 5.419023272202598 }, { "content": "\n\n fn get_manifest(&mut self, issuer_cert: &ResourceCert, store: &mut CrlStore)\n\n -> Result<Option<ManifestContent>, Error>\n\n {\n\n for uri in issuer_cert.manifest_uris() {\n\n let uri = match uri.into_rsync_uri() {\n\n Some(uri) => uri,\n\n None => continue,\n\n };\n\n let bytes = match self.rsync_load_file(&uri)? {\n\n Some(b) => b,\n\n None => continue,\n\n };\n\n let manifest = match Manifest::decode(bytes, self.strict_mode) {\n\n Ok(manifest) => manifest,\n\n Err(_) => {\n\n warn!(\"Failed to decode manifest for URI {:?}\", uri);\n\n continue\n\n }\n\n };\n", "file_path": "src/processor.rs", "rank": 81, "score": 5.343433175966396 }, { "content": "extern crate actix_web;\n\nextern crate chrono;\n\n#[macro_use] extern crate log;\n\nextern crate ipnetwork;\n\nextern crate num_cpus;\n\nextern crate prometheus;\n\nextern crate rpki_validator;\n\n#[macro_use] extern crate serde_json;\n\nextern crate simple_logger;\n\nextern crate clap;\n\n\n\nuse std::collections::HashMap;\n\nuse std::fs;\n\nuse std::sync::{Arc, Mutex, RwLock};\n\nuse std::time::{Duration, Instant};\n\n\n\nuse actix_web::{http, server, App, HttpRequest, HttpResponse};\n\nuse actix_web::dev::Handler;\n\n\n\nuse chrono::Local;\n", "file_path": "src/main.rs", "rank": 82, "score": 5.321594531842241 }, { "content": " }\n\n}\n\n\n\nimpl Work for ProcessorWork {\n\n fn execute(&mut self) -> Option<Instant> {\n\n info!(\"Running update on {} trust anchor\", self.processor.trust_anchor_name());\n\n let now = Instant::now();\n\n let update_result = self.processor.update();\n\n let elapsed = now.elapsed();\n\n let trust_anchor = self.processor.trust_anchor_name();\n\n {\n\n let mut status = self.status.lock().unwrap();\n\n match 
update_result {\n\n Ok(_) => {\n\n self.metrics.increase_total_runs(trust_anchor, true);\n\n status.mark_successful_run(trust_anchor, Local::now(), elapsed);\n\n },\n\n Err(e) => {\n\n self.metrics.increase_total_runs(trust_anchor, false);\n\n status.mark_error_run(trust_anchor, Local::now(), elapsed);\n", "file_path": "src/main.rs", "rank": 83, "score": 5.317640784912021 }, { "content": " Some(some) => some,\n\n None => return Ok(())\n\n };\n\n for uri in uri_list.iter() {\n\n let uri = match uri.into_rsync_uri() {\n\n Some(uri) => uri,\n\n None => continue\n\n };\n\n\n\n // If we already have that CRL, use it.\n\n if let Some(crl) = store.get(&uri) {\n\n if crl.contains(&cert.serial_number()) {\n\n return Err(Error::Validation)\n\n }\n\n else {\n\n return Ok(())\n\n }\n\n }\n\n\n\n // Otherwise, try to load it, use it, and then store it.\n", "file_path": "src/processor.rs", "rank": 84, "score": 5.115017376734402 }, { "content": " fn export(req: &HttpRequest<AppState>) -> HttpResponse {\n\n let records = req.state().storage.read().unwrap().records();\n\n ApiResponse::ExportResponse(records).build()\n\n }\n\n\n\n fn status(req: &HttpRequest<AppState>) -> HttpResponse {\n\n let status = req.state().status.lock().unwrap();\n\n ApiResponse::StatusResponse(status.trust_anchor_statuses()).build()\n\n }\n\n}\n\n\n\n// Metrics\n\n\n", "file_path": "src/main.rs", "rank": 85, "score": 4.938685343196685 }, { "content": " let (cert, manifest) = match manifest.validate(issuer_cert, self.strict_mode) {\n\n Ok(manifest) => manifest,\n\n Err(_) => {\n\n info!(\"Failed to validate manifest for URI {:?}\", uri);\n\n continue\n\n }\n\n };\n\n if let Err(_) = self.check_crl(cert.as_ref(), issuer_cert, store) {\n\n info!(\"Certificate for URI {:?} has been revoked\", uri);\n\n continue\n\n }\n\n return Ok(Some(manifest))\n\n }\n\n Ok(None)\n\n }\n\n\n\n fn check_crl(&mut self, cert: &Cert, issuer_cert: &ResourceCert, store: &mut CrlStore)\n\n -> Result<(), Error>\n\n {\n\n let 
uri_list = match cert.crl_distribution() {\n", "file_path": "src/processor.rs", "rank": 86, "score": 4.8559476243982465 }, { "content": " \"Time taken to update and process every trust anchor\",\n\n ).buckets(update_time_buckets),\n\n &[\"trust_anchor\"]\n\n ).unwrap();\n\n\n\n let total_records = IntGaugeVec::new(\n\n Opts::new(\n\n \"records_total\",\n\n \"Number of records in storage\",\n\n ),\n\n &[\"trust_anchor\"],\n\n ).unwrap();\n\n\n\n let total_runs = IntCounterVec::new(\n\n Opts::new(\n\n \"runs_total\",\n\n \"Number of runs performed\",\n\n ),\n\n &[\"trust_anchor\", \"successful\"],\n\n ).unwrap();\n", "file_path": "src/metrics.rs", "rank": 87, "score": 4.843759328351117 }, { "content": "RPKI validator\n\n===\n\n\n\nThis is an RPKI validator written in Rust that provides a subset of the features provided\n\nby [RIPE's RPKI validator](https://github.com/RIPE-NCC/rpki-validator). These include:\n\n\n\n* Downloading RPKI repositories via rsync and validating the records in them.\n\n* Providing an API to query for the validity of an advertisement. The API provided uses\n\nthe same paths and formats as RIPE's validator, allowing swapping them transparently.\n\n\n\nThis application wouldn't exist if it wasn't for [NLnet Labs](https://github.com/NLnetLabs)\n\namazing [library](https://github.com/NLnetLabs/rpki-rs) that really does all the heavy work.\n\nThe code in the `processor` module is based on their\n\n[routinator](https://github.com/NLnetLabs/routinator) project. Thanks to them for putting\n\nall that hard work into those projects!\n\n\n\n# Why?\n\n\n\nThis implementation, while lacking many features, has a few advantages over RIPE's validator.\n\nAmong them:\n\n\n\n* The memory footprint is much smaller. Using all 5 RIR repositories via rsync on RIPE's validator,\n\nmemory usage can go up to several GBs. This implementation instead uses something around\n\n30MB of RAM.\n\n* Record validation is considerably faster. 
A lot of this processing speed comes simply from using\n\n[rpki-rs](https://github.com/NLnetLabs/rpki-rs) but I imagine this is also due to the fact that\n\neverything is kept in memory rather than in a database on the filesystem. As an example, in my\n\nlaptop it takes about 2 seconds to validate the entire _LACNIC_ repository, whereas it takes about\n\n100 to do the same when running RIPE's one.\n\n* Any subsequent rsync call after the initial one per trust anchor, only new/modified\n\nfiles are processed. This means that if nothing in the repository has changed, the validator\n\nwon't re-validate everything because there's really nothing to be validated. Records are mapped\n\nto the particular file they came from so whenever it's either modified or deleted, the associated\n\nrecords get invalidated. This reduces the CPU usage and only uses it when there's actually\n\nsomething to validate.\n\n* Consistently fast API. While doing some tests, I noticed RIPE's validator's average response \n\ntime for validation API calls would be around 150ms per call, having sporadic spikes which could\n\ngo up to 20 seconds. This validator replies consistently at around 15ms. 
Validating an\n\nadvertisement is O(1) and finding matched/unmatched records takes about 1.5 microseconds in \n\na 2.2ghz CPU.\n\n* [Prometheus](https://prometheus.io/) metrics are exposed via an HTTP endpoint so it's possible\n", "file_path": "README.md", "rank": 88, "score": 4.71175517911529 }, { "content": " };\n\n Processor {\n\n tal_path,\n\n trust_anchor: Arc::new(TrustAnchor::new(name)),\n\n output_path: output_path.to_string(),\n\n strict_mode,\n\n rsync_fetcher,\n\n storage,\n\n processing_interval,\n\n policy: ProcessingPolicy::new(ProcessingPolicyType::DownloadAndProcess),\n\n }\n\n }\n\n\n\n pub fn next_update_time(&self) -> Instant {\n\n self.policy.next_process_time()\n\n }\n\n\n\n pub fn trust_anchor_name(&self) -> &str {\n\n &self.trust_anchor.name()\n\n }\n", "file_path": "src/processor.rs", "rank": 89, "score": 4.566012076546254 }, { "content": "\n\n fn load_file(&self, uri: &uri::Rsync) -> Result<Option<Bytes>, Error> {\n\n let file_path = self.get_path(uri);\n\n let file = File::open(&file_path);\n\n match file {\n\n Ok(mut file) => {\n\n let mut buffer = Vec::new();\n\n file.read_to_end(&mut buffer)?;\n\n Ok(Some(Bytes::from(buffer)))\n\n }\n\n Err(ref e) if e.kind() == io::ErrorKind::NotFound => {\n\n warn!(\"Ignoring file not found {:?}\", file_path);\n\n Ok(None)\n\n },\n\n Err(e) => Err(Error::Io(e))\n\n }\n\n }\n\n\n\n fn rsync_load_file(&mut self, uri: &uri::Rsync) -> Result<Option<Bytes>, Error> {\n\n let output_path = self.get_module_path(uri);\n", "file_path": "src/processor.rs", "rank": 90, "score": 4.461583038960994 }, { "content": " pub fn new(policy_type: ProcessingPolicyType) -> Self {\n\n ProcessingPolicy {\n\n policy_type,\n\n next_processing_offset: Duration::default(),\n\n cached_module_processors: HashMap::new(),\n\n }\n\n }\n\n\n\n fn next(&self, default_execution_offset: Duration) -> ProcessingPolicy {\n\n // If have just processed the existing ones, we want to immediately rsync and process\n\n // all changes. 
Otherwise, we've already rsync'd, let's wait until our scheduled\n\n // execution time and process modifications by then\n\n let (policy, offset) = match self.policy_type {\n\n ProcessingPolicyType::ProcessExisting => {\n\n (ProcessingPolicyType::ProcessChanged, Duration::default())\n\n },\n\n ProcessingPolicyType::ProcessChanged | ProcessingPolicyType::DownloadAndProcess => {\n\n (ProcessingPolicyType::ProcessChanged, default_execution_offset)\n\n },\n\n };\n", "file_path": "src/processor.rs", "rank": 91, "score": 4.427679544522105 }, { "content": " return Ok(Arc::new(AlwaysProcess{}))\n\n },\n\n _ => (),\n\n };\n\n let uri = uri::Rsync::new(uri.to_module(), Bytes::new());\n\n info!(\"Downloading files for directory {:?}\", output_path);\n\n create_dir_all(&output_path)?;\n\n let rsync_output = rsync_fetcher.fetch(\n\n &uri,\n\n output_path\n\n )?;\n\n let mut created = Vec::new();\n\n let mut modified = Vec::new();\n\n let mut deleted = Vec::new();\n\n for action in rsync_output.actions() {\n\n match action {\n\n RsyncAction::CreateFile(p) => created.push(p),\n\n RsyncAction::ModifyFile(p) => modified.push(p),\n\n RsyncAction::DeleteFile(p) => deleted.push(p),\n\n }\n", "file_path": "src/processor.rs", "rank": 92, "score": 4.312714944060269 }, { "content": " issuer_cert,\n\n manifest,\n\n crl_store,\n\n module_processor,\n\n repo_uri\n\n )\n\n }\n\n\n\n fn process_manifest(&mut self,\n\n issuer_cert: ResourceCert,\n\n manifest: ManifestContent,\n\n mut crl_store: CrlStore,\n\n module_processor: Arc<ModuleProcess>,\n\n repo_uri: uri::Rsync)\n\n -> Result<(), Error> {\n\n for item in manifest.iter_uris(repo_uri.clone()) {\n\n let (uri, hash) = match item {\n\n Ok(item) => item,\n\n Err(e) => {\n\n warn!(\"Verification failed for {}: {}\", repo_uri, e);\n", "file_path": "src/processor.rs", "rank": 93, "score": 4.2929106346247075 }, { "content": " Some(uri) => uri,\n\n None => return Ok(())\n\n };\n\n let output_path = self.get_module_path(&repo_uri);\n\n let 
module_processor = self.policy.load_module(\n\n &repo_uri,\n\n &output_path,\n\n &self.rsync_fetcher\n\n )?;\n\n if !module_processor.should_process_module() {\n\n info!(\"Skipping unmodified certificate file {:?}\", output_path);\n\n return Ok(());\n\n }\n\n let mut crl_store = CrlStore::new();\n\n let manifest = match self.get_manifest(&issuer_cert, &mut crl_store)? {\n\n Some(manifest) => manifest,\n\n None => return Ok(())\n\n };\n\n\n\n self.process_manifest(\n", "file_path": "src/processor.rs", "rank": 94, "score": 4.2929106346247075 }, { "content": "\n\n fn mark_error_run(&mut self, trust_anchor_name: &str, last_run: DateTime<Local>,\n\n last_duration: Duration) {\n\n let entry = self.get_entry(trust_anchor_name, last_run, last_duration);\n\n entry.error_runs += 1;\n\n }\n\n\n\n fn get_entry(&mut self, trust_anchor_name: &str, last_run: DateTime<Local>,\n\n last_duration: Duration)\n\n -> &mut TrustAnchorStatus\n\n {\n\n let entry = self.trust_anchors.entry(trust_anchor_name.to_string()).or_default();\n\n entry.last_run = Some(last_run);\n\n entry.last_duration = Some(last_duration);\n\n entry\n\n }\n\n\n\n fn trust_anchor_statuses(&self) -> &HashMap<String, TrustAnchorStatus> {\n\n &self.trust_anchors\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 95, "score": 4.158990971858669 }, { "content": " self.parse_action(&line.unwrap(), &base_output)\n\n })\n\n .filter(|o| o.is_some())\n\n .map(|o| o.unwrap())\n\n .collect();\n\n Ok(RsyncOutput::new(actions))\n\n }\n\n\n\n pub fn check_rsync_binary(&self) -> io::Result<()> {\n\n let mut cmd = Command::new(&self.rsync_path);\n\n cmd.args(&[\"-h\"]);\n\n cmd.output().map(|_| ())\n\n }\n\n\n\n fn parse_action(&self, action: &str, base_output: &PathBuf) -> Option<RsyncAction> {\n\n let mut tokens = action.split(' ');\n\n let changes = tokens.next().unwrap();\n\n let tokens : Vec<&str> = tokens.filter(|t| !t.is_empty()).collect();\n\n let path = base_output.join(&tokens.join(\" \"));\n\n if changes == 
\"*deleting\" {\n", "file_path": "src/rsync.rs", "rank": 96, "score": 3.9511268765332592 }, { "content": " continue;\n\n },\n\n };\n\n let path = self.get_path(&uri);\n\n // Ignore if it it's not modified and it's not a certificate file\n\n if !module_processor.should_process_file(&path) && !uri.ends_with(\".cer\") {\n\n debug!(\"Skipping unmodified file {:?}\", path);\n\n continue;\n\n }\n\n debug!(\"Processing file {:?}\", path);\n\n let routes = self.process_object(\n\n &uri,\n\n hash,\n\n &issuer_cert,\n\n &mut crl_store\n\n );\n\n let routes = match routes {\n\n Ok(routes) => routes,\n\n Err(_) => continue,\n\n };\n", "file_path": "src/processor.rs", "rank": 97, "score": 3.912979205491601 }, { "content": "use std::cmp::Ordering;\n\nuse std::collections::binary_heap::BinaryHeap;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::sync::mpsc::{Sender, Receiver, channel, RecvTimeoutError};\n\nuse std::thread;\n\nuse std::time::{Duration, Instant};\n\n\n", "file_path": "src/executor.rs", "rank": 98, "score": 3.821112386540643 }, { "content": " }\n\n }\n\n\n\n fn process_scheduled_works(&mut self) {\n\n loop {\n\n let is_ready = match self.works.peek() {\n\n Some(scheduled_work) => scheduled_work.execution_time() <= &Instant::now(),\n\n None => false,\n\n };\n\n if is_ready {\n\n debug!(\"Found work ready to be executed, forwarding to worker\");\n\n let scheduled_work = self.works.pop().unwrap();\n\n if self.ready_work_sender.send(scheduled_work.work()).is_err() {\n\n info!(\"Failed to send ready work\");\n\n }\n\n }\n\n else {\n\n break;\n\n }\n\n }\n", "file_path": "src/executor.rs", "rank": 99, "score": 3.807556362053926 } ]
Rust
day-12/src/matt.rs
rust-nairobi/aoc-2020
1d39ff72936833c216c89a91f3ba3a4c9c2aac3e
use std::fs::File; use std::io::{self, BufRead}; struct Position { x: isize, y: isize, } struct Ship { dir: f64, pos: Position, waypoint: Position, } impl Ship { fn new() -> Ship { Ship { dir: 0f64, pos: Position { x: 0, y: 0 }, waypoint: Position { x: 10, y: 1 }, } } fn navigate(&mut self, actions: &[Action]) { for action in actions { match action { Action::Right(v) => self.dir = ((self.dir - *v as f64) as isize % 360) as f64, Action::Left(v) => self.dir = ((self.dir + *v as f64) as isize % 360) as f64, Action::Forward(v) => { self.pos.x += self.dir.to_radians().cos() as isize * *v; self.pos.y += self.dir.to_radians().sin() as isize * *v; } Action::North(v) => self.pos.y += *v, Action::South(v) => self.pos.y -= *v, Action::East(v) => self.pos.x += *v, Action::West(v) => self.pos.x -= *v, } } } fn turn(&mut self, v: isize) { let r = ((self.waypoint.x.pow(2) + self.waypoint.y.pow(2)) as f64).sqrt(); let t = (self.waypoint.y as f64).atan2(self.waypoint.x as f64); let t = t - (v as f64).to_radians(); self.waypoint.x = (r * t.cos()).round() as isize; self.waypoint.y = (r * t.sin()).round() as isize; } fn navigate2(&mut self, actions: &[Action]) { for action in actions { match action { Action::Right(v) => self.turn(*v), Action::Left(v) => self.turn(*v - 2 * v), Action::Forward(v) => { self.pos.x += self.waypoint.x * *v; self.pos.y += self.waypoint.y * *v; } Action::North(v) => self.waypoint.y += *v, Action::South(v) => self.waypoint.y -= *v, Action::East(v) => self.waypoint.x += *v, Action::West(v) => self.waypoint.x -= *v, } } } fn mdist(&self) -> isize { self.pos.x.abs() + self.pos.y.abs() } } enum Action { East(isize), North(isize), South(isize), West(isize), Forward(isize), Left(isize), Right(isize), } fn part_one(actions: &[Action]) -> isize { let mut ship = Ship::new(); ship.navigate(actions); ship.mdist() } fn part_two(actions: &[Action]) -> isize { let mut ship = Ship::new(); ship.navigate2(actions); ship.mdist() } fn main() { let actions = 
load_input("input.txt").unwrap(); println!("Part One: {} ", part_one(&actions)); println!("Part Two: {} ", part_two(&actions)); } fn load_input(fname: &str) -> io::Result<Vec<Action>> { let file = File::open(fname)?; let buf = io::BufReader::new(file); Ok(buf .lines() .filter_map(|x| x.ok()) .map(|x| { let val = x[1..].parse::<isize>().unwrap(); match &x[0..1] { "E" => Action::East(val), "F" => Action::Forward(val), "L" => Action::Left(val), "N" => Action::North(val), "R" => Action::Right(val), "S" => Action::South(val), "W" => Action::West(val), _ => unreachable!(), } }) .collect()) } #[test] fn test() { let actions = load_input("test.txt").unwrap(); let mut ship = Ship::new(); ship.navigate(&actions); assert_eq!(25, ship.mdist()); }
use std::fs::File; use std::io::{self, BufRead}; struct Position { x: isize, y: isize, } struct Ship { dir: f64, pos: Position, waypoint: Position, } impl Ship { fn new() -> Ship { Ship { dir: 0f64, pos: Position { x: 0, y: 0 }, waypoint: Position { x: 10, y: 1 }, } } fn navigate(&mut self, actions: &[Action]) { for action in actions { match action { Action::Right(v) => self.dir = ((self.dir - *v as f64) as isize % 360) as f64, Action::Left(v) => self.dir = ((self.dir + *v as f64) as isize % 360) as f64, Action::Forward(v) => { self.pos.x += self.dir.to_radians().cos() as isize * *v; self.pos.y += self.dir.to_radians().sin() as isize * *v; } Action::North(v) => self.pos.y += *v, Action::South(v) => self.pos.y -= *v, Action::East(v) => self.pos.x += *v, Action::West(v) => self.pos.x -= *v, } } } fn turn(&mut self, v: isize) { let r = ((self.waypoint.x.pow(2) + self.waypoint.y.pow(2)) as f64).sqrt(); let t = (self.waypoint.y as f64).atan2(self.waypoint.x as f64); let t = t - (v as f64).to_radians(); self.waypoint.x = (r * t.cos()).round() as isize; self.waypoint.y = (r * t.sin()).round() as isize; } fn navigate2(&mut self, actions: &[Action]) { for action in actions { match action { Action::Right(v) => self.turn(*v), Action::Left(v) => self.turn(*v - 2 * v), Action::Forward(v) => { self.pos.x += self.waypoint.x * *v; self.pos.y += self.waypoint.y * *v; } Action::North(v) => self.waypoint.y += *v, Action::South(v) => self.waypoint.y -= *v, Action::East(v) => self.waypoint.x += *v, Action::West(v) => self.waypoint.x -= *v, } } } fn mdist(&self) -> isize { self.pos.x.abs() + self.pos.y.abs() } } enum Action { East(isize), North(isize), South(isize), West(isize), Forward(isize), Left(isize), Right(isize), } fn part_one(actions: &[Action]) -> isize { let mut ship = Ship::new(); ship.navigate(actions); ship.mdist() } fn part_two(actions: &[Action]) -> isize { let mut ship = Ship::new(); ship.navigate2(actions); ship.mdist() } fn main() { let actions = 
load_input("input.txt").unwrap(); println!("Part One: {} ", part_one(&actions)); println!("Part Two: {} ", part_two(&actions)); } fn load_input(fname: &str) -> io::Result<Vec<Action>> { let file = File::open(fname)?; let buf = io::BufReader::new(file); Ok(buf .line
#[test] fn test() { let actions = load_input("test.txt").unwrap(); let mut ship = Ship::new(); ship.navigate(&actions); assert_eq!(25, ship.mdist()); }
s() .filter_map(|x| x.ok()) .map(|x| { let val = x[1..].parse::<isize>().unwrap(); match &x[0..1] { "E" => Action::East(val), "F" => Action::Forward(val), "L" => Action::Left(val), "N" => Action::North(val), "R" => Action::Right(val), "S" => Action::South(val), "W" => Action::West(val), _ => unreachable!(), } }) .collect()) }
function_block-function_prefixed
[ { "content": "fn parse_input_line(line: &str) -> GridLine {\n\n let mut grid_line = vec![];\n\n for chr in line.chars() {\n\n match chr {\n\n '#' => grid_line.push(GridPoint::Tree),\n\n '.' => grid_line.push(GridPoint::OpenSquare),\n\n _ => unreachable!(),\n\n }\n\n }\n\n grid_line\n\n}\n\n\n", "file_path": "day-3/src/matt.rs", "rank": 2, "score": 152095.8572203536 }, { "content": "fn part_two(mut v: Vec<(usize, usize)>) -> usize {\n\n v = v.into_iter().map(|x| (x.1, x.0)).collect();\n\n v.sort_by_key(|x| x.0);\n\n let mut n = 1;\n\n let mut r = v[v.len() - 1].0 - v[v.len() - 1].1;\n\n for i in 1..v.len() {\n\n n *= v[v.len() - i].0;\n\n let (n2, k2) = v[v.len() - i - 1];\n\n r = step(r, n, n2, k2);\n\n }\n\n r\n\n}\n\n\n", "file_path": "day-13/src/matt.rs", "rank": 3, "score": 126119.8843186651 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "src/main.rs", "rank": 7, "score": 122491.43867847175 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-11/src/main.rs", "rank": 8, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-10/src/main.rs", "rank": 9, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-8/src/main.rs", "rank": 10, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-13/src/main.rs", "rank": 11, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-7/src/main.rs", "rank": 12, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-9/src/main.rs", "rank": 13, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-5/src/main.rs", "rank": 14, "score": 120278.73981022034 }, { "content": "fn main() {\n\n 
println!(\"Hello, world!\");\n\n}\n", "file_path": "day-1/src/main.rs", "rank": 15, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-12/src/main.rs", "rank": 16, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-6/src/main.rs", "rank": 17, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-2/src/main.rs", "rank": 18, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-4/src/main.rs", "rank": 19, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-3/src/main.rs", "rank": 20, "score": 120278.73981022034 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "day-15/src/main.rs", "rank": 21, "score": 120278.73981022034 }, { "content": "fn parse_input_line(line: &str) -> Result<PwdEntry, std::num::ParseIntError> {\n\n let parts: Vec<_> = line.split(' ').collect();\n\n let low_high: Vec<_> = parts[0].split('-').collect();\n\n let low: i32 = low_high[0].parse()?;\n\n let high: i32 = low_high[1].parse()?;\n\n let chr: char = parts[1].split(':').collect::<Vec<_>>()[0]\n\n .chars()\n\n .next()\n\n .unwrap();\n\n let pwd = parts[2].to_string();\n\n\n\n Ok(PwdEntry::new((low, high, chr, pwd)))\n\n}\n\n\n", "file_path": "day-2/src/matt.rs", "rank": 22, "score": 119667.3720376981 }, { "content": "fn part_two(grid: &mut Grid) -> usize {\n\n let dxns = vec![\n\n (1, 0),\n\n (-1, 0),\n\n (0, 1),\n\n (0, -1),\n\n (-1, -1),\n\n (-1, 1),\n\n (1, -1),\n\n (1, 1),\n\n ];\n\n let length = grid.length();\n\n let width = grid.width();\n\n loop {\n\n let mut changes = vec![];\n\n for x in 0..length {\n\n for y in 0..width {\n\n let mut num_occupied = 0;\n\n for d in &dxns {\n\n for scale in 1.. 
{\n", "file_path": "day-11/src/matt.rs", "rank": 23, "score": 113712.57007990271 }, { "content": "fn part_one(grid: &mut Grid) -> usize {\n\n let length = grid.length();\n\n let width = grid.width();\n\n loop {\n\n let mut changes = vec![];\n\n for x in 0..length {\n\n for y in 0..width {\n\n let is_empty: bool = gen_surround((x, y), length, width)\n\n .into_iter()\n\n .filter(|p| grid.get(p.0, p.1) == Pos::Occupied)\n\n .count()\n\n == 0;\n\n if is_empty && grid.is_empty(x, y) {\n\n changes.push((x, y, Pos::Occupied));\n\n }\n\n let crowded: bool = gen_surround((x, y), length, width)\n\n .into_iter()\n\n .filter(|p| grid.get(p.0, p.1) == Pos::Occupied)\n\n .count()\n\n >= 4;\n", "file_path": "day-11/src/matt.rs", "rank": 24, "score": 113690.77833663093 }, { "content": "fn main() {\n\n let seat_ids: HashSet<u32> = get_passes().iter().map(|pass| pass.seat_id()).collect();\n\n\n\n println!(\"Part 1: \\n----------\");\n\n part_1(&seat_ids);\n\n\n\n println!(\"----------\");\n\n println!(\"Part 2: \\n----------\");\n\n part_2(&seat_ids);\n\n}\n", "file_path": "day-5/src/vickz84259.rs", "rank": 25, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let lines = read_lines();\n\n part_1(&lines);\n\n\n\n println!(\"----------\");\n\n\n\n part_2(&lines);\n\n}\n", "file_path": "day-2/src/vickz84259.rs", "rank": 26, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let map = input().unwrap();\n\n let slope = Slope::new(3, 1);\n\n\n\n println!(\"Part One: {}\", trees_encountered(&map, &slope));\n\n println!(\n\n \"Part Two: {}\",\n\n trees_encountered_multiplied(\n\n &map,\n\n vec!(\n\n Slope::new(1, 1),\n\n Slope::new(3, 1),\n\n Slope::new(5, 1),\n\n Slope::new(7, 1),\n\n Slope::new(1, 2),\n\n )\n\n )\n\n );\n\n}\n\n\n", "file_path": "day-3/src/matt.rs", "rank": 27, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let passports = get_passports();\n\n\n\n println!(\"Part 1: \\n----------\");\n\n part_1(&passports);\n\n\n\n 
println!(\"----------\");\n\n println!(\"Part 2: \\n----------\");\n\n part_2(&passports);\n\n}\n", "file_path": "day-4/src/vickz84259.rs", "rank": 28, "score": 112124.63186830975 }, { "content": "fn main() {\n\n println!(\"Part One: {} \", part_one(vec!(0, 5, 4, 1, 10, 14, 7), 2020));\n\n println!(\n\n \"Part Two: {} \",\n\n part_one(vec!(0, 5, 4, 1, 10, 14, 7), 30000000)\n\n );\n\n}\n\n\n", "file_path": "day-15/src/matt.rs", "rank": 29, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let lines = get_lines();\n\n\n\n println!(\"Part 1: \\n ----------\");\n\n\n\n println!(\"Default Map\");\n\n let mut start = Instant::now();\n\n\n\n let default_map = DefaultMap::new(&lines);\n\n part_1(&default_map);\n\n println!(\"\\tTime Taken: {:?}\", start.elapsed());\n\n\n\n println!(\"Bool Map\");\n\n start = Instant::now();\n\n\n\n let bool_map = BoolMap::new(&lines);\n\n part_1(&bool_map);\n\n println!(\"\\tTime Taken: {:?}\", start.elapsed());\n\n\n\n println!(\"Bit Map\");\n", "file_path": "day-3/src/vickz84259.rs", "rank": 30, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let cypher = load_input(\"input.txt\").unwrap();\n\n\n\n println!(\"Part One: {} \", part_one(&cypher, 25 + 1));\n\n println!(\"Part Two: {} \", part_two(&cypher, 25 + 1));\n\n}\n\n\n", "file_path": "day-9/src/matt.rs", "rank": 31, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let (time, buses) = load_input(\"input.txt\").unwrap();\n\n let buses2 = load_input2(\"input.txt\").unwrap();\n\n\n\n println!(\"Part One: {} \", part_one(time, buses));\n\n println!(\"Part Two: {} \", part_two(buses2));\n\n}\n\n\n", "file_path": "day-13/src/matt.rs", "rank": 32, "score": 112124.63186830975 }, { "content": "fn main() {\n\n println!(\"Part 1: \\n----------\");\n\n let bags = get_bags();\n\n\n\n let mut start = Instant::now();\n\n part_1(&bags);\n\n println!(\"Time Taken: {:?}\", start.elapsed());\n\n\n\n println!(\"----------\");\n\n println!(\"Part 2: 
\\n----------\");\n\n\n\n start = Instant::now();\n\n part_2(&bags);\n\n println!(\"Time Taken: {:?}\", start.elapsed());\n\n}\n", "file_path": "day-7/src/vickz84259.rs", "rank": 33, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let ruleset = load_input(\"input.txt\").unwrap();\n\n\n\n println!(\"Part One: {} \", part_one(&ruleset, \"shiny gold\"));\n\n println!(\"Part Two: {} \", part_two(&ruleset, \"shiny gold\"));\n\n}\n\n\n", "file_path": "day-7/src/matt.rs", "rank": 34, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let input = input().unwrap();\n\n\n\n println!(\"Part One: {}\", valid_passwords(&input));\n\n println!(\"Part Two: {}\", valid_passwords2(&input));\n\n}\n\n\n", "file_path": "day-2/src/matt.rs", "rank": 35, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let input = vec![\n\n 1864, 1192, 1802, 1850, 1986, 1514, 1620, 1910, 1557, 1529, 1081, 1227, 1869, 1545, 1064,\n\n 1509, 1060, 1590, 1146, 1855, 667, 1441, 1241, 1473, 1321, 1429, 1534, 1959, 1188, 1597,\n\n 1256, 1673, 1879, 1821, 1423, 1838, 1392, 1941, 1124, 1629, 1780, 1271, 1190, 1680, 1379,\n\n 1601, 1670, 1916, 1787, 1844, 2000, 1672, 1276, 1896, 1746, 1369, 1687, 1263, 1948, 1159,\n\n 1710, 1304, 1806, 1709, 1286, 1635, 1075, 1125, 1607, 1408, 1903, 1143, 1736, 1266, 1645,\n\n 1571, 1488, 1200, 211, 1148, 1585, 2005, 1724, 1071, 1690, 1189, 1101, 1315, 1452, 1622,\n\n 1074, 1486, 1209, 1253, 1422, 1235, 1354, 1399, 1675, 241, 1229, 1136, 1901, 1453, 1344,\n\n 1685, 1985, 1455, 1764, 1634, 1935, 1386, 1772, 1174, 1743, 1818, 1156, 1221, 167, 1398,\n\n 1552, 1816, 1197, 1829, 1930, 1812, 1983, 1185, 1579, 1928, 1892, 1978, 1720, 1584, 1506,\n\n 1245, 1539, 1653, 1876, 1883, 1982, 1114, 1406, 2002, 1765, 1175, 1947, 1519, 1943, 1566,\n\n 1361, 1830, 1679, 999, 1366, 1575, 1556, 1555, 1065, 1606, 1508, 1548, 1162, 1664, 1525,\n\n 1925, 1975, 1384, 1076, 1790, 1656, 1578, 1671, 1424, 757, 1485, 1677, 1583, 1395, 1793,\n\n 1111, 1522, 1195, 1128, 1123, 
1151, 1568, 1559, 1331, 1191, 1753, 1630, 1979, 953, 1480,\n\n 1655, 1100, 1419, 1560, 1667,\n\n ];\n\n\n\n println!(\"Solution: {}\", fix_expense_report(&input));\n\n}\n\n\n", "file_path": "day-1/src/matt.rs", "rank": 36, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let layout = load_input(\"input.txt\").unwrap();\n\n let mut grid = Grid::new(layout);\n\n\n\n println!(\"Part One: {} \", part_one(&mut grid.clone()));\n\n println!(\"Part Two: {} \", part_two(&mut grid));\n\n}\n\n\n", "file_path": "day-11/src/matt.rs", "rank": 37, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let groups = load_input(\"src/matt.txt\").unwrap();\n\n\n\n println!(\"Part One: {} \", part_one(&groups));\n\n println!(\"Part Two: {} \", part_two(&groups));\n\n}\n\n\n", "file_path": "day-6/src/matt.rs", "rank": 39, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let entries = get_entries();\n\n\n\n let mut start = Instant::now();\n\n part_1(&entries);\n\n println!(\"Time Taken: {:?}\", start.elapsed());\n\n\n\n println!(\"---------------\");\n\n\n\n start = Instant::now();\n\n part_2(&entries);\n\n println!(\"Time Taken: {:?}\", start.elapsed());\n\n}\n", "file_path": "day-1/src/vickz84259.rs", "rank": 40, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let program = load_program(\"input.txt\").unwrap();\n\n\n\n println!(\"Part One: {} \", part_one(&program).0);\n\n println!(\"Part Two: {} \", part_two(&program));\n\n}\n\n\n", "file_path": "day-8/src/matt.rs", "rank": 41, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let passes = input(\"src/matt.txt\").unwrap();\n\n\n\n println!(\"Part One: {} \", part_one(&passes));\n\n println!(\"Part Two: {} \", part_two(&passes));\n\n}\n\n\n", "file_path": "day-5/src/matt.rs", "rank": 42, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let groups = get_groups();\n\n\n\n println!(\"Part 1: \\n----------\");\n\n part_1(&groups);\n\n\n\n println!(\"----------\");\n\n 
println!(\"Part 2: \\n----------\");\n\n part_2(&groups);\n\n}\n", "file_path": "day-6/src/vickz84259.rs", "rank": 43, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let passports = input(\"src/matt.txt\").unwrap();\n\n\n\n println!(\n\n \"Part One: {} \",\n\n passports.iter().filter(|x| x.is_valid()).count(),\n\n );\n\n println!(\n\n \"Part Two: {} \",\n\n passports.iter().filter(|x| x.is_valid_strict()).count(),\n\n );\n\n}\n\n\n", "file_path": "day-4/src/matt.rs", "rank": 44, "score": 112124.63186830975 }, { "content": "fn main() {\n\n let joltages = load_input(\"input.txt\").unwrap();\n\n\n\n println!(\"Part One: {} \", part_one(&joltages));\n\n println!(\"Part Two: {} \", part_two(&joltages));\n\n}\n\n\n", "file_path": "day-10/src/matt.rs", "rank": 45, "score": 112124.63186830975 }, { "content": "fn part_two(rs: &RuleSet, c: &str) -> usize {\n\n let rules = rs.get(c).unwrap();\n\n if rules.is_empty() {\n\n 0\n\n } else {\n\n let mut count = 0;\n\n for rule in rules {\n\n count += rule.0 + rule.0 * part_two(rs, &rule.1)\n\n }\n\n count\n\n }\n\n}\n\n\n", "file_path": "day-7/src/matt.rs", "rank": 46, "score": 107881.57240591626 }, { "content": "fn part_one(rs: &RuleSet, c: &str) -> usize {\n\n let mut count = 0;\n\n for color in rs.keys() {\n\n count += recursive_find(rs, c, rs.get(color).unwrap());\n\n }\n\n count\n\n}\n\n\n", "file_path": "day-7/src/matt.rs", "rank": 47, "score": 107860.509458654 }, { "content": "fn bag_count<'a, 'b>(cache: &'b mut NumCache<'a>, bags: &'a Bags, bag: &'a str) -> u32 {\n\n if cache.contains_key(bag) {\n\n return cache[bag];\n\n }\n\n\n\n let contents = &bags[bag];\n\n let result = (0..contents.length)\n\n .map(|index| {\n\n let (number, inner_bag) = &contents[index].split_at(2);\n\n let number: u32 = number.trim().parse().unwrap();\n\n\n\n number + (number * bag_count(cache, bags, inner_bag))\n\n })\n\n .sum();\n\n\n\n cache.insert(bag, result);\n\n result\n\n}\n\n\n", "file_path": "day-7/src/vickz84259.rs", 
"rank": 48, "score": 98881.33591567911 }, { "content": "fn can_contain<'a, 'b>(cache: &'b mut BoolCache<'a>, bags: &'a Bags, bag: &'a str) -> bool {\n\n if cache.contains_key(bag) {\n\n return cache[bag];\n\n }\n\n\n\n let contents = &bags[bag];\n\n let result = (0..contents.length).any(|index| {\n\n let (_, inner_bag) = &contents[index].split_at(2);\n\n *inner_bag == \"shiny gold\" || can_contain(cache, bags, inner_bag)\n\n });\n\n\n\n cache.insert(bag, result);\n\n result\n\n}\n\n\n", "file_path": "day-7/src/vickz84259.rs", "rank": 49, "score": 98881.33591567911 }, { "content": "fn part_two(program: &Program) -> isize {\n\n let mut nth = 0;\n\n loop {\n\n let mut prog = program.to_owned();\n\n flip_nth_instruction(&mut prog, nth);\n\n let (acc, looped) = part_one(&prog);\n\n if !looped {\n\n return acc;\n\n } else {\n\n nth += 1;\n\n }\n\n }\n\n}\n\n\n", "file_path": "day-8/src/matt.rs", "rank": 50, "score": 95073.0911454026 }, { "content": "fn part_one(program: &Program) -> (isize, bool) {\n\n let mut acc = 0;\n\n let mut will_loop = false;\n\n let mut pos = 0;\n\n let mut executed = vec![];\n\n while !will_loop && pos < program.len() {\n\n let ins = &program[pos];\n\n if executed.contains(&pos) {\n\n will_loop = true;\n\n continue;\n\n }\n\n executed.push(pos);\n\n match ins {\n\n Instruction::Acc(Sign::Positive, x) => {\n\n acc += x;\n\n pos += 1;\n\n }\n\n Instruction::Acc(Sign::Negative, x) => {\n\n acc -= x;\n\n pos += 1;\n\n }\n\n Instruction::Jmp(Sign::Positive, x) => pos += *x as usize,\n\n Instruction::Jmp(Sign::Negative, x) => pos -= *x as usize,\n\n _ => pos += 1,\n\n }\n\n }\n\n (acc, will_loop)\n\n}\n", "file_path": "day-8/src/matt.rs", "rank": 52, "score": 91065.5337066305 }, { "content": "fn parse_boarding_pass(s: &str) -> BoardingPass {\n\n let (row, col) = s.split_at(s.len() - 3);\n\n let row_binstr = row\n\n .chars()\n\n .map(|x| match x {\n\n 'F' => '0',\n\n 'B' => '1',\n\n _ => unreachable!(),\n\n })\n\n .collect::<String>();\n\n let 
col_binstr = col\n\n .chars()\n\n .map(|x| match x {\n\n 'R' => '1',\n\n 'L' => '0',\n\n _ => unreachable!(),\n\n })\n\n .collect::<String>();\n\n let row = usize::from_str_radix(&row_binstr, 2).unwrap();\n\n let col = usize::from_str_radix(&col_binstr, 2).unwrap();\n\n BoardingPass::new(row, col)\n\n}\n", "file_path": "day-5/src/matt.rs", "rank": 53, "score": 90351.67390588598 }, { "content": "fn part_two(v: &Vec<GroupAnswers>) -> usize {\n\n v.into_iter()\n\n .map(|g| count_common_answers(g.to_vec(), AnswersType::Intersection))\n\n .sum()\n\n}\n", "file_path": "day-6/src/matt.rs", "rank": 54, "score": 87998.94568497874 }, { "content": "fn part_one(v: &Vec<GroupAnswers>) -> usize {\n\n v.into_iter()\n\n .map(|g| count_common_answers(g.to_vec(), AnswersType::Union))\n\n .sum()\n\n}\n", "file_path": "day-6/src/matt.rs", "rank": 55, "score": 87977.15394170694 }, { "content": "fn is_field_valid(f: &Field, val: &str) -> bool {\n\n match f {\n\n Field::Byr => {\n\n if let Ok(num) = val.parse::<i32>() {\n\n num >= 1920 && num <= 2002\n\n } else {\n\n false\n\n }\n\n }\n\n Field::Iyr => {\n\n if let Ok(num) = val.parse::<i32>() {\n\n num >= 2010 && num <= 2020\n\n } else {\n\n false\n\n }\n\n }\n\n Field::Eyr => {\n\n if let Ok(num) = val.parse::<i32>() {\n\n num >= 2020 && num <= 2030\n\n } else {\n", "file_path": "day-4/src/matt.rs", "rank": 56, "score": 84629.33473582048 }, { "content": "fn load_input(fname: &str) -> io::Result<Cypher> {\n\n let file = File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n Ok(buf\n\n .lines()\n\n .filter_map(|x| x.ok())\n\n .filter_map(|x| x.parse::<usize>().ok())\n\n .collect())\n\n}\n\n\n", "file_path": "day-9/src/matt.rs", "rank": 57, "score": 83041.84697218578 }, { "content": "fn load_program(fname: &str) -> io::Result<Program> {\n\n let file = File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n let mut program: Program = Vec::new();\n\n for line in buf.lines() {\n\n program.push(parse_line(line?))\n\n }\n\n 
Ok(program)\n\n}\n\n\n", "file_path": "day-8/src/matt.rs", "rank": 58, "score": 83041.84697218578 }, { "content": "fn load_input(fname: &str) -> io::Result<Joltages> {\n\n let file = File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n let mut js: Vec<_> = buf\n\n .lines()\n\n .filter_map(|x| x.ok())\n\n .filter_map(|x| x.parse::<usize>().ok())\n\n .collect();\n\n js.push(0);\n\n js.sort_unstable();\n\n js.push(js[js.len() - 1] + 3);\n\n Ok(js)\n\n}\n\n\n", "file_path": "day-10/src/matt.rs", "rank": 59, "score": 83041.84697218578 }, { "content": "fn load_input(fname: &str) -> io::Result<Layout> {\n\n let file = File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n Ok(buf\n\n .lines()\n\n .map(|l| {\n\n l.unwrap()\n\n .chars()\n\n .map(|c| match c {\n\n '.' => Pos::Floor,\n\n 'L' => Pos::Empty,\n\n '#' => Pos::Occupied,\n\n _ => unreachable!(),\n\n })\n\n .collect::<Vec<_>>()\n\n })\n\n .collect())\n\n}\n\n\n", "file_path": "day-11/src/matt.rs", "rank": 60, "score": 83041.84697218578 }, { "content": "fn get_lines() -> MapLines {\n\n let file = File::open(\"input_3.txt\").expect(\"Unable to read file\");\n\n let lines = io::BufReader::new(file).lines();\n\n\n\n lines.filter_map(Result::ok).collect()\n\n}\n\n\n", "file_path": "day-3/src/vickz84259.rs", "rank": 61, "score": 82474.84990810562 }, { "content": "fn flip_nth_instruction(program: &mut Program, nth: usize) {\n\n let mut nth = nth;\n\n for idx in 0..program.len() - 1 {\n\n let ins = program[idx];\n\n match ins {\n\n Instruction::Acc(_, _) => {}\n\n _ => {\n\n if nth == 0 {\n\n match ins {\n\n Instruction::Jmp(s, i) => program[idx] = Instruction::Nop(s, i),\n\n Instruction::Nop(s, i) => program[idx] = Instruction::Jmp(s, i),\n\n _ => {}\n\n }\n\n return;\n\n } else {\n\n nth -= 1;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "day-8/src/matt.rs", "rank": 62, "score": 82236.64266989756 }, { "content": "fn load_input(fname: &str) -> io::Result<RuleSet> {\n\n let file = 
File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n let mut ruleset: RuleSet = HashMap::new();\n\n for line in buf.lines() {\n\n let (color, rules) = parse_rule_line(line?);\n\n ruleset.insert(color, rules);\n\n }\n\n Ok(ruleset)\n\n}\n\n\n", "file_path": "day-7/src/matt.rs", "rank": 63, "score": 81557.10583756019 }, { "content": "fn input(fname: &str) -> io::Result<Vec<Passport>> {\n\n let mut input = vec![];\n\n let file = File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n let mut p = vec![];\n\n for item in buf.lines() {\n\n let line = item?;\n\n if line.is_empty() {\n\n input.push(Passport::from_seq(&p));\n\n p.clear();\n\n } else {\n\n p.push(line);\n\n }\n\n }\n\n input.push(Passport::from_seq(&p));\n\n Ok(input)\n\n}\n\n\n", "file_path": "day-4/src/matt.rs", "rank": 64, "score": 80717.5380422521 }, { "content": "#[test]\n\nfn test_two() {\n\n let ruleset = load_input(\"test2.txt\").unwrap();\n\n assert_eq!(126, part_two(&ruleset, \"shiny gold\"));\n\n}\n", "file_path": "day-7/src/matt.rs", "rank": 65, "score": 79944.13752756664 }, { "content": "#[test]\n\nfn test_one() {\n\n let ruleset = load_input(\"test.txt\").unwrap();\n\n assert_eq!(4, part_one(&ruleset, \"shiny gold\"));\n\n}\n", "file_path": "day-7/src/matt.rs", "rank": 66, "score": 79918.8452458889 }, { "content": "fn input(fname: &str) -> io::Result<Vec<BoardingPass>> {\n\n let file = File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n Ok(buf\n\n .lines()\n\n .map(|line| parse_boarding_pass(&line.unwrap()))\n\n .collect())\n\n}\n\n\n", "file_path": "day-5/src/matt.rs", "rank": 67, "score": 79232.7969076265 }, { "content": "fn load_input(fname: &str) -> io::Result<Vec<GroupAnswers>> {\n\n let file = File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n let mut groups = vec![];\n\n let mut group = vec![];\n\n for line in buf.lines() {\n\n let line = line?;\n\n if line.is_empty() {\n\n groups.push(vec_to_group(group.clone()));\n\n group.clear();\n\n } else 
{\n\n group.push(line)\n\n }\n\n }\n\n groups.push(vec_to_group(group.clone()));\n\n Ok(groups)\n\n}\n\n\n", "file_path": "day-6/src/matt.rs", "rank": 68, "score": 77841.14001825439 }, { "content": "fn count_common_answers(mut group: GroupAnswers, criteria: AnswersType) -> usize {\n\n match criteria {\n\n AnswersType::Union => group\n\n .into_iter()\n\n .fold(HashSet::new(), |acc, x| acc.union(&x).cloned().collect())\n\n .len(),\n\n AnswersType::Intersection => {\n\n let first = group.pop().unwrap();\n\n group\n\n .into_iter()\n\n .fold(first, |acc, x| acc.intersection(&x).cloned().collect())\n\n .len()\n\n }\n\n }\n\n}\n", "file_path": "day-6/src/matt.rs", "rank": 69, "score": 75933.09093889463 }, { "content": "fn load_input2(fname: &str) -> io::Result<Vec<(usize, usize)>> {\n\n let file = File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n let mut v = vec![];\n\n for line in buf.lines().skip(1) {\n\n let l = line?;\n\n for x in l\n\n .split(',')\n\n .enumerate()\n\n .filter_map(|l| match l.1.parse::<usize>() {\n\n Ok(v) => Some((l.0, v)),\n\n Err(_) => None,\n\n })\n\n {\n\n v.push(x);\n\n }\n\n }\n\n Ok(v)\n\n}\n\n\n", "file_path": "day-13/src/matt.rs", "rank": 70, "score": 75790.46693762776 }, { "content": "fn recursive_find(rs: &RuleSet, c: &str, rules: &[Rule]) -> usize {\n\n for rule in rules {\n\n if rule.1 == c || recursive_find(rs, c, rs.get(&rule.1).unwrap()) == 1 {\n\n return 1;\n\n }\n\n }\n\n 0\n\n}\n\n\n", "file_path": "day-7/src/matt.rs", "rank": 71, "score": 75790.46693762776 }, { "content": "fn load_input(fname: &str) -> io::Result<(usize, Vec<usize>)> {\n\n let file = File::open(fname)?;\n\n let buf = io::BufReader::new(file);\n\n let mut v = buf.lines().filter_map(|x| x.ok());\n\n let time: usize = v.next().unwrap().parse().unwrap();\n\n let buses: Vec<usize> = v\n\n .next()\n\n .unwrap()\n\n .split(',')\n\n .filter_map(|x| x.parse::<usize>().ok())\n\n .collect();\n\n\n\n Ok((time, buses))\n\n}\n\n\n", "file_path": 
"day-13/src/matt.rs", "rank": 72, "score": 75790.46693762776 }, { "content": "fn read_lines() -> Vec<String> {\n\n let file = File::open(\"input_2.txt\").expect(\"Unable to read file\");\n\n io::BufReader::new(file)\n\n .lines()\n\n .filter_map(Result::ok)\n\n .collect()\n\n}\n\n\n", "file_path": "day-2/src/vickz84259.rs", "rank": 73, "score": 71707.55515247348 }, { "content": "fn part_2(lines: &Vec<String>) {\n\n println!(\"Part 2:\");\n\n\n\n let valid_count = lines.iter().filter(is_valid_password_2).count();\n\n println!(\"Answer: {} passwords\", valid_count)\n\n}\n\n\n", "file_path": "day-2/src/vickz84259.rs", "rank": 74, "score": 70133.31607882609 }, { "content": "fn parse_line(l: String) -> Instruction {\n\n let parts: Vec<_> = l.split(' ').collect();\n\n let arg = parts[1];\n\n let sign = match &arg[0..1] {\n\n \"+\" => Sign::Positive,\n\n \"-\" => Sign::Negative,\n\n _ => unreachable!(),\n\n };\n\n let offset: isize = arg[1..].parse().unwrap();\n\n match parts[0] {\n\n \"acc\" => Instruction::Acc(sign, offset),\n\n \"jmp\" => Instruction::Jmp(sign, offset),\n\n \"nop\" => Instruction::Nop(sign, offset),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "day-8/src/matt.rs", "rank": 75, "score": 70133.31607882609 }, { "content": "fn part_1(lines: &Vec<String>) {\n\n println!(\"Part 1:\");\n\n\n\n let valid_count = lines.iter().filter(is_valid_password).count();\n\n println!(\"Answer: {} passwords\", valid_count);\n\n}\n\n\n", "file_path": "day-2/src/vickz84259.rs", "rank": 76, "score": 70133.31607882609 }, { "content": "fn part_two(joltages: &[usize]) -> usize {\n\n let mut arrangements = vec![0; *joltages.iter().max().unwrap() + 1];\n\n arrangements[0] = 1;\n\n for jolt in joltages {\n\n for source in jolt.saturating_sub(3)..*jolt {\n\n arrangements[*jolt] += arrangements[source];\n\n }\n\n }\n\n *arrangements.last().unwrap()\n\n}\n", "file_path": "day-10/src/matt.rs", "rank": 77, "score": 68318.65099290214 }, { "content": "fn part_one(joltages: &[usize]) 
-> usize {\n\n let mut three_count = 0;\n\n let mut one_count = 0;\n\n\n\n for diff in joltages.windows(2).map(|w| w[1] - w[0]) {\n\n if diff == 3 {\n\n three_count += 1;\n\n } else if diff == 1 {\n\n one_count += 1;\n\n }\n\n }\n\n three_count * one_count\n\n}\n\n\n", "file_path": "day-10/src/matt.rs", "rank": 78, "score": 68296.07821208717 }, { "content": "fn parse_rule_line(l: String) -> (Color, Rules) {\n\n let mut rules: Rules = vec![];\n\n let parts: Vec<_> = l.split(\"bags contain\").collect();\n\n let color: Color = parts[0].trim().to_string();\n\n for part in parts[1].split(',') {\n\n if part.contains(\"no other bags\") {\n\n continue;\n\n }\n\n let mut subs = part.trim().split(' ');\n\n let num: usize = subs.next().unwrap().trim().parse().unwrap();\n\n let sub_color: String = subs.take(2).fold(String::new(), |acc, x| {\n\n format!(\"{} {}\", acc, x).trim().to_string()\n\n });\n\n rules.push((num, sub_color));\n\n }\n\n (color, rules)\n\n}\n", "file_path": "day-7/src/matt.rs", "rank": 79, "score": 65236.82009075896 }, { "content": "fn part_two(xs: &Vec<BoardingPass>) -> usize {\n\n let mut ids: Vec<_> = xs.iter().map(|x| x.seat_id).collect();\n\n ids.sort();\n\n let min = *ids.iter().min().unwrap();\n\n let max = *ids.iter().max().unwrap();\n\n for idx in min..=max {\n\n if !ids.contains(&idx) {\n\n return idx;\n\n }\n\n }\n\n 0\n\n}\n", "file_path": "day-5/src/matt.rs", "rank": 80, "score": 63662.16909657382 }, { "content": "fn part_one(xs: &Vec<BoardingPass>) -> usize {\n\n xs.iter().map(|x| x.seat_id).max().unwrap()\n\n}\n", "file_path": "day-5/src/matt.rs", "rank": 81, "score": 63641.10614931157 }, { "content": "fn find_irreducible(v: &[usize]) -> Option<usize> {\n\n let l = v.len() - 1;\n\n for i in &v[..l] {\n\n for j in &v[..l] {\n\n if i + j == v[l] {\n\n return None;\n\n }\n\n }\n\n }\n\n Some(v[l])\n\n}\n\n\n", "file_path": "day-9/src/matt.rs", "rank": 82, "score": 63089.53869693348 }, { "content": "#[derive(Clone, Copy, Debug, 
PartialEq)]\n\nenum Instruction {\n\n Acc(Sign, isize),\n\n Jmp(Sign, isize),\n\n Nop(Sign, isize),\n\n}\n\n\n", "file_path": "day-8/src/matt.rs", "rank": 83, "score": 62806.80430741582 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq)]\n\nenum Pos {\n\n Floor,\n\n Empty,\n\n Occupied,\n\n}\n\n\n", "file_path": "day-11/src/matt.rs", "rank": 84, "score": 62806.80430741582 }, { "content": "#[derive(Debug)]\n\nenum Entry {\n\n StrVal(String),\n\n IntVal(u32),\n\n}\n\n\n", "file_path": "day-4/src/vickz84259.rs", "rank": 85, "score": 62806.80430741582 }, { "content": "#[derive(Debug, PartialEq, Eq, Hash)]\n\nenum Field {\n\n Byr,\n\n Iyr,\n\n Hgt,\n\n Hcl,\n\n Ecl,\n\n Pid,\n\n Eyr,\n\n Cid,\n\n}\n", "file_path": "day-4/src/matt.rs", "rank": 86, "score": 62806.80430741582 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq)]\n\nenum Sign {\n\n Positive,\n\n Negative,\n\n}\n\n\n", "file_path": "day-8/src/matt.rs", "rank": 87, "score": 62806.80430741582 }, { "content": "fn part_two(cypher: &Cypher, length: usize) -> usize {\n\n let n = part_one(cypher, length);\n\n let mut sz = 2;\n\n loop {\n\n for window in cypher.windows(sz) {\n\n let window: Vec<_> = window.iter().collect();\n\n if window.iter().fold(0, |acc, x| acc + *x) == n {\n\n return *window.iter().min().unwrap() + *window.iter().max().unwrap();\n\n }\n\n }\n\n sz += 1;\n\n }\n\n}\n\n\n", "file_path": "day-9/src/matt.rs", "rank": 88, "score": 62493.03685525836 }, { "content": "fn part_one(cypher: &Cypher, length: usize) -> usize {\n\n for window in cypher.windows(length) {\n\n if let Some(num) = find_irreducible(window) {\n\n return num;\n\n }\n\n }\n\n 0\n\n}\n\n\n", "file_path": "day-9/src/matt.rs", "rank": 89, "score": 62471.9739079961 }, { "content": "struct Slope {\n\n right: usize,\n\n down: usize,\n\n}\n\nimpl Slope {\n\n fn new(right: usize, down: usize) -> Slope {\n\n Slope { right, down }\n\n }\n\n}\n\n\n", "file_path": "day-3/src/matt.rs", "rank": 90, "score": 62365.96061363943 }, { 
"content": "#[derive(Debug)]\n\nstruct Group {\n\n number: u32,\n\n questions: HashMap<char, u32>,\n\n}\n\n\n\nimpl Group {\n\n fn new() -> Self {\n\n Group {\n\n number: 0,\n\n questions: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for Group {\n\n type Err = &'static str;\n\n\n\n fn from_str(s: &str) -> Result<Group, Self::Err> {\n\n let mut group = Group::new();\n\n\n", "file_path": "day-6/src/vickz84259.rs", "rank": 91, "score": 62365.96061363943 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Grid {\n\n layout: Vec<Row>,\n\n}\n\nimpl Grid {\n\n fn new(layout: Vec<Row>) -> Grid {\n\n Grid { layout }\n\n }\n\n\n\n fn get(&self, x: usize, y: usize) -> Pos {\n\n self.layout[x][y]\n\n }\n\n\n\n fn set(&mut self, x: usize, y: usize, pos: Pos) {\n\n self.layout[x][y] = pos\n\n }\n\n\n\n fn length(&self) -> usize {\n\n self.layout.len()\n\n }\n\n\n", "file_path": "day-11/src/matt.rs", "rank": 92, "score": 62365.96061363943 }, { "content": "#[derive(Default, Debug)]\n\nstruct Passport {\n\n birth_year: Option<Entry>,\n\n issue_year: Option<Entry>,\n\n exp_year: Option<Entry>,\n\n height: Option<Entry>,\n\n hair_color: Option<Entry>,\n\n eye_color: Option<Entry>,\n\n pid: Option<Entry>,\n\n cid: Option<Entry>,\n\n}\n\n\n", "file_path": "day-4/src/vickz84259.rs", "rank": 93, "score": 62365.96061363943 }, { "content": "fn vec_to_group(v: Vec<String>) -> GroupAnswers {\n\n v.into_iter()\n\n .map(|line| line.chars().collect::<Answers>())\n\n .collect()\n\n}\n", "file_path": "day-6/src/matt.rs", "rank": 94, "score": 62299.74448328186 }, { "content": "enum AnswersType {\n\n Intersection,\n\n Union,\n\n}\n\n\n", "file_path": "day-6/src/matt.rs", "rank": 95, "score": 61557.79408571763 }, { "content": "enum ValidationAlgo {\n\n One,\n\n Two,\n\n}\n\n\n\nimpl PwdEntry {\n\n fn new(tup: (i32, i32, char, String)) -> PwdEntry {\n\n PwdEntry {\n\n low: tup.0,\n\n high: tup.1,\n\n pat: tup.2,\n\n pwd: tup.3,\n\n }\n\n }\n\n}\n\n\n", "file_path": "day-2/src/matt.rs", "rank": 
96, "score": 61557.79408571763 }, { "content": "#[derive(Debug, Clone)]\n\nenum GridPoint {\n\n OpenSquare,\n\n Tree,\n\n}\n", "file_path": "day-3/src/matt.rs", "rank": 97, "score": 61557.79408571763 }, { "content": "struct BoardingPass {\n\n row_range: (u32, u32),\n\n col_range: (u32, u32),\n\n}\n\n\n", "file_path": "day-5/src/vickz84259.rs", "rank": 98, "score": 61124.41965358589 }, { "content": "#[derive(Debug)]\n\nstruct BagContents {\n\n data: String,\n\n length: usize,\n\n}\n\n\n\nimpl BagContents {\n\n fn new(line: String) -> (String, Self) {\n\n let (name, bags_str) = line.split(\"contain\").collect_tuple().unwrap();\n\n\n\n let length = match bags_str.contains(\"no other bag\") {\n\n true => 0,\n\n false => bags_str.split(\",\").count(),\n\n };\n\n let name = name.split(\" bag\").nth(0).unwrap().to_string();\n\n let contents = BagContents {\n\n data: bags_str.to_string(),\n\n length,\n\n };\n\n\n\n (name, contents)\n", "file_path": "day-7/src/vickz84259.rs", "rank": 99, "score": 61124.41965358589 } ]
Rust
libs/prisma-models/src/record.rs
VanCoding/prisma-engines
bec4da3195df1ce40b2559939510abce2159b635
use crate::{DomainError, ModelProjection, OrderBy, PrismaValue, RecordProjection, ScalarFieldRef, SortOrder}; use itertools::Itertools; use std::collections::HashMap; #[derive(Debug, Clone)] pub struct SingleRecord { pub record: Record, pub field_names: Vec<String>, } impl Into<ManyRecords> for SingleRecord { fn into(self) -> ManyRecords { ManyRecords { records: vec![self.record], field_names: self.field_names, } } } impl SingleRecord { pub fn new(record: Record, field_names: Vec<String>) -> Self { Self { record, field_names } } pub fn projection(&self, projection: &ModelProjection) -> crate::Result<RecordProjection> { self.record.projection(&self.field_names, projection) } pub fn get_field_value(&self, field: &str) -> crate::Result<&PrismaValue> { self.record.get_field_value(&self.field_names, field) } } #[derive(Debug, Clone, Default)] pub struct ManyRecords { pub records: Vec<Record>, pub field_names: Vec<String>, } impl ManyRecords { pub fn new(field_names: Vec<String>) -> Self { Self { records: Vec::new(), field_names, } } pub fn empty(selected_fields: &ModelProjection) -> Self { Self { records: Vec::new(), field_names: selected_fields.names().map(|n| n.to_string()).collect(), } } pub fn from_projection(projection: Vec<Vec<PrismaValue>>, selected_fields: &ModelProjection) -> Self { Self { records: projection .into_iter() .map(|v| Record { values: v, parent_id: None, }) .collect(), field_names: selected_fields.db_names().collect(), } } pub fn order_by(&mut self, order_bys: &[OrderBy]) { let field_indices: HashMap<&str, usize> = self .field_names .iter() .enumerate() .map(|(i, name)| (name.as_str(), i)) .collect(); self.records.sort_by(|a, b| { let mut orderings = order_bys.iter().map(|o| { let index = field_indices[o.field.db_name()]; match o.sort_order { SortOrder::Ascending => a.values[index].cmp(&b.values[index]), SortOrder::Descending => b.values[index].cmp(&a.values[index]), } }); orderings .next() .map(|first| orderings.fold(first, |acc, ord| 
acc.then(ord))) .unwrap() }) } pub fn push(&mut self, record: Record) { self.records.push(record); } pub fn projections(&self, model_projection: &ModelProjection) -> crate::Result<Vec<RecordProjection>> { self.records .iter() .map(|record| record.projection(&self.field_names, model_projection)) .collect() } pub fn as_pairs(&self) -> Vec<Vec<(String, PrismaValue)>> { self.records .iter() .map(|record| { record .values .iter() .zip(self.field_names.iter()) .map(|(value, name)| (name.clone(), value.clone())) .collect() }) .collect() } pub fn reverse(&mut self) { self.records.reverse(); } pub fn with_unique_records(mut self) -> Self { self.records = self.records.into_iter().unique().collect(); self } } #[derive(Debug, Default, Clone, Eq, PartialEq, Hash)] pub struct Record { pub values: Vec<PrismaValue>, pub parent_id: Option<RecordProjection>, } impl Record { pub fn new(values: Vec<PrismaValue>) -> Record { Record { values, ..Default::default() } } pub fn projection( &self, field_names: &[String], model_projection: &ModelProjection, ) -> crate::Result<RecordProjection> { let pairs: Vec<(ScalarFieldRef, PrismaValue)> = model_projection .fields() .into_iter() .flat_map(|field| { field.scalar_fields().into_iter().map(|field| { self.get_field_value(field_names, field.db_name()) .map(|val| (field, val.clone())) }) }) .collect::<crate::Result<Vec<_>>>()?; Ok(RecordProjection { pairs }) } pub fn identifying_values( &self, field_names: &[String], model_projection: &ModelProjection, ) -> crate::Result<Vec<&PrismaValue>> { let x: Vec<&PrismaValue> = model_projection .fields() .into_iter() .flat_map(|field| { field .scalar_fields() .into_iter() .map(|source_field| self.get_field_value(field_names, &source_field.name)) }) .collect::<crate::Result<Vec<_>>>()?; Ok(x) } pub fn get_field_value(&self, field_names: &[String], field: &str) -> crate::Result<&PrismaValue> { let index = field_names.iter().position(|r| r == field).map(Ok).unwrap_or_else(|| { Err(DomainError::FieldNotFound { 
name: field.to_string(), model: format!( "Field not found in record {:?}. Field names are: {:?}, looking for: {:?}", &self, &field_names, field ), }) })?; Ok(&self.values[index]) } pub fn set_parent_id(&mut self, parent_id: RecordProjection) { self.parent_id = Some(parent_id); } }
use crate::{DomainError, ModelProjection, OrderBy, PrismaValue, RecordProjection, ScalarFieldRef, SortOrder}; use itertools::Itertools; use std::collections::HashMap; #[derive(Debug, Clone)] pub struct SingleRecord { pub record: Record, pub field_names: Vec<String>, } impl Into<ManyRecords> for SingleRecord { fn into(self) -> ManyRecords { ManyRecords { records: vec![self.record], field_names: self.field_names, } } } impl SingleRecord { pub fn new(record: Record, field_names: Vec<String>) -> Self { Self { record, field_names } } pub fn projection(&self, projection: &ModelProjection) -> crate::Result<RecordProjection> { self.record.projection(&self.field_names, projection) } pub fn get_field_value(&self, field: &str) -> crate::Result<&PrismaValue> { self.record.get_field_value(&self.field_names, field) } } #[derive(Debug, Clone, Default)] pub struct ManyRecords { pub records: Vec<Record>, pub field_names: Vec<String>, } impl ManyRecords { pub fn new(field_names: Vec<String>) -> Self { Self { records: Vec::new(), field_names, } } pub fn empty(selected_fields: &ModelProjection) -> Self { Self { records: Vec::new(), field_names: selected_fields.names().map(|n| n.to_string()).collect(), } } pub fn from_projection(projection: Vec<Vec<PrismaValue>>, selected_fields: &ModelProjection) -> Self { Self { records: projection .into_iter() .map(|v| Record { values: v, parent_id: None, }) .collect(), field_names: selected_fields.db_names().collect(), } } pub fn order_by(&mut self, order_bys: &[OrderBy]) { let field_indices: HashMap<&str, usize> = self .field_names .iter() .enumerate() .map(|(i, name)| (name.as_str(), i)) .collect(); self.records.sort_by(|a, b| { let mut orderings = order_bys.iter().map(|o| { let index = field_indices[o.field.db_name()];
}); orderings .next() .map(|first| orderings.fold(first, |acc, ord| acc.then(ord))) .unwrap() }) } pub fn push(&mut self, record: Record) { self.records.push(record); } pub fn projections(&self, model_projection: &ModelProjection) -> crate::Result<Vec<RecordProjection>> { self.records .iter() .map(|record| record.projection(&self.field_names, model_projection)) .collect() } pub fn as_pairs(&self) -> Vec<Vec<(String, PrismaValue)>> { self.records .iter() .map(|record| { record .values .iter() .zip(self.field_names.iter()) .map(|(value, name)| (name.clone(), value.clone())) .collect() }) .collect() } pub fn reverse(&mut self) { self.records.reverse(); } pub fn with_unique_records(mut self) -> Self { self.records = self.records.into_iter().unique().collect(); self } } #[derive(Debug, Default, Clone, Eq, PartialEq, Hash)] pub struct Record { pub values: Vec<PrismaValue>, pub parent_id: Option<RecordProjection>, } impl Record { pub fn new(values: Vec<PrismaValue>) -> Record { Record { values, ..Default::default() } } pub fn projection( &self, field_names: &[String], model_projection: &ModelProjection, ) -> crate::Result<RecordProjection> { let pairs: Vec<(ScalarFieldRef, PrismaValue)> = model_projection .fields() .into_iter() .flat_map(|field| { field.scalar_fields().into_iter().map(|field| { self.get_field_value(field_names, field.db_name()) .map(|val| (field, val.clone())) }) }) .collect::<crate::Result<Vec<_>>>()?; Ok(RecordProjection { pairs }) } pub fn identifying_values( &self, field_names: &[String], model_projection: &ModelProjection, ) -> crate::Result<Vec<&PrismaValue>> { let x: Vec<&PrismaValue> = model_projection .fields() .into_iter() .flat_map(|field| { field .scalar_fields() .into_iter() .map(|source_field| self.get_field_value(field_names, &source_field.name)) }) .collect::<crate::Result<Vec<_>>>()?; Ok(x) } pub fn get_field_value(&self, field_names: &[String], field: &str) -> crate::Result<&PrismaValue> { let index = field_names.iter().position(|r| r == 
field).map(Ok).unwrap_or_else(|| { Err(DomainError::FieldNotFound { name: field.to_string(), model: format!( "Field not found in record {:?}. Field names are: {:?}, looking for: {:?}", &self, &field_names, field ), }) })?; Ok(&self.values[index]) } pub fn set_parent_id(&mut self, parent_id: RecordProjection) { self.parent_id = Some(parent_id); } }
match o.sort_order { SortOrder::Ascending => a.values[index].cmp(&b.values[index]), SortOrder::Descending => b.values[index].cmp(&a.values[index]), }
if_condition
[ { "content": "pub fn replace_field_names(target: &mut Vec<String>, old_name: &str, new_name: &str) {\n\n target\n\n .iter_mut()\n\n .map(|v| {\n\n if v == old_name {\n\n *v = new_name.to_string()\n\n }\n\n })\n\n .for_each(drop);\n\n}\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/introspection_helpers.rs", "rank": 0, "score": 409725.11468278023 }, { "content": "/// Attempts to match a given name to the (schema) name of a compound indexes on the model and returns the first match.\n\npub fn resolve_index_fields(name: &str, model: &ModelRef) -> Option<Vec<ScalarFieldRef>> {\n\n model\n\n .unique_indexes()\n\n .into_iter()\n\n .find(|index| schema_builder::compound_index_field_name(index) == name)\n\n .map(|index| index.fields())\n\n}\n", "file_path": "query-engine/core/src/query_graph_builder/extractors/utils.rs", "rank": 1, "score": 408287.1401772858 }, { "content": "/// Computes a compound field name based on an index.\n\npub fn compound_index_field_name(index: &Index) -> String {\n\n index.name.clone().unwrap_or_else(|| {\n\n let index_fields = index.fields();\n\n let field_names: Vec<&str> = index_fields.iter().map(|sf| sf.name.as_ref()).collect();\n\n\n\n field_names.join(\"_\")\n\n })\n\n}\n\n\n", "file_path": "query-engine/core/src/schema_builder/utils.rs", "rank": 2, "score": 377282.21544967295 }, { "content": "/// Field convenience wrapper function.\n\npub fn input_field<T, S>(name: T, field_types: S, default_value: Option<dml::DefaultValue>) -> InputField\n\nwhere\n\n T: Into<String>,\n\n S: Into<Vec<InputType>>,\n\n{\n\n InputField {\n\n name: name.into(),\n\n field_types: field_types.into(),\n\n default_value,\n\n is_required: true,\n\n deprecation: None,\n\n }\n\n}\n\n\n\nimpl Into<Vec<InputType>> for InputType {\n\n fn into(self) -> Vec<InputType> {\n\n vec![self]\n\n }\n\n}\n\n\n", "file_path": "query-engine/core/src/schema_builder/utils.rs", "rank": 3, "score": 368107.52360815764 }, { "content": "/// Attempts to 
resolve a field name to a compound field.\n\npub fn resolve_compound_field(name: &str, model: &ModelRef) -> Option<Vec<ScalarFieldRef>> {\n\n resolve_compound_id(name, model).or_else(|| resolve_index_fields(name, model))\n\n}\n\n\n", "file_path": "query-engine/core/src/query_graph_builder/extractors/utils.rs", "rank": 4, "score": 366097.2508421823 }, { "content": "pub fn parse_field(model_name: &str, token: &Token) -> Result<Field, DatamodelError> {\n\n let mut name: Option<Identifier> = None;\n\n let mut attributes: Vec<Attribute> = Vec::new();\n\n let mut field_type: Option<((FieldArity, String), Span)> = None;\n\n let mut comments: Vec<String> = Vec::new();\n\n\n\n for current in token.relevant_children() {\n\n match current.as_rule() {\n\n Rule::non_empty_identifier => name = Some(current.to_id()),\n\n Rule::field_type => field_type = Some((parse_field_type(&current)?, Span::from_pest(current.as_span()))),\n\n Rule::LEGACY_COLON => {\n\n return Err(DatamodelError::new_legacy_parser_error(\n\n \"Field declarations don't require a `:`.\",\n\n Span::from_pest(current.as_span()),\n\n ))\n\n }\n\n Rule::attribute => attributes.push(parse_attribute(&current)),\n\n Rule::doc_comment_and_new_line => comments.push(parse_doc_comment(&current)),\n\n Rule::doc_comment => comments.push(parse_doc_comment(&current)),\n\n _ => parsing_catch_all(&current, \"field\"),\n", "file_path": "libs/datamodel/core/src/ast/parser/parse_field.rs", "rank": 5, "score": 354728.341934288 }, { "content": "/// Attempts to match a given name to the (schema) name of a compound id field on the model.\n\npub fn resolve_compound_id(name: &str, model: &ModelRef) -> Option<Vec<ScalarFieldRef>> {\n\n model.fields().id().and_then(|fields| {\n\n let names = fields.iter().map(|f| f.name.clone()).collect::<Vec<_>>();\n\n\n\n if name == schema_builder::compound_id_field_name(&names) {\n\n Some(fields)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": 
"query-engine/core/src/query_graph_builder/extractors/utils.rs", "rank": 6, "score": 350750.72111785196 }, { "content": "/// Creates a top level delete record query and adds it to the query graph.\n\npub fn delete_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedField) -> QueryGraphBuilderResult<()> {\n\n graph.flag_transactional();\n\n\n\n let where_arg = field.arguments.lookup(args::WHERE).unwrap();\n\n let filter = extract_unique_filter(where_arg.value.try_into()?, &model)?;\n\n\n\n // Prefetch read query for the delete\n\n let mut read_query = read::find_unique(field, Arc::clone(&model))?;\n\n read_query.add_filter(filter.clone());\n\n\n\n let read_node = graph.create_node(Query::Read(read_query));\n\n let delete_query = Query::Write(WriteQuery::DeleteRecord(DeleteRecord {\n\n model: Arc::clone(&model),\n\n record_filter: Some(filter.into()),\n\n }));\n\n\n\n let delete_node = graph.create_node(delete_query);\n\n utils::insert_deletion_checks(graph, &model, &read_node, &delete_node)?;\n\n\n\n graph.create_edge(\n", "file_path": "query-engine/core/src/query_graph_builder/write/delete.rs", "rank": 7, "score": 342846.70034863806 }, { "content": "/// Creates a create record query and adds it to the query graph, together with it's nested queries and companion read query.\n\npub fn create_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedField) -> QueryGraphBuilderResult<()> {\n\n graph.flag_transactional();\n\n\n\n let data_map = match field.arguments.lookup(args::DATA) {\n\n Some(data) => data.value.try_into()?,\n\n None => ParsedInputMap::new(),\n\n };\n\n\n\n let create_node = create::create_record_node(graph, Arc::clone(&model), data_map)?;\n\n\n\n // Follow-up read query on the write\n\n let read_query = read::find_unique(field, model.clone())?;\n\n let read_node = graph.create_node(Query::Read(read_query));\n\n\n\n graph.add_result_node(&read_node);\n\n graph.create_edge(\n\n &create_node,\n\n &read_node,\n\n 
QueryGraphDependency::ParentProjection(\n\n model.primary_identifier(),\n", "file_path": "query-engine/core/src/query_graph_builder/write/create.rs", "rank": 8, "score": 342846.432818367 }, { "content": "/// Creates an update record query and adds it to the query graph, together with it's nested queries and companion read query.\n\npub fn update_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedField) -> QueryGraphBuilderResult<()> {\n\n // \"where\"\n\n let where_arg: ParsedInputMap = field.arguments.lookup(args::WHERE).unwrap().value.try_into()?;\n\n let filter = extract_unique_filter(where_arg, &model)?;\n\n\n\n // \"data\"\n\n let data_argument = field.arguments.lookup(args::DATA).unwrap();\n\n let data_map: ParsedInputMap = data_argument.value.try_into()?;\n\n\n\n let update_node = update_record_node(graph, filter, Arc::clone(&model), data_map)?;\n\n\n\n let read_query = read::find_unique(field, model.clone())?;\n\n let read_node = graph.create_node(Query::Read(read_query));\n\n\n\n graph.add_result_node(&read_node);\n\n graph.create_edge(\n\n &update_node,\n\n &read_node,\n\n QueryGraphDependency::ParentProjection(\n\n model.primary_identifier(),\n", "file_path": "query-engine/core/src/query_graph_builder/write/update.rs", "rank": 9, "score": 342846.432818367 }, { "content": "pub fn upsert_record(graph: &mut QueryGraph, model: ModelRef, mut field: ParsedField) -> QueryGraphBuilderResult<()> {\n\n graph.flag_transactional();\n\n\n\n let where_arg: ParsedInputMap = field.arguments.lookup(args::WHERE).unwrap().value.try_into()?;\n\n\n\n let filter = extract_unique_filter(where_arg, &model)?;\n\n let model_id = model.primary_identifier();\n\n\n\n let create_argument = field.arguments.lookup(args::CREATE).unwrap();\n\n let update_argument = field.arguments.lookup(args::UPDATE).unwrap();\n\n\n\n let read_parent_records = utils::read_ids_infallible(model.clone(), model_id.clone(), filter.clone());\n\n let read_parent_records_node = 
graph.create_node(read_parent_records);\n\n\n\n let create_node = create::create_record_node(graph, Arc::clone(&model), create_argument.value.try_into()?)?;\n\n let update_node = update::update_record_node(graph, filter, Arc::clone(&model), update_argument.value.try_into()?)?;\n\n\n\n let read_query = read::find_unique(field, Arc::clone(&model))?;\n\n let read_node_create = graph.create_node(Query::Read(read_query.clone()));\n\n let read_node_update = graph.create_node(Query::Read(read_query));\n", "file_path": "query-engine/core/src/query_graph_builder/write/upsert.rs", "rank": 10, "score": 342840.5233943845 }, { "content": "pub fn deduplicate_relation_field_names(datamodel: &mut Datamodel) {\n\n let mut duplicated_relation_fields = vec![];\n\n\n\n for model in datamodel.models() {\n\n for field in model.relation_fields() {\n\n if model.fields().filter(|f| field.name == f.name()).count() > 1 {\n\n duplicated_relation_fields.push((\n\n model.name.clone(),\n\n field.name.clone(),\n\n field.relation_info.name.clone(),\n\n ));\n\n }\n\n }\n\n }\n\n\n\n duplicated_relation_fields\n\n .iter()\n\n .for_each(|(model, field, relation_name)| {\n\n let mut field = datamodel.find_model_mut(model).find_relation_field_mut(field);\n\n //todo self vs normal relation?\n\n field.name = format!(\"{}_{}\", field.name, &relation_name);\n\n });\n\n}\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/introspection_helpers.rs", "rank": 11, "score": 334310.0346108822 }, { "content": "fn map_enum_type(ctx: &mut BuilderContext, enum_name: &str) -> EnumType {\n\n let e = ctx\n\n .internal_data_model\n\n .find_enum(enum_name)\n\n .expect(\"Enum references must always be valid.\");\n\n\n\n e.into()\n\n}\n\n\n\npub(crate) fn affected_records_object_type(ctx: &mut BuilderContext) -> ObjectTypeWeakRef {\n\n let ident = Identifier::new(\"AffectedRowsOutput\".to_owned(), PRISMA_NAMESPACE);\n\n return_cached_output!(ctx, &ident);\n\n\n\n let object_type = 
Arc::new(object_type(\n\n ident.clone(),\n\n vec![field(fields::COUNT, vec![], OutputType::int(), None)],\n\n None,\n\n ));\n\n\n\n ctx.cache_output_type(ident, object_type.clone());\n", "file_path": "query-engine/core/src/schema_builder/output_types/output_objects.rs", "rank": 12, "score": 328863.82990505325 }, { "content": "/// Iterator to walk all the scalar fields in the schema, associating them with their parent model.\n\npub fn walk_scalar_fields(datamodel: &Datamodel) -> impl Iterator<Item = ScalarFieldWalker<'_>> + '_ {\n\n walk_models(datamodel).flat_map(|model| model.scalar_fields())\n\n}\n\n\n", "file_path": "libs/datamodel/core/src/walkers.rs", "rank": 13, "score": 325906.9834942487 }, { "content": "pub fn warning_enum_values_with_empty_names(affected: &[EnumAndValue]) -> Warning {\n\n Warning {\n\n code: 4,\n\n message: \"These enum values were commented out because their names are currently not supported by Prisma. Please provide valid ones that match [a-zA-Z][a-zA-Z0-9_]* using the `@map` attribute.\"\n\n .into(),\n\n affected: serde_json::to_value(&affected).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/warnings.rs", "rank": 14, "score": 324551.8679501915 }, { "content": "/// Enum type convenience wrapper function.\n\npub fn string_enum_type<T>(name: T, values: Vec<String>) -> EnumType\n\nwhere\n\n T: Into<String>,\n\n{\n\n EnumType::String(StringEnumType {\n\n name: name.into(),\n\n values,\n\n })\n\n}\n\n\n", "file_path": "query-engine/core/src/schema_builder/utils.rs", "rank": 15, "score": 322228.94784138334 }, { "content": "fn map_enum_input_type(ctx: &mut BuilderContext, enum_name: &str) -> InputType {\n\n let e = ctx\n\n .internal_data_model\n\n .find_enum(enum_name)\n\n .expect(\"Enum references must always be valid.\");\n\n\n\n let et: EnumType = e.into();\n\n\n\n et.into()\n\n}\n\n\n", "file_path": "query-engine/core/src/schema_builder/input_types/mod.rs", "rank": 16, "score": 
320936.0256823028 }, { "content": "fn parse_enum_value(enum_name: &str, token: &Token) -> Result<EnumValue, DatamodelError> {\n\n let mut name: Option<Identifier> = None;\n\n let mut attributes: Vec<Attribute> = vec![];\n\n let mut comments: Vec<String> = vec![];\n\n\n\n // todo validate that the identifier is valid???\n\n for current in token.relevant_children() {\n\n match current.as_rule() {\n\n Rule::non_empty_identifier => name = Some(current.to_id()),\n\n Rule::maybe_empty_identifier => name = Some(current.to_id()),\n\n Rule::attribute => attributes.push(parse_attribute(&current)),\n\n Rule::number => {\n\n return Err(DatamodelError::new_enum_validation_error(\n\n &format!(\n\n \"The enum value `{}` is not valid. Enum values must not start with a number.\",\n\n current.as_str()\n\n ),\n\n enum_name,\n\n Span::from_pest(token.as_span()),\n\n ));\n", "file_path": "libs/datamodel/core/src/ast/parser/parse_enum.rs", "rank": 17, "score": 316887.0845632129 }, { "content": "pub fn parse(filter_key: &str, field: &RelationFieldRef, input: ParsedInputValue) -> QueryGraphBuilderResult<Filter> {\n\n let value: Option<ParsedInputMap> = input.try_into()?;\n\n\n\n match (filter_key, value) {\n\n // Relation list filters\n\n (filters::SOME, Some(value)) => Ok(field.at_least_one_related(extract_filter(value, &field.related_model())?)),\n\n (filters::NONE, Some(value)) => Ok(field.no_related(extract_filter(value, &field.related_model())?)),\n\n (filters::EVERY, Some(value)) => Ok(field.every_related(extract_filter(value, &field.related_model())?)),\n\n\n\n // One-relation filters\n\n (filters::IS, Some(value)) => Ok(field.to_one_related(extract_filter(value, &field.related_model())?)),\n\n (filters::IS, None) => Ok(field.one_relation_is_null()),\n\n (filters::IS_NOT, Some(value)) => Ok(field.no_related(extract_filter(value, &field.related_model())?)),\n\n (filters::IS_NOT, None) => Ok(Filter::not(vec![field.one_relation_is_null()])),\n\n\n\n _ => 
Err(QueryGraphBuilderError::InputError(format!(\n\n \"Invalid filter key `{}` input combination for relation filter\",\n\n filter_key\n\n ))),\n\n }\n\n}\n", "file_path": "query-engine/core/src/query_graph_builder/extractors/filters/relation.rs", "rank": 18, "score": 316236.4286219381 }, { "content": "pub fn mssql_2017_url(schema_name: &str) -> String {\n\n let (host, port) = db_host_mssql_2017();\n\n\n\n format!(\n\n \"sqlserver://{host}:{port};database=master;schema={schema_name};user=SA;password=<YourStrong@Passw0rd>;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED\",\n\n schema_name = schema_name,\n\n host = host,\n\n port = port,\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 19, "score": 305499.70519978774 }, { "content": "pub fn postgres_11_url(db_name: &str) -> String {\n\n let (host, port) = db_host_and_port_postgres_11();\n\n\n\n format!(\n\n \"postgresql://postgres:prisma@{}:{}/{}?schema={}&statement_cache_size=0&socket_timeout=60\",\n\n host, port, db_name, SCHEMA_NAME\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 20, "score": 305499.70519978774 }, { "content": "pub fn postgres_13_url(db_name: &str) -> String {\n\n let (host, port) = db_host_and_port_postgres_13();\n\n\n\n format!(\n\n \"postgresql://postgres:prisma@{}:{}/{}?schema={}&statement_cache_size=0&socket_timeout=60\",\n\n host, port, db_name, SCHEMA_NAME\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 21, "score": 305499.70519978774 }, { "content": "pub fn mariadb_url(db_name: &str) -> String {\n\n let (host, port) = db_host_and_port_mariadb();\n\n\n\n // maximum length of identifiers on mysql\n\n let db_name = mysql_safe_identifier(db_name);\n\n\n\n format!(\n\n \"mysql://root:prisma@{host}:{port}/{db_name}?connect_timeout=20&socket_timeout=60\",\n\n host = host,\n\n port = port,\n\n db_name = db_name,\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 22, "score": 
305499.70519978774 }, { "content": "pub fn mssql_2019_url(schema_name: &str) -> String {\n\n let (host, port) = db_host_and_port_mssql_2019();\n\n\n\n format!(\n\n \"sqlserver://{host}:{port};database=master;schema={schema_name};user=SA;password=<YourStrong@Passw0rd>;trustServerCertificate=true;socket_timeout=60;isolationLevel=READ UNCOMMITTED\",\n\n schema_name = schema_name,\n\n host = host,\n\n port = port,\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 23, "score": 305499.70519978774 }, { "content": "pub fn postgres_10_url(db_name: &str) -> String {\n\n let (host, port) = db_host_and_port_postgres_10();\n\n\n\n format!(\n\n \"postgresql://postgres:prisma@{}:{}/{}?schema={}&statement_cache_size=0&socket_timeout=60\",\n\n host, port, db_name, SCHEMA_NAME\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 24, "score": 305499.70519978774 }, { "content": "pub fn postgres_9_url(db_name: &str) -> String {\n\n let (host, port) = db_host_and_port_postgres_9();\n\n\n\n format!(\n\n \"postgresql://postgres:prisma@{}:{}/{}?schema={}&statement_cache_size=0&socket_timeout=60\",\n\n host, port, db_name, SCHEMA_NAME\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 25, "score": 305499.70519978774 }, { "content": "pub fn mysql_5_6_url(db_name: &str) -> String {\n\n let (host, port) = db_host_and_port_mysql_5_6();\n\n\n\n // maximum length of identifiers on mysql\n\n let db_name = mysql_safe_identifier(db_name);\n\n\n\n format!(\n\n \"mysql://root:prisma@{host}:{port}/{db_name}?connect_timeout=20&socket_timeout=60\",\n\n host = host,\n\n port = port,\n\n db_name = db_name,\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 26, "score": 305499.70519978774 }, { "content": "pub fn mysql_url(db_name: &str) -> String {\n\n let db_name = mysql_safe_identifier(db_name);\n\n let (host, port) = db_host_and_port_mysql_5_7();\n\n\n\n format!(\n\n 
\"mysql://root:prisma@{host}:{port}/{db_name}?connect_timeout=20&socket_timeout=60\",\n\n host = host,\n\n port = port,\n\n db_name = db_name,\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 27, "score": 305499.70519978774 }, { "content": "pub fn mysql_8_url(db_name: &str) -> String {\n\n let (host, port) = db_host_and_port_mysql_8_0();\n\n\n\n // maximum length of identifiers on mysql\n\n let db_name = mysql_safe_identifier(db_name);\n\n\n\n format!(\n\n \"mysql://root:prisma@{host}:{port}{maybe_slash}{db_name}?connect_timeout=20&socket_timeout=60\",\n\n maybe_slash = if db_name.is_empty() { \"\" } else { \"/\" },\n\n host = host,\n\n port = port,\n\n db_name = db_name,\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 28, "score": 305499.70519978774 }, { "content": "pub fn postgres_12_url(db_name: &str) -> String {\n\n let (host, port) = db_host_and_port_postgres_12();\n\n\n\n format!(\n\n \"postgresql://postgres:prisma@{}:{}/{}?schema={}&statement_cache_size=0&socket_timeout=60\",\n\n host, port, db_name, SCHEMA_NAME\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 29, "score": 305499.70519978774 }, { "content": "pub fn pgbouncer_url(db_name: &str) -> String {\n\n let (host, port) = db_host_and_port_for_pgbouncer();\n\n\n\n format!(\n\n \"postgresql://postgres:prisma@{}:{}/{}?schema={}&pgbouncer=true&socket_timeout=60\",\n\n host, port, db_name, SCHEMA_NAME\n\n )\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 30, "score": 305499.70519978774 }, { "content": "/// Ensures that if a cursor is provided, its fields are also selected.\n\n/// Necessary for post-processing of unstable orderings with cursor operations.\n\npub fn merge_cursor_fields(selected_fields: ModelProjection, cursor: &Option<RecordProjection>) -> ModelProjection {\n\n match cursor {\n\n Some(cursor) => selected_fields.merge(cursor.into()),\n\n None => selected_fields,\n\n }\n\n}\n\n\n", "file_path": 
"query-engine/core/src/query_graph_builder/read/utils.rs", "rank": 31, "score": 304809.4520688668 }, { "content": "pub fn comment(target: &mut dyn LineWriteable, comment_text: &str) {\n\n let trimmed = strip_new_line(&comment_text);\n\n let trimmed = trimmed.trim();\n\n\n\n target.write(trimmed);\n\n target.end_line();\n\n}\n\n\n", "file_path": "libs/datamodel/core/src/ast/reformat/helpers.rs", "rank": 32, "score": 302839.7129480878 }, { "content": "fn fetch_db_name<'a>(url: &'a Url, default: &'static str) -> &'a str {\n\n match url.path_segments() {\n\n Some(mut segments) => segments.next().unwrap_or(default),\n\n None => default,\n\n }\n\n}\n\n\n\npub async fn create_mysql_database(original_url: &Url) -> Result<Quaint, AnyError> {\n\n let mut mysql_db_url = original_url.clone();\n\n mysql_db_url.set_path(\"/mysql\");\n\n\n\n let db_name = fetch_db_name(&original_url, \"mysql\");\n\n\n\n debug_assert!(!db_name.is_empty());\n\n debug_assert!(\n\n db_name.len() < 64,\n\n \"db_name should be less than 64 characters, got {:?}\",\n\n db_name.len()\n\n );\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 33, "score": 302495.5948886785 }, { "content": "fn render_default(default: &DefaultValue) -> Cow<'_, str> {\n\n match default.kind() {\n\n DefaultKind::DBGENERATED(val) => val.as_str().into(),\n\n DefaultKind::VALUE(PrismaValue::String(val)) | DefaultKind::VALUE(PrismaValue::Enum(val)) => {\n\n format!(\"E'{}'\", escape_string_literal(&val)).into()\n\n }\n\n DefaultKind::VALUE(PrismaValue::Bytes(b)) => Quoted::postgres_string(format_hex(b)).to_string().into(),\n\n DefaultKind::NOW => \"CURRENT_TIMESTAMP\".into(),\n\n DefaultKind::VALUE(PrismaValue::DateTime(val)) => Quoted::postgres_string(val).to_string().into(),\n\n DefaultKind::VALUE(val) => val.to_string().into(),\n\n DefaultKind::SEQUENCE(_) => Default::default(),\n\n }\n\n}\n", "file_path": "migration-engine/connectors/sql-migration-connector/src/sql_renderer/postgres_renderer.rs", "rank": 34, "score": 
302417.18008724635 }, { "content": "fn render_default(default: &DefaultValue) -> Cow<'_, str> {\n\n match default.kind() {\n\n DefaultKind::DBGENERATED(val) => val.as_str().into(),\n\n DefaultKind::VALUE(PrismaValue::String(val)) | DefaultKind::VALUE(PrismaValue::Enum(val)) => {\n\n Quoted::sqlite_string(escape_quotes(&val)).to_string().into()\n\n }\n\n DefaultKind::VALUE(PrismaValue::Bytes(b)) => Quoted::sqlite_string(format_hex(b)).to_string().into(),\n\n DefaultKind::NOW => \"CURRENT_TIMESTAMP\".into(),\n\n DefaultKind::VALUE(PrismaValue::DateTime(val)) => Quoted::sqlite_string(val).to_string().into(),\n\n DefaultKind::VALUE(val) => format!(\"{}\", val).into(),\n\n DefaultKind::SEQUENCE(_) => \"\".into(),\n\n }\n\n}\n", "file_path": "migration-engine/connectors/sql-migration-connector/src/sql_renderer/sqlite_renderer.rs", "rank": 35, "score": 302417.1800872464 }, { "content": "fn render_default(default: &DefaultValue) -> Cow<'_, str> {\n\n match default.kind() {\n\n DefaultKind::DBGENERATED(val) => val.as_str().into(),\n\n DefaultKind::VALUE(PrismaValue::String(val)) | DefaultKind::VALUE(PrismaValue::Enum(val)) => {\n\n Quoted::mssql_string(escape_string_literal(&val)).to_string().into()\n\n }\n\n DefaultKind::VALUE(PrismaValue::Bytes(b)) => format!(\"0x{}\", common::format_hex(b)).into(),\n\n DefaultKind::NOW => \"CURRENT_TIMESTAMP\".into(),\n\n DefaultKind::VALUE(PrismaValue::DateTime(val)) => Quoted::mssql_string(val).to_string().into(),\n\n DefaultKind::VALUE(PrismaValue::Boolean(val)) => Cow::from(if *val { \"1\" } else { \"0\" }),\n\n DefaultKind::VALUE(val) => val.to_string().into(),\n\n DefaultKind::SEQUENCE(_) => \"\".into(),\n\n }\n\n}\n", "file_path": "migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs", "rank": 36, "score": 302417.1800872464 }, { "content": "fn where_input_field<T>(ctx: &mut BuilderContext, name: T, field: &RelationFieldRef) -> InputField\n\nwhere\n\n T: Into<String>,\n\n{\n\n let input_object_type 
= filter_objects::where_unique_object_type(ctx, &field.related_model());\n\n input_field(\n\n name.into(),\n\n list_union_object_type(input_object_type, field.is_list),\n\n None,\n\n )\n\n .optional()\n\n}\n", "file_path": "query-engine/core/src/schema_builder/input_types/input_fields.rs", "rank": 37, "score": 302047.8984896574 }, { "content": "pub fn db_host_and_port_mssql_2019() -> (&'static str, usize) {\n\n match std::env::var(\"IS_BUILDKITE\") {\n\n Ok(_) => (\"test-db-mssql-2019\", 1433),\n\n Err(_) => (\"127.0.0.1\", 1433),\n\n }\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 38, "score": 301733.43003317143 }, { "content": "pub fn db_host_and_port_mysql_5_7() -> (&'static str, usize) {\n\n match std::env::var(\"IS_BUILDKITE\") {\n\n Ok(_) => (\"test-db-mysql-5-7\", 3306),\n\n Err(_) => (\"127.0.0.1\", 3306),\n\n }\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 39, "score": 301733.43003317143 }, { "content": "pub fn db_host_and_port_mysql_8_0() -> (&'static str, usize) {\n\n match std::env::var(\"IS_BUILDKITE\") {\n\n Ok(_) => (\"test-db-mysql-8-0\", 3306),\n\n Err(_) => (\"127.0.0.1\", 3307),\n\n }\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 40, "score": 301733.43003317143 }, { "content": "pub fn db_host_and_port_postgres_12() -> (&'static str, usize) {\n\n match std::env::var(\"IS_BUILDKITE\") {\n\n Ok(_) => (\"test-db-postgres-12\", 5432),\n\n Err(_) => (\"127.0.0.1\", 5434),\n\n }\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 41, "score": 301733.43003317143 }, { "content": "pub fn decode_bytes(s: &str) -> PrismaValueResult<Vec<u8>> {\n\n base64::decode(s).map_err(|_| ConversionFailure::new(\"base64 encoded bytes\", \"PrismaValue::Bytes\"))\n\n}\n\n\n\nimpl TryFrom<serde_json::Value> for PrismaValue {\n\n type Error = crate::error::ConversionFailure;\n\n\n\n fn try_from(v: serde_json::Value) -> PrismaValueResult<Self> {\n\n match v {\n\n serde_json::Value::String(s) => 
Ok(PrismaValue::String(s)),\n\n serde_json::Value::Array(v) => {\n\n let vals: PrismaValueResult<Vec<PrismaValue>> = v.into_iter().map(PrismaValue::try_from).collect();\n\n Ok(PrismaValue::List(vals?))\n\n }\n\n serde_json::Value::Null => Ok(PrismaValue::Null),\n\n serde_json::Value::Bool(b) => Ok(PrismaValue::Boolean(b)),\n\n serde_json::Value::Number(num) => {\n\n if num.is_i64() {\n\n Ok(PrismaValue::Int(num.as_i64().unwrap()))\n\n } else {\n", "file_path": "libs/prisma-value/src/lib.rs", "rank": 42, "score": 301658.4345863342 }, { "content": "pub fn sqlite_test_url(db_name: &str) -> String {\n\n format!(\"file:{}\", sqlite_test_file(db_name))\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 43, "score": 301655.106457467 }, { "content": "pub fn sqlite_test_file(db_name: &str) -> String {\n\n static SERVER_ROOT: Lazy<std::path::PathBuf> = Lazy::new(|| {\n\n std::env::var(\"SERVER_ROOT\")\n\n .map(|root| std::path::Path::new(&root).join(\"db\"))\n\n .unwrap_or_else(|_| {\n\n let dir = std::env::temp_dir().join(\"prisma_tests_server_root\");\n\n let path = dir.to_string_lossy().into_owned();\n\n\n\n std::fs::create_dir_all(&path).expect(\"failed to create SERVER_ROOT directory\");\n\n\n\n path.into()\n\n })\n\n });\n\n\n\n let file_path = SERVER_ROOT.join(db_name);\n\n\n\n // Truncate the file.\n\n std::fs::File::create(&file_path).expect(\"Failed to create or truncate SQLite database.\");\n\n\n\n file_path.to_string_lossy().into_owned()\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 44, "score": 301655.106457467 }, { "content": "pub fn execute_raw(graph: &mut QueryGraph, field: ParsedField) -> QueryGraphBuilderResult<()> {\n\n let raw_query = Query::Write(WriteQuery::ExecuteRaw(raw_query(field)?));\n\n\n\n graph.create_node(raw_query);\n\n Ok(())\n\n}\n\n\n", "file_path": "query-engine/core/src/query_graph_builder/write/raw.rs", "rank": 45, "score": 300527.19302518514 }, { "content": "pub fn query_raw(graph: &mut QueryGraph, field: 
ParsedField) -> QueryGraphBuilderResult<()> {\n\n let raw_query = Query::Write(WriteQuery::QueryRaw(raw_query(field)?));\n\n\n\n graph.create_node(raw_query);\n\n Ok(())\n\n}\n\n\n", "file_path": "query-engine/core/src/query_graph_builder/write/raw.rs", "rank": 46, "score": 300527.19302518514 }, { "content": "/// Creates SelectedFields from a query selection.\n\n/// Automatically adds model IDs to the selected fields as well.\n\n/// Unwraps are safe due to query validation.\n\npub fn collect_selected_fields(from: &[FieldPair], model: &ModelRef) -> ModelProjection {\n\n let selected_fields = from\n\n .iter()\n\n .filter_map(|pair| {\n\n model\n\n .fields()\n\n .find_from_scalar(&pair.parsed_field.name)\n\n .ok()\n\n .map(|sf| sf.into())\n\n })\n\n .collect::<Vec<Field>>();\n\n\n\n let selected_projection = ModelProjection::new(selected_fields);\n\n let model_id = model.primary_identifier();\n\n\n\n model_id.merge(selected_projection)\n\n}\n\n\n", "file_path": "query-engine/core/src/query_graph_builder/read/utils.rs", "rank": 47, "score": 294681.68077545933 }, { "content": "/// `INSERT` a new record to the database. 
Resulting an `INSERT` ast and an\n\n/// optional `RecordProjection` if available from the arguments or model.\n\npub fn create_record(model: &ModelRef, mut args: WriteArgs) -> (Insert<'static>, Option<RecordProjection>) {\n\n let return_id = args.as_record_projection(model.primary_identifier());\n\n\n\n let fields: Vec<_> = model\n\n .fields()\n\n .scalar()\n\n .into_iter()\n\n .filter(|field| args.has_arg_for(&field.db_name()))\n\n .collect();\n\n\n\n let insert = fields\n\n .into_iter()\n\n .fold(Insert::single_into(model.as_table()), |insert, field| {\n\n let db_name = field.db_name();\n\n let value = args.take_field_value(db_name).unwrap();\n\n let value: PrismaValue = value\n\n .try_into()\n\n .expect(\"Create calls can only use PrismaValue write expressions (right now).\");\n\n\n\n insert.value(db_name.to_owned(), field.value(value))\n\n });\n\n\n\n (\n\n Insert::from(insert).returning(model.primary_identifier().as_columns()),\n\n return_id,\n\n )\n\n}\n\n\n", "file_path": "query-engine/connectors/sql-query-connector/src/query_builder/write.rs", "rank": 48, "score": 294595.6626637162 }, { "content": "pub fn collect_selection_order(from: &[FieldPair]) -> Vec<String> {\n\n from.iter()\n\n .map(|pair| {\n\n pair.parsed_field\n\n .alias\n\n .clone()\n\n .unwrap_or_else(|| pair.parsed_field.name.clone())\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "query-engine/core/src/query_graph_builder/read/utils.rs", "rank": 49, "score": 294352.40066358994 }, { "content": "fn function_to_serde(name: &str, args: &[PrismaValue]) -> serde_json::Value {\n\n let func = Function {\n\n name: String::from(name),\n\n args: args.iter().map(|arg| prisma_value_to_serde(arg)).collect(),\n\n };\n\n\n\n serde_json::to_value(&func).expect(\"Failed to render function JSON\")\n\n}\n\n\n", "file_path": "libs/datamodel/core/src/json/dmmf/to_dmmf.rs", "rank": 50, "score": 292744.0074498771 }, { "content": "/// Builds a read query from a parsed incoming read query field.\n\npub fn 
find_unique(mut field: ParsedField, model: ModelRef) -> QueryGraphBuilderResult<ReadQuery> {\n\n let filter = match field.arguments.lookup(args::WHERE) {\n\n Some(where_arg) => {\n\n let arg: ParsedInputMap = where_arg.value.try_into()?;\n\n Some(extractors::extract_unique_filter(arg, &model)?)\n\n }\n\n None => None,\n\n };\n\n\n\n let name = field.name;\n\n let alias = field.alias;\n\n let model = model;\n\n let nested_fields = field.nested_fields.unwrap().fields;\n\n let (aggr_fields_pairs, nested_fields) = extractors::extract_nested_rel_aggr_selections(nested_fields);\n\n\n\n if !aggr_fields_pairs.is_empty() {\n\n return Err(QueryGraphBuilderError::InputError(\n\n \"Aggregation selections are not yet implemented for findUnique queries.\".to_owned(),\n\n ));\n\n }\n", "file_path": "query-engine/core/src/query_graph_builder/read/one.rs", "rank": 51, "score": 289769.4194035863 }, { "content": "pub fn group_by(mut field: ParsedField, model: ModelRef) -> QueryGraphBuilderResult<ReadQuery> {\n\n let name = field.name;\n\n let alias = field.alias;\n\n let model = model;\n\n\n\n let by_arg = field.arguments.lookup(args::BY).unwrap().value;\n\n let group_by = extract_grouping(by_arg)?;\n\n let having: Option<Filter> = match field.arguments.lookup(args::HAVING) {\n\n Some(having_arg) => Some(extract_filter(having_arg.value.try_into()?, &model)?),\n\n None => None,\n\n };\n\n\n\n let args = extractors::extract_query_args(field.arguments, &model)?;\n\n let nested_fields = field.nested_fields.unwrap().fields;\n\n let selection_order = collect_selection_tree(&nested_fields);\n\n\n\n let selectors: Vec<_> = nested_fields\n\n .into_iter()\n\n .map(|field| resolve_query(field, &model))\n\n .collect::<QueryGraphBuilderResult<_>>()?;\n", "file_path": "query-engine/core/src/query_graph_builder/read/aggregations/group_by.rs", "rank": 52, "score": 289763.2350991776 }, { "content": "pub fn strip_new_line(str: &str) -> &str {\n\n if str.ends_with('\\n') {\n\n &str[0..str.len() - 1] // 
slice away line break.\n\n } else {\n\n &str\n\n }\n\n}\n", "file_path": "libs/datamodel/core/src/ast/reformat/helpers.rs", "rank": 53, "score": 288764.1542558258 }, { "content": "/// Parses and validates a datamodel string, using core attributes only.\n\n/// In case of an error, a pretty, colorful string is returned.\n\npub fn parse_datamodel_or_pretty_error(datamodel_string: &str, file_name: &str) -> Result<ValidatedDatamodel, String> {\n\n parse_datamodel_internal(datamodel_string, false, false)\n\n .map_err(|err| err.to_pretty_string(file_name, datamodel_string))\n\n}\n\n\n", "file_path": "libs/datamodel/core/src/lib.rs", "rank": 54, "score": 286393.004706431 }, { "content": "pub fn sanitize_datamodel_names(datamodel: &mut Datamodel, family: &SqlFamily) {\n\n let enum_renames = sanitize_models(datamodel, family);\n\n sanitize_enums(datamodel, &enum_renames);\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs", "rank": 55, "score": 283050.03486742015 }, { "content": "pub fn lower_default_value(dv: dml::DefaultValue) -> ast::Expression {\n\n match dv {\n\n dml::DefaultValue::Single(v) => lower_prisma_value(&v),\n\n dml::DefaultValue::Expression(e) => {\n\n let exprs = e.args.iter().map(lower_prisma_value).collect();\n\n ast::Expression::Function(e.name, exprs, ast::Span::empty())\n\n }\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/core/src/transform/attributes/default.rs", "rank": 56, "score": 282130.69501561427 }, { "content": "/// The maximum length of identifiers on mysql is 64 bytes.\n\n///\n\n/// Source: https://dev.mysql.com/doc/mysql-reslimits-excerpt/5.5/en/identifier-length.html\n\npub fn mysql_safe_identifier(identifier: &str) -> &str {\n\n if identifier.len() < 64 {\n\n identifier\n\n } else {\n\n identifier.get(0..63).expect(\"mysql identifier truncation\")\n\n }\n\n}\n\n\n", "file_path": "libs/test-setup/src/lib.rs", "rank": 57, "score": 280924.6305807248 }, { "content": "pub fn 
warning_enriched_with_map_on_enum_value(affected: &[EnumAndValue]) -> Warning {\n\n Warning {\n\n code: 10,\n\n message: \"These enum values were enriched with `@map` information taken from the previous Prisma schema.\"\n\n .into(),\n\n affected: serde_json::to_value(&affected).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/warnings.rs", "rank": 58, "score": 278751.4593135033 }, { "content": "/// Transforms a document to a `Record`, fields ordered as defined in `fields`.\n\nfn document_to_record(mut doc: Document, fields: &[String]) -> crate::Result<Record> {\n\n let mut values: Vec<PrismaValue> = Vec::with_capacity(fields.len());\n\n\n\n for field in fields {\n\n let bson = doc.remove(field).unwrap_or(Bson::Null);\n\n let val = value_from_bson(bson)?;\n\n\n\n values.push(val);\n\n }\n\n\n\n Ok(Record::new(values))\n\n}\n\n\n\n/// Consumes a cursor stream until exhausted.\n\nasync fn vacuum_cursor(mut cursor: Cursor) -> crate::Result<Vec<Document>> {\n\n let mut docs = vec![];\n\n\n\n while let Some(result) = cursor.next().await {\n\n match result {\n\n Ok(document) => docs.push(document),\n\n Err(e) => return Err(e.into()),\n\n }\n\n }\n\n\n\n Ok(docs)\n\n}\n", "file_path": "query-engine/connectors/mongodb-query-connector/src/queries/mod.rs", "rank": 59, "score": 277928.5144037719 }, { "content": "/// Computes a compound field name based on a multi-field id.\n\npub fn compound_id_field_name<T>(field_names: &[T]) -> String\n\nwhere\n\n T: AsRef<str>,\n\n{\n\n // Extremely sophisticated.\n\n field_names.iter().map(AsRef::as_ref).join(\"_\")\n\n}\n", "file_path": "query-engine/core/src/schema_builder/utils.rs", "rank": 60, "score": 277676.03507310105 }, { "content": "/// Iterator over all the relations in the schema. 
Each relation will only occur\n\n/// once.\n\npub fn walk_relations(datamodel: &Datamodel) -> impl Iterator<Item = RelationWalker<'_>> {\n\n walk_models(datamodel)\n\n .flat_map(move |model| model.relation_fields())\n\n .unique_by(|walker| walker.relation_name())\n\n .map(|relation_field| {\n\n let field_b = relation_field.opposite_side();\n\n\n\n RelationWalker {\n\n field_a: relation_field,\n\n field_b,\n\n }\n\n })\n\n}\n\n\n", "file_path": "libs/datamodel/core/src/walkers.rs", "rank": 61, "score": 275249.49359407637 }, { "content": "/// Field convenience wrapper function.\n\npub fn field<T>(\n\n name: T,\n\n arguments: Vec<InputField>,\n\n field_type: OutputType,\n\n query_info: Option<QueryInfo>,\n\n) -> OutputField\n\nwhere\n\n T: Into<String>,\n\n{\n\n OutputField {\n\n name: name.into(),\n\n arguments: arguments.into_iter().map(Arc::new).collect(),\n\n field_type: Arc::new(field_type),\n\n query_info,\n\n is_nullable: false,\n\n deprecation: None,\n\n }\n\n}\n\n\n", "file_path": "query-engine/core/src/schema_builder/utils.rs", "rank": 62, "score": 274472.9686730993 }, { "content": "fn values_match(previous: &str, next: &str) -> bool {\n\n previous == next\n\n}\n", "file_path": "migration-engine/connectors/sql-migration-connector/src/sql_schema_differ/enums.rs", "rank": 63, "score": 272916.5264902902 }, { "content": "/// Find the model mapping to the passed in database name.\n\npub fn find_model_by_db_name<'a>(datamodel: &'a Datamodel, db_name: &str) -> Option<ModelWalker<'a>> {\n\n datamodel\n\n .models\n\n .iter()\n\n .enumerate()\n\n .find(|(_, model)| model.database_name() == Some(db_name) || model.name == db_name)\n\n .map(|(model_idx, _model)| ModelWalker { datamodel, model_idx })\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct ModelWalker<'a> {\n\n datamodel: &'a Datamodel,\n\n model_idx: usize,\n\n}\n\n\n\nimpl<'a> ModelWalker<'a> {\n\n pub fn database_name(&self) -> &'a str {\n\n 
self.get().database_name.as_ref().unwrap_or(&self.get().name)\n\n }\n\n\n", "file_path": "libs/datamodel/core/src/walkers.rs", "rank": 64, "score": 272603.06468183245 }, { "content": "fn render_default<'a>(column: &ColumnWalker<'a>, default: &'a DefaultValue) -> Cow<'a, str> {\n\n match default.kind() {\n\n DefaultKind::DBGENERATED(val) => val.as_str().into(),\n\n DefaultKind::VALUE(PrismaValue::String(val)) | DefaultKind::VALUE(PrismaValue::Enum(val)) => {\n\n Quoted::mysql_string(escape_string_literal(&val)).to_string().into()\n\n }\n\n DefaultKind::NOW => {\n\n let precision = column\n\n .column_native_type()\n\n .as_ref()\n\n .and_then(MySqlType::timestamp_precision)\n\n .unwrap_or(3);\n\n\n\n format!(\"CURRENT_TIMESTAMP({})\", precision).into()\n\n }\n\n DefaultKind::VALUE(PrismaValue::DateTime(dt)) if column.column_type_family().is_datetime() => {\n\n Quoted::mysql_string(dt.to_rfc3339()).to_string().into()\n\n }\n\n DefaultKind::VALUE(val) => val.to_string().into(),\n\n DefaultKind::SEQUENCE(_) => Default::default(),\n\n }\n\n}\n", "file_path": "migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs", "rank": 65, "score": 272560.4501279195 }, { "content": "fn field_avg_output_type(ctx: &mut BuilderContext, field: &ScalarFieldRef) -> OutputType {\n\n match field.type_identifier {\n\n TypeIdentifier::Int | TypeIdentifier::BigInt | TypeIdentifier::Float => OutputType::float(),\n\n TypeIdentifier::Decimal => OutputType::decimal(),\n\n _ => map_scalar_output_type_for_field(ctx, field),\n\n }\n\n}\n\n\n", "file_path": "query-engine/core/src/schema_builder/output_types/aggregation/mod.rs", "rank": 66, "score": 271761.9671561358 }, { "content": "fn map_scalar_input_type_for_field(ctx: &mut BuilderContext, field: &ScalarFieldRef) -> InputType {\n\n map_scalar_input_type(ctx, &field.type_identifier, field.is_list)\n\n}\n\n\n", "file_path": "query-engine/core/src/schema_builder/input_types/mod.rs", "rank": 67, "score": 271761.9671561358 
}, { "content": "/// Iterator over all the models in the schema.\n\npub fn walk_models(datamodel: &Datamodel) -> impl Iterator<Item = ModelWalker<'_>> + '_ {\n\n (0..datamodel.models.len()).map(move |model_idx| ModelWalker { datamodel, model_idx })\n\n}\n\n\n", "file_path": "libs/datamodel/core/src/walkers.rs", "rank": 68, "score": 271443.0772672117 }, { "content": "fn attribute_name(index_type: dml::IndexType) -> &'static str {\n\n if index_type == dml::IndexType::Unique {\n\n \"unique\"\n\n } else {\n\n \"index\"\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/core/src/transform/attributes/unique_and_index.rs", "rank": 69, "score": 271147.8637137871 }, { "content": "/// Default inflector, anglicized mode.\n\npub fn default() -> &'static Inflector {\n\n &DEFAULT\n\n}\n\n\n", "file_path": "libs/prisma-inflector/src/lib.rs", "rank": 70, "score": 270808.85424532177 }, { "content": "/// Traverse all the columns in the schema.\n\npub fn walk_columns(schema: &SqlSchema) -> impl Iterator<Item = ColumnWalker<'_>> {\n\n schema.tables.iter().enumerate().flat_map(move |(table_index, table)| {\n\n (0..table.columns.len()).map(move |column_index| ColumnWalker {\n\n schema,\n\n column_index,\n\n table_index,\n\n })\n\n })\n\n}\n\n\n\n/// Traverse a table column.\n\n#[derive(Clone, Copy)]\n\npub struct ColumnWalker<'a> {\n\n /// The schema the column is contained in.\n\n schema: &'a SqlSchema,\n\n /// The index of the column in the table.\n\n column_index: usize,\n\n /// The index of the table in the schema.\n\n table_index: usize,\n\n}\n", "file_path": "libs/sql-schema-describer/src/walkers.rs", "rank": 71, "score": 268478.2793100885 }, { "content": "pub fn warning_fields_with_empty_names(affected: &[ModelAndField]) -> Warning {\n\n Warning {\n\n code: 2,\n\n message: \"These fields were commented out because their names are currently not supported by Prisma. 
Please provide valid ones that match [a-zA-Z][a-zA-Z0-9_]* using the `@map` attribute.\"\n\n .into(),\n\n affected: serde_json::to_value(&affected).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/warnings.rs", "rank": 72, "score": 266377.5971110701 }, { "content": "pub fn lower_prisma_value(pv: &PrismaValue) -> ast::Expression {\n\n match pv {\n\n PrismaValue::Boolean(true) => ast::Expression::BooleanValue(String::from(\"true\"), ast::Span::empty()),\n\n PrismaValue::Boolean(false) => ast::Expression::BooleanValue(String::from(\"false\"), ast::Span::empty()),\n\n PrismaValue::String(value) => ast::Expression::StringValue(value.clone(), ast::Span::empty()),\n\n PrismaValue::Enum(value) => ast::Expression::ConstantValue(value.clone(), ast::Span::empty()),\n\n PrismaValue::DateTime(value) => ast::Expression::StringValue(value.to_rfc3339(), ast::Span::empty()),\n\n PrismaValue::Float(value) => ast::Expression::NumericValue(value.to_string(), ast::Span::empty()),\n\n PrismaValue::Int(value) => ast::Expression::NumericValue(value.to_string(), ast::Span::empty()),\n\n PrismaValue::BigInt(value) => ast::Expression::NumericValue(value.to_string(), ast::Span::empty()),\n\n PrismaValue::Null => ast::Expression::ConstantValue(\"null\".to_string(), ast::Span::empty()),\n\n PrismaValue::Uuid(val) => ast::Expression::StringValue(val.to_string(), ast::Span::empty()),\n\n PrismaValue::Json(val) => ast::Expression::StringValue(val.to_string(), ast::Span::empty()),\n\n PrismaValue::List(vec) => ast::Expression::Array(\n\n vec.iter().map(|pv| lower_prisma_value(pv)).collect(),\n\n ast::Span::empty(),\n\n ),\n\n PrismaValue::Xml(val) => ast::Expression::StringValue(val.to_string(), ast::Span::empty()),\n\n PrismaValue::Bytes(b) => ast::Expression::StringValue(prisma_value::encode_bytes(b), ast::Span::empty()),\n\n }\n\n}\n", "file_path": "libs/datamodel/core/src/transform/attributes/default.rs", "rank": 73, "score": 
266232.6559457088 }, { "content": "/// Attempts to convert a PrismaValue to a database value without any additional type information.\n\n/// Can't reliably map Null values.\n\npub fn convert_lossy<'a>(pv: PrismaValue) -> Value<'a> {\n\n match pv {\n\n PrismaValue::String(s) => s.into(),\n\n PrismaValue::Float(f) => f.into(),\n\n PrismaValue::Boolean(b) => b.into(),\n\n PrismaValue::DateTime(d) => d.with_timezone(&Utc).into(),\n\n PrismaValue::Enum(e) => e.into(),\n\n PrismaValue::Int(i) => (i as i64).into(),\n\n PrismaValue::BigInt(i) => (i as i64).into(),\n\n PrismaValue::Uuid(u) => u.to_string().into(),\n\n PrismaValue::List(l) => Value::Array(Some(l.into_iter().map(convert_lossy).collect())),\n\n PrismaValue::Json(s) => Value::Json(serde_json::from_str(&s).unwrap()),\n\n PrismaValue::Bytes(b) => Value::Bytes(Some(b.into())),\n\n PrismaValue::Xml(s) => Value::Xml(Some(s.into())),\n\n PrismaValue::Null => Value::Integer(None), // Can't tell which type the null is supposed to be.\n\n }\n\n}\n", "file_path": "libs/prisma-models/src/sql_ext/scalar_field.rs", "rank": 74, "score": 265894.2949451429 }, { "content": "pub fn test_native_types_compatibility(datamodel: &str, error_msg: &str, datasource: &str) {\n\n let dml = format!(\n\n r#\"\n\n {datasource}\n\n\n\n {datamodel}\n\n \"#,\n\n datasource = datasource,\n\n datamodel = datamodel,\n\n );\n\n\n\n let error = parse_error(&dml);\n\n\n\n error.assert_is_message(error_msg);\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/types/helper.rs", "rank": 75, "score": 265032.7649735039 }, { "content": "pub fn invalid_url_description(database_str: &str, error_details: &str) -> String {\n\n let docs = r#\"https://www.prisma.io/docs/reference/database-reference/connection-urls\"#;\n\n\n\n let details = formatdoc! {r#\"\n\n {} in `{}`. Please refer to the documentation in {} for constructing a correct\n\n connection string. In some cases, certain characters must be escaped. 
Please\n\n check the string for any illegal characters.\"#, error_details, database_str, docs};\n\n\n\n details.replace('\\n', \" \")\n\n}\n\n\n", "file_path": "libs/user-facing-errors/src/quaint.rs", "rank": 76, "score": 265004.7174397196 }, { "content": "fn convert_value<'a>(field: &ScalarFieldRef, value: PrismaValue) -> Value<'a> {\n\n field.value(value)\n\n}\n\n\n", "file_path": "query-engine/connectors/sql-query-connector/src/filter_conversion.rs", "rank": 77, "score": 264254.20672698994 }, { "content": "pub fn find_input_type<'a>(dmmf: &'a DataModelMetaFormat, namespace: &str, type_name: &str) -> &'a DmmfInputType {\n\n dmmf.schema\n\n .input_object_types\n\n .get(namespace)\n\n .unwrap_or_else(|| panic!(\"unknown dmmf namespace {}\", namespace))\n\n .iter()\n\n .find(|o| o.name == type_name)\n\n .unwrap_or_else(|| panic!(\"could not find output type named {}\", type_name))\n\n}\n\n\n", "file_path": "query-engine/request-handlers/src/tests/dmmf/helpers.rs", "rank": 78, "score": 262416.50039097737 }, { "content": "pub fn find_output_type<'a>(dmmf: &'a DataModelMetaFormat, namespace: &str, type_name: &str) -> &'a DmmfOutputType {\n\n dmmf.schema\n\n .output_object_types\n\n .get(namespace)\n\n .unwrap_or_else(|| panic!(\"unknown dmmf namespace {}\", namespace))\n\n .iter()\n\n .find(|o| o.name == type_name)\n\n .unwrap_or_else(|| panic!(\"could not find output type named {}\", type_name))\n\n}\n\n\n", "file_path": "query-engine/request-handlers/src/tests/dmmf/helpers.rs", "rank": 79, "score": 262416.50039097737 }, { "content": "#[test]\n\nfn indexes_on_enum_fields_must_work() {\n\n let dml = r#\"\n\n model User {\n\n id Int @id\n\n role Role\n\n\n\n @@index([role])\n\n }\n\n\n\n enum Role {\n\n Admin\n\n Member\n\n }\n\n \"#;\n\n\n\n let schema = parse(dml);\n\n let user_model = schema.assert_has_model(\"User\");\n\n user_model.assert_has_index(IndexDefinition {\n\n name: None,\n\n fields: vec![\"role\".to_string()],\n\n tpe: IndexType::Normal,\n\n 
});\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/attributes/index.rs", "rank": 80, "score": 261989.72385720522 }, { "content": "fn validate_uniqueness_of_names_of_indexes(providers: &[&str], must_error: bool) {\n\n let dml = r#\"\n\n model User {\n\n id Int @id\n\n neighborId Int\n\n\n\n @@index([id], name: \"metaId\")\n\n }\n\n\n\n model Post {\n\n id Int @id\n\n optionId Int\n\n\n\n @@index([id], name: \"metaId\")\n\n }\n\n \"#;\n\n\n\n let error_msg= \"The index name `metaId` is declared multiple times. With the current connector index names have to be globally unique.\";\n\n\n\n test_capability_support(providers, must_error, dml, error_msg);\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/capabilities/mod.rs", "rank": 81, "score": 259230.26771284913 }, { "content": "#[allow(dead_code)] // Not sure why the compiler thinks this is never used.\n\npub fn parse(datamodel_string: &str) -> Datamodel {\n\n match datamodel::parse_datamodel(datamodel_string) {\n\n Ok(s) => s.subject,\n\n Err(errs) => {\n\n panic!(\n\n \"Datamodel parsing failed\\n\\n{}\",\n\n errs.to_pretty_string(\"\", datamodel_string)\n\n )\n\n }\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/common.rs", "rank": 82, "score": 257409.33709837712 }, { "content": "pub fn test_native_types_without_attributes(native_type: &str, scalar_type: &str, error_msg: &str, datasource: &str) {\n\n let dml = format!(\n\n r#\"\n\n model Blog {{\n\n id Int @id\n\n bigInt {scalar_type} @db.{native_type}\n\n }}\n\n \"#,\n\n native_type = native_type,\n\n scalar_type = scalar_type,\n\n );\n\n\n\n test_native_types_compatibility(&dml, &error_msg, datasource);\n\n}\n", "file_path": "libs/datamodel/core/tests/types/helper.rs", "rank": 83, "score": 256690.20433421782 }, { "content": "fn convert_list_value<'a>(field: &ScalarFieldRef, values: Vec<PrismaValue>) -> Value<'a> {\n\n Value::Array(Some(values.into_iter().map(|val| field.value(val)).collect()))\n\n}\n\n\n", "file_path": 
"query-engine/connectors/sql-query-connector/src/filter_conversion.rs", "rank": 84, "score": 256262.83653619568 }, { "content": "#[test]\n\nfn must_error_if_default_value_for_enum_is_not_valid() {\n\n let dml = r#\"\n\n model Model {\n\n id Int @id\n\n enum A @default(B)\n\n }\n\n\n\n enum A {\n\n A\n\n }\n\n \"#;\n\n\n\n let errors = parse_error(dml);\n\n\n\n errors.assert_is(DatamodelError::new_attribute_validation_error(\n\n \"The defined default value is not a valid value of the enum specified for the field.\",\n\n \"default\",\n\n Span::new(46, 65),\n\n ));\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/attributes/default_negative.rs", "rank": 85, "score": 254785.78884671364 }, { "content": "#[test]\n\nfn stringified_field_names_in_index_return_nice_error() {\n\n let dm = r#\"\n\n model User {\n\n id Int @id\n\n firstName String\n\n lastName String\n\n\n\n @@index([\"firstName\", \"lastName\"])\n\n }\n\n \"#;\n\n\n\n let err = parse_error(dm);\n\n\n\n err.assert_is(DatamodelError::TypeMismatchError {\n\n expected_type: \"constant literal\".into(),\n\n received_type: \"string\".into(),\n\n raw: \"firstName\".into(),\n\n span: Span::new(135, 146),\n\n });\n\n}\n", "file_path": "libs/datamodel/core/tests/attributes/index.rs", "rank": 86, "score": 254502.4998109594 }, { "content": "#[test]\n\nfn must_error_if_default_value_for_relation_field() {\n\n let dml = r#\"\n\n model Model {\n\n id Int @id\n\n rel A @default(\"\")\n\n }\n\n\n\n model A {\n\n id Int @id\n\n }\n\n \"#;\n\n\n\n let errors = parse_error(dml);\n\n\n\n errors.assert_is(DatamodelError::new_attribute_validation_error(\n\n \"Cannot set a default value on a relation field.\",\n\n \"default\",\n\n Span::new(53, 64),\n\n ));\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/attributes/default_negative.rs", "rank": 87, "score": 254453.55019343222 }, { "content": "#[allow(dead_code)] // Not sure why the compiler thinks this is never used.\n\npub fn parse_error(datamodel_string: &str) -> 
Diagnostics {\n\n match datamodel::parse_datamodel(datamodel_string) {\n\n Ok(_) => panic!(\"Expected an error when parsing schema.\"),\n\n Err(errs) => errs,\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/common.rs", "rank": 88, "score": 254416.44546112564 }, { "content": "pub fn unquote_string(val: &str) -> String {\n\n val.trim_start_matches('\\'')\n\n .trim_end_matches('\\'')\n\n .trim_start_matches('\\\\')\n\n .trim_start_matches('\"')\n\n .trim_end_matches('\"')\n\n .trim_end_matches('\\\\')\n\n .into()\n\n}\n\n\n", "file_path": "libs/sql-schema-describer/src/lib.rs", "rank": 89, "score": 254410.23994241812 }, { "content": "pub fn parse_configuration(datamodel_string: &str) -> Configuration {\n\n match datamodel::parse_configuration(datamodel_string) {\n\n Ok(c) => c.subject,\n\n Err(errs) => {\n\n panic!(\n\n \"Configuration parsing failed\\n\\n{}\",\n\n errs.to_pretty_string(\"\", datamodel_string)\n\n )\n\n }\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/common.rs", "rank": 90, "score": 254410.23994241812 }, { "content": "pub fn parse_one_u32(args: Vec<String>, type_name: &str) -> Result<u32, ConnectorError> {\n\n let number_of_args = args.len();\n\n\n\n match parse_u32_arguments(args)?.as_slice() {\n\n [x] => Ok(*x),\n\n _ => Err(ConnectorError::new_argument_count_mismatch_error(\n\n type_name,\n\n 1,\n\n number_of_args,\n\n )),\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/connectors/datamodel-connector/src/helper.rs", "rank": 91, "score": 251956.0020841895 }, { "content": "pub fn parse_with_diagnostics(datamodel_string: &str) -> ValidatedDatamodel {\n\n match datamodel::parse_datamodel(datamodel_string) {\n\n Ok(s) => s,\n\n Err(errs) => {\n\n for err in errs.to_error_iter() {\n\n err.pretty_print(&mut std::io::stderr().lock(), \"\", datamodel_string)\n\n .unwrap();\n\n }\n\n\n\n panic!(\"Datamodel parsing failed. 
Please see error above.\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/common.rs", "rank": 92, "score": 251530.37974562048 }, { "content": "pub fn warning_default_uuid_warning(affected: &[ModelAndField]) -> Warning {\n\n Warning {\n\n code: 6,\n\n message:\n\n \"These id fields had a `@default(uuid())` added because we believe the schema was created by Prisma 1.\"\n\n .into(),\n\n affected: serde_json::to_value(&affected).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/warnings.rs", "rank": 93, "score": 251477.38343385793 }, { "content": "pub fn warning_default_cuid_warning(affected: &[ModelAndField]) -> Warning {\n\n Warning {\n\n code: 5,\n\n message:\n\n \"These id fields had a `@default(cuid())` added because we believe the schema was created by Prisma 1.\"\n\n .into(),\n\n affected: serde_json::to_value(&affected).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "introspection-engine/connectors/sql-introspection-connector/src/warnings.rs", "rank": 94, "score": 251477.38343385793 }, { "content": "#[tracing::instrument]\n\npub fn error_on_changed_provider(migrations_directory_path: &str, provider: &str) -> ConnectorResult<()> {\n\n match match_provider_in_lock_file(migrations_directory_path, provider) {\n\n None => Ok(()),\n\n Some(false) => Err(ConnectorError::user_facing_error(ProviderSwitchedError {\n\n provider: provider.into(),\n\n })),\n\n Some(true) => Ok(()),\n\n }\n\n}\n\n\n\n/// Check whether provider matches Return None/Some(true)/Some(false)\n", "file_path": "migration-engine/connectors/migration-connector/src/migrations_directory.rs", "rank": 95, "score": 250888.9642434739 }, { "content": "#[test]\n\nfn multi_field_unique_indexes_on_enum_fields_must_work() {\n\n let dml = r#\"\n\n model User {\n\n id Int @id\n\n role Role\n\n\n\n @@unique([role])\n\n }\n\n\n\n enum Role {\n\n Admin\n\n Member\n\n }\n\n \"#;\n\n\n\n let schema = parse(dml);\n\n let user_model = 
schema.assert_has_model(\"User\");\n\n user_model.assert_has_index(IndexDefinition {\n\n name: None,\n\n fields: vec![\"role\".to_string()],\n\n tpe: IndexType::Unique,\n\n });\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/attributes/unique.rs", "rank": 96, "score": 250859.93088177257 }, { "content": "/// Merges the incoming write argument values into the given, already loaded, ids. Overwrites existing values.\n\npub fn merge_write_args(loaded_ids: Vec<RecordProjection>, incoming_args: WriteArgs) -> Vec<RecordProjection> {\n\n if loaded_ids.is_empty() || incoming_args.is_empty() {\n\n return loaded_ids;\n\n }\n\n\n\n // Contains all positions that need to be updated with the given expression.\n\n let positions: HashMap<usize, &WriteExpression> = loaded_ids\n\n .first()\n\n .unwrap()\n\n .pairs\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, (field, _))| incoming_args.get_field_value(field.db_name()).map(|val| (i, val)))\n\n .collect();\n\n\n\n loaded_ids\n\n .into_iter()\n\n .map(|mut id| {\n\n for (position, expr) in positions.iter() {\n\n let current_val = id.pairs[position.to_owned()].1.clone();\n\n id.pairs[position.to_owned()].1 = apply_expression(current_val, (*expr).clone());\n\n }\n\n\n\n id\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "query-engine/connectors/query-connector/src/write_args.rs", "rank": 97, "score": 249759.98881551533 }, { "content": "#[test]\n\nfn must_error_when_unknown_fields_are_used() {\n\n let dml = r#\"\n\n model User {\n\n id Int @id\n\n\n\n @@index([foo,bar])\n\n }\n\n \"#;\n\n\n\n let errors = parse_error(dml);\n\n\n\n errors.assert_is(DatamodelError::new_model_validation_error(\n\n \"The index definition refers to the unknown fields foo, bar.\",\n\n \"User\",\n\n Span::new(48, 64),\n\n ));\n\n}\n\n\n", "file_path": "libs/datamodel/core/tests/attributes/index.rs", "rank": 98, "score": 247844.33796880697 }, { "content": "pub fn parse_type_alias(token: &Token) -> Field {\n\n let mut name: Option<Identifier> = None;\n\n 
let mut attributes: Vec<Attribute> = vec![];\n\n let mut base_type: Option<(String, Span)> = None;\n\n let mut comment: Option<Comment> = None;\n\n\n\n for current in token.relevant_children() {\n\n match current.as_rule() {\n\n Rule::TYPE_KEYWORD => {}\n\n Rule::non_empty_identifier => name = Some(current.to_id()),\n\n Rule::base_type => base_type = Some((parse_base_type(&current), Span::from_pest(current.as_span()))),\n\n Rule::attribute => attributes.push(parse_attribute(&current)),\n\n Rule::comment_block => comment = parse_comment_block(&current),\n\n _ => parsing_catch_all(&current, \"custom type\"),\n\n }\n\n }\n\n\n\n match (name, base_type) {\n\n (Some(name), Some((field_type, field_type_span))) => Field {\n\n field_type: Identifier {\n", "file_path": "libs/datamodel/core/src/ast/parser/parse_types.rs", "rank": 99, "score": 246148.72214766132 } ]
Rust
bench-streamer/src/main.rs
YandriHN/solana
456e6711f0c24b13ae5e923ff8cd2af3caab496d
#![allow(clippy::integer_arithmetic)] use { clap::{crate_description, crate_name, Arg, Command}, crossbeam_channel::unbounded, solana_streamer::{ packet::{Packet, PacketBatch, PacketBatchRecycler, PACKET_DATA_SIZE}, streamer::{receiver, PacketBatchReceiver, StreamerReceiveStats}, }, std::{ cmp::max, net::{IpAddr, Ipv4Addr, SocketAddr, UdpSocket}, sync::{ atomic::{AtomicBool, AtomicUsize, Ordering}, Arc, }, thread::{sleep, spawn, JoinHandle, Result}, time::{Duration, SystemTime}, }, }; fn producer(addr: &SocketAddr, exit: Arc<AtomicBool>) -> JoinHandle<()> { let send = UdpSocket::bind("0.0.0.0:0").unwrap(); let mut packet_batch = PacketBatch::default(); packet_batch.packets.resize(10, Packet::default()); for w in packet_batch.packets.iter_mut() { w.meta.size = PACKET_DATA_SIZE; w.meta.set_addr(addr); } let packet_batch = Arc::new(packet_batch); spawn(move || loop { if exit.load(Ordering::Relaxed) { return; } let mut num = 0; for p in &packet_batch.packets { let a = p.meta.addr(); assert!(p.meta.size <= PACKET_DATA_SIZE); send.send_to(&p.data[..p.meta.size], &a).unwrap(); num += 1; } assert_eq!(num, 10); }) } fn sink(exit: Arc<AtomicBool>, rvs: Arc<AtomicUsize>, r: PacketBatchReceiver) -> JoinHandle<()> { spawn(move || loop { if exit.load(Ordering::Relaxed) { return; } let timer = Duration::new(1, 0); if let Ok(packet_batch) = r.recv_timeout(timer) { rvs.fetch_add(packet_batch.packets.len(), Ordering::Relaxed); } }) } fn main() -> Result<()> { let mut num_sockets = 1usize; let matches = Command::new(crate_name!()) .about(crate_description!()) .version(solana_version::version!()) .arg( Arg::new("num-recv-sockets") .long("num-recv-sockets") .value_name("NUM") .takes_value(true) .help("Use NUM receive sockets"), ) .arg( Arg::new("num-producers") .long("num-producers") .value_name("NUM") .takes_value(true) .help("Use this many producer threads."), ) .get_matches(); if let Some(n) = matches.value_of("num-recv-sockets") { num_sockets = max(num_sockets, 
n.to_string().parse().expect("integer")); } let num_producers: u64 = matches.value_of_t("num_producers").unwrap_or(4); let port = 0; let ip_addr = IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)); let mut addr = SocketAddr::new(ip_addr, 0); let exit = Arc::new(AtomicBool::new(false)); let mut read_channels = Vec::new(); let mut read_threads = Vec::new(); let recycler = PacketBatchRecycler::default(); let (_port, read_sockets) = solana_net_utils::multi_bind_in_range( ip_addr, (port, port + num_sockets as u16), num_sockets, ) .unwrap(); let stats = Arc::new(StreamerReceiveStats::new("bench-streamer-test")); for read in read_sockets { read.set_read_timeout(Some(Duration::new(1, 0))).unwrap(); addr = read.local_addr().unwrap(); let (s_reader, r_reader) = unbounded(); read_channels.push(r_reader); read_threads.push(receiver( Arc::new(read), exit.clone(), s_reader, recycler.clone(), stats.clone(), 1, true, None, )); } let producer_threads: Vec<_> = (0..num_producers) .into_iter() .map(|_| producer(&addr, exit.clone())) .collect(); let rvs = Arc::new(AtomicUsize::new(0)); let sink_threads: Vec<_> = read_channels .into_iter() .map(|r_reader| sink(exit.clone(), rvs.clone(), r_reader)) .collect(); let start = SystemTime::now(); let start_val = rvs.load(Ordering::Relaxed); sleep(Duration::new(5, 0)); let elapsed = start.elapsed().unwrap(); let end_val = rvs.load(Ordering::Relaxed); let time = elapsed.as_secs() * 10_000_000_000 + u64::from(elapsed.subsec_nanos()); let ftime = (time as f64) / 10_000_000_000_f64; let fcount = (end_val - start_val) as f64; println!("performance: {:?}", fcount / ftime); exit.store(true, Ordering::Relaxed); for t_reader in read_threads { t_reader.join()?; } for t_producer in producer_threads { t_producer.join()?; } for t_sink in sink_threads { t_sink.join()?; } Ok(()) }
#![allow(clippy::integer_arithmetic)] use { clap::{crate_description, crate_name, Arg, Command}, crossbeam_channel::unbounded, solana_streamer::{ packet::{Packet, PacketBatch, PacketBatchRecycler, PACKET_DATA_SIZE}, streamer::{receiver, PacketBatchReceiver, StreamerReceiveStats}, }, std::{ cmp::max, net::{IpAddr, Ipv4Addr, SocketAddr, UdpSocket}, sync::{ atomic::{AtomicBool, AtomicUsize, Ordering}, Arc, }, thread::{sleep, spawn, JoinHandle, Result}, time::{Duration, SystemTime}, }, }; fn producer(addr: &SocketAddr, exit: Arc<AtomicBool>) -> JoinHandle<()> { let send = UdpSocket::bind("0.0.0.0:0").unwrap(); let mut packet_batch = PacketBatch::default(); packet_batch.packets.resize(10, Packet::default()); for w in packet_batch.packets.iter_mut() { w.meta.size = PACKET_DATA_SIZE; w.meta.set_addr(addr); } let packet_batch = Arc::new(packet_batch); spawn(move || loop { if exit.load(Ordering::Relaxed) { return; } let mut num = 0; for p in &packet_batch.packets { let a = p.meta.addr(); assert!(p.meta.size <= PACKET_DATA_SIZE); send.send_to(&p.data[..p.meta.size], &a).unwrap(); num += 1; } assert_eq!(num, 10); }) }
fn main() -> Result<()> { let mut num_sockets = 1usize; let matches = Command::new(crate_name!()) .about(crate_description!()) .version(solana_version::version!()) .arg( Arg::new("num-recv-sockets") .long("num-recv-sockets") .value_name("NUM") .takes_value(true) .help("Use NUM receive sockets"), ) .arg( Arg::new("num-producers") .long("num-producers") .value_name("NUM") .takes_value(true) .help("Use this many producer threads."), ) .get_matches(); if let Some(n) = matches.value_of("num-recv-sockets") { num_sockets = max(num_sockets, n.to_string().parse().expect("integer")); } let num_producers: u64 = matches.value_of_t("num_producers").unwrap_or(4); let port = 0; let ip_addr = IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)); let mut addr = SocketAddr::new(ip_addr, 0); let exit = Arc::new(AtomicBool::new(false)); let mut read_channels = Vec::new(); let mut read_threads = Vec::new(); let recycler = PacketBatchRecycler::default(); let (_port, read_sockets) = solana_net_utils::multi_bind_in_range( ip_addr, (port, port + num_sockets as u16), num_sockets, ) .unwrap(); let stats = Arc::new(StreamerReceiveStats::new("bench-streamer-test")); for read in read_sockets { read.set_read_timeout(Some(Duration::new(1, 0))).unwrap(); addr = read.local_addr().unwrap(); let (s_reader, r_reader) = unbounded(); read_channels.push(r_reader); read_threads.push(receiver( Arc::new(read), exit.clone(), s_reader, recycler.clone(), stats.clone(), 1, true, None, )); } let producer_threads: Vec<_> = (0..num_producers) .into_iter() .map(|_| producer(&addr, exit.clone())) .collect(); let rvs = Arc::new(AtomicUsize::new(0)); let sink_threads: Vec<_> = read_channels .into_iter() .map(|r_reader| sink(exit.clone(), rvs.clone(), r_reader)) .collect(); let start = SystemTime::now(); let start_val = rvs.load(Ordering::Relaxed); sleep(Duration::new(5, 0)); let elapsed = start.elapsed().unwrap(); let end_val = rvs.load(Ordering::Relaxed); let time = elapsed.as_secs() * 10_000_000_000 + 
u64::from(elapsed.subsec_nanos()); let ftime = (time as f64) / 10_000_000_000_f64; let fcount = (end_val - start_val) as f64; println!("performance: {:?}", fcount / ftime); exit.store(true, Ordering::Relaxed); for t_reader in read_threads { t_reader.join()?; } for t_producer in producer_threads { t_producer.join()?; } for t_sink in sink_threads { t_sink.join()?; } Ok(()) }
fn sink(exit: Arc<AtomicBool>, rvs: Arc<AtomicUsize>, r: PacketBatchReceiver) -> JoinHandle<()> { spawn(move || loop { if exit.load(Ordering::Relaxed) { return; } let timer = Duration::new(1, 0); if let Ok(packet_batch) = r.recv_timeout(timer) { rvs.fetch_add(packet_batch.packets.len(), Ordering::Relaxed); } }) }
function_block-full_function
[ { "content": "#[cfg(not(windows))]\n\nfn symlink_dir<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> std::io::Result<()> {\n\n std::os::unix::fs::symlink(src, dst)\n\n}\n\n\n", "file_path": "install/src/command.rs", "rank": 0, "score": 301982.69425580115 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn sendmmsg_retry(sock: &UdpSocket, hdrs: &mut [mmsghdr]) -> Result<(), SendPktsError> {\n\n let sock_fd = sock.as_raw_fd();\n\n let mut total_sent = 0;\n\n let mut erropt = None;\n\n\n\n let mut pkts = &mut *hdrs;\n\n while !pkts.is_empty() {\n\n let npkts = match unsafe { libc::sendmmsg(sock_fd, &mut pkts[0], pkts.len() as u32, 0) } {\n\n -1 => {\n\n if erropt.is_none() {\n\n erropt = Some(io::Error::last_os_error());\n\n }\n\n // skip over the failing packet\n\n 1_usize\n\n }\n\n n => {\n\n // if we fail to send all packets we advance to the failing\n\n // packet and retry in order to capture the error code\n\n total_sent += n as usize;\n\n n as usize\n", "file_path": "streamer/src/sendmmsg.rs", "rank": 1, "score": 275000.832488771 }, { "content": "fn write_fees<W: io::Write>(w: &mut W, transaction_fee: u64, prefix: &str) -> io::Result<()> {\n\n writeln!(w, \"{} Fee: ◎{}\", prefix, lamports_to_sol(transaction_fee))\n\n}\n\n\n", "file_path": "cli-output/src/display.rs", "rank": 2, "score": 274800.9463640007 }, { "content": "/// Spend and verify from every node in the network\n\npub fn spend_and_verify_all_nodes<S: ::std::hash::BuildHasher + Sync + Send>(\n\n entry_point_info: &ContactInfo,\n\n funding_keypair: &Keypair,\n\n nodes: usize,\n\n ignore_nodes: HashSet<Pubkey, S>,\n\n socket_addr_space: SocketAddrSpace,\n\n) {\n\n let cluster_nodes =\n\n discover_cluster(&entry_point_info.gossip, nodes, socket_addr_space).unwrap();\n\n assert!(cluster_nodes.len() >= nodes);\n\n let ignore_nodes = Arc::new(ignore_nodes);\n\n cluster_nodes.par_iter().for_each(|ingress_node| {\n\n if ignore_nodes.contains(&ingress_node.id) {\n\n return;\n\n }\n\n let random_keypair = 
Keypair::new();\n\n let (rpc, tpu) = ingress_node.client_facing_addr();\n\n let client = create_client(rpc, tpu);\n\n let bal = client\n\n .poll_get_balance_with_commitment(\n", "file_path": "local-cluster/src/cluster_tests.rs", "rank": 3, "score": 271059.1567854462 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn recv_mmsg(socket: &UdpSocket, packets: &mut [Packet]) -> io::Result</*num packets:*/ usize> {\n\n debug_assert!(packets.iter().all(|pkt| pkt.meta == Meta::default()));\n\n let mut i = 0;\n\n let count = cmp::min(NUM_RCVMMSGS, packets.len());\n\n for p in packets.iter_mut().take(count) {\n\n p.meta.size = 0;\n\n match socket.recv_from(&mut p.data) {\n\n Err(_) if i > 0 => {\n\n break;\n\n }\n\n Err(e) => {\n\n return Err(e);\n\n }\n\n Ok((nrecv, from)) => {\n\n p.meta.size = nrecv;\n\n p.meta.set_addr(&from);\n\n if i == 0 {\n\n socket.set_nonblocking(true)?;\n\n }\n\n }\n\n }\n\n i += 1;\n\n }\n\n Ok(i)\n\n}\n\n\n", "file_path": "streamer/src/recvmmsg.rs", "rank": 4, "score": 266431.8479223008 }, { "content": "#[cfg(target_os = \"linux\")]\n\n#[allow(clippy::uninit_assumed_init)]\n\npub fn recv_mmsg(sock: &UdpSocket, packets: &mut [Packet]) -> io::Result</*num packets:*/ usize> {\n\n // Assert that there are no leftovers in packets.\n\n debug_assert!(packets.iter().all(|pkt| pkt.meta == Meta::default()));\n\n const SOCKADDR_STORAGE_SIZE: usize = mem::size_of::<sockaddr_storage>();\n\n\n\n let mut hdrs: [mmsghdr; NUM_RCVMMSGS] = unsafe { mem::zeroed() };\n\n let mut iovs: [iovec; NUM_RCVMMSGS] = unsafe { mem::MaybeUninit::uninit().assume_init() };\n\n let mut addrs: [sockaddr_storage; NUM_RCVMMSGS] = unsafe { mem::zeroed() };\n\n\n\n let sock_fd = sock.as_raw_fd();\n\n let count = cmp::min(iovs.len(), packets.len());\n\n\n\n for (packet, hdr, iov, addr) in\n\n izip!(packets.iter_mut(), &mut hdrs, &mut iovs, &mut addrs).take(count)\n\n {\n\n *iov = iovec {\n\n iov_base: packet.data.as_mut_ptr() as *mut libc::c_void,\n\n iov_len: 
packet.data.len(),\n\n };\n\n hdr.msg_hdr.msg_name = addr as *mut _ as *mut _;\n", "file_path": "streamer/src/recvmmsg.rs", "rank": 5, "score": 266431.8479223008 }, { "content": "fn test_node(exit: &Arc<AtomicBool>) -> (Arc<ClusterInfo>, GossipService, UdpSocket) {\n\n let keypair = Arc::new(Keypair::new());\n\n let mut test_node = Node::new_localhost_with_pubkey(&keypair.pubkey());\n\n let cluster_info = Arc::new(ClusterInfo::new(\n\n test_node.info.clone(),\n\n keypair,\n\n SocketAddrSpace::Unspecified,\n\n ));\n\n let gossip_service = GossipService::new(\n\n &cluster_info,\n\n None,\n\n test_node.sockets.gossip,\n\n None,\n\n true, // should_check_duplicate_instance\n\n None,\n\n exit,\n\n );\n\n let _ = cluster_info.my_contact_info();\n\n (\n\n cluster_info,\n\n gossip_service,\n\n test_node.sockets.tvu.pop().unwrap(),\n\n )\n\n}\n\n\n", "file_path": "gossip/tests/gossip.rs", "rank": 6, "score": 260670.54017782235 }, { "content": "pub fn process_transaction_log(args: &TransactionLogArgs) -> Result<(), Error> {\n\n let db = db::open_db(&args.transaction_db, true)?;\n\n db::write_transaction_log(&db, &args.output_path)?;\n\n Ok(())\n\n}\n\n\n\nuse {\n\n crate::db::check_output_file,\n\n solana_sdk::{pubkey::Pubkey, signature::Keypair},\n\n tempfile::{tempdir, NamedTempFile},\n\n};\n", "file_path": "tokens/src/commands.rs", "rank": 7, "score": 255209.28842573264 }, { "content": "fn deposit_many(bank: &Bank, pubkeys: &mut Vec<Pubkey>, num: usize) -> Result<(), LamportsError> {\n\n for t in 0..num {\n\n let pubkey = solana_sdk::pubkey::new_rand();\n\n let account =\n\n AccountSharedData::new((t + 1) as u64, 0, AccountSharedData::default().owner());\n\n pubkeys.push(pubkey);\n\n assert!(bank.get_account(&pubkey).is_none());\n\n bank.deposit(&pubkey, (t + 1) as u64)?;\n\n assert_eq!(bank.get_account(&pubkey).unwrap(), account);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "runtime/benches/accounts.rs", "rank": 8, "score": 253588.59686383145 }, { "content": "fn main() -> 
Result<(), std::io::Error> {\n\n let proto_base_path = std::path::PathBuf::from(\"proto\");\n\n let proto_files = [\"confirmed_block.proto\", \"transaction_by_addr.proto\"];\n\n let mut protos = Vec::new();\n\n for proto_file in &proto_files {\n\n let proto = proto_base_path.join(proto_file);\n\n println!(\"cargo::rerun-if-changed={}\", proto.display());\n\n protos.push(proto);\n\n }\n\n\n\n tonic_build::configure()\n\n .build_client(true)\n\n .build_server(false)\n\n .type_attribute(\n\n \"TransactionErrorType\",\n\n \"#[cfg_attr(test, derive(enum_iterator::IntoEnumIterator))]\",\n\n )\n\n .type_attribute(\n\n \"InstructionErrorType\",\n\n \"#[cfg_attr(test, derive(enum_iterator::IntoEnumIterator))]\",\n\n )\n\n .compile(&protos, &[proto_base_path])\n\n}\n", "file_path": "storage-proto/build.rs", "rank": 9, "score": 253195.4940425543 }, { "content": "pub fn update_token_args(client: &RpcClient, args: &mut Option<SplTokenArgs>) -> Result<(), Error> {\n\n if let Some(spl_token_args) = args {\n\n let sender_account = client\n\n .get_account(&spl_token_args.token_account_address)\n\n .unwrap_or_default();\n\n let mint_address =\n\n pubkey_from_spl_token(&SplTokenAccount::unpack(&sender_account.data)?.mint);\n\n spl_token_args.mint = mint_address;\n\n update_decimals(client, args)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "tokens/src/spl_token.rs", "rank": 10, "score": 249613.37001162797 }, { "content": "fn null_tracer() -> Option<impl Fn(&RewardCalculationEvent) + Send + Sync> {\n\n None::<fn(&RewardCalculationEvent)>\n\n}\n\n\n\nimpl fmt::Display for RewardType {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n RewardType::Fee => \"fee\",\n\n RewardType::Rent => \"rent\",\n\n RewardType::Staking => \"staking\",\n\n RewardType::Voting => \"voting\",\n\n }\n\n )\n\n }\n\n}\n\n\n", "file_path": "runtime/src/bank.rs", "rank": 11, "score": 248135.78655294253 }, { "content": "pub fn 
update_decimals(client: &RpcClient, args: &mut Option<SplTokenArgs>) -> Result<(), Error> {\n\n if let Some(spl_token_args) = args {\n\n let mint_account = client.get_account(&spl_token_args.mint).unwrap_or_default();\n\n let mint = Mint::unpack(&mint_account.data)?;\n\n spl_token_args.decimals = mint.decimals;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "tokens/src/spl_token.rs", "rank": 12, "score": 243717.6280519268 }, { "content": "pub fn process_balances(client: &RpcClient, args: &BalancesArgs) -> Result<(), Error> {\n\n let allocations: Vec<Allocation> =\n\n read_allocations(&args.input_csv, None, false, args.spl_token_args.is_some())?;\n\n let allocations = merge_allocations(&allocations);\n\n\n\n let token = if let Some(spl_token_args) = &args.spl_token_args {\n\n spl_token_args.mint.to_string()\n\n } else {\n\n \"◎\".to_string()\n\n };\n\n println!(\"{} {}\", style(\"Token:\").bold(), token);\n\n\n\n println!(\n\n \"{}\",\n\n style(format!(\n\n \"{:<44} {:>24} {:>24} {:>24}\",\n\n \"Recipient\", \"Expected Balance\", \"Actual Balance\", \"Difference\"\n\n ))\n\n .bold()\n\n );\n", "file_path": "tokens/src/commands.rs", "rank": 14, "score": 242793.19399065909 }, { "content": "fn process_spy(matches: &ArgMatches, socket_addr_space: SocketAddrSpace) -> std::io::Result<()> {\n\n let num_nodes_exactly = matches\n\n .value_of(\"num_nodes_exactly\")\n\n .map(|num| num.to_string().parse().unwrap());\n\n let num_nodes = matches\n\n .value_of(\"num_nodes\")\n\n .map(|num| num.to_string().parse().unwrap())\n\n .or(num_nodes_exactly);\n\n let timeout = matches\n\n .value_of(\"timeout\")\n\n .map(|secs| secs.to_string().parse().unwrap());\n\n let pubkey = matches\n\n .value_of(\"node_pubkey\")\n\n .map(|pubkey_str| pubkey_str.parse::<Pubkey>().unwrap());\n\n let shred_version = value_t_or_exit!(matches, \"shred_version\", u16);\n\n let identity_keypair = keypair_of(matches, \"identity\");\n\n\n\n let entrypoint_addr = parse_entrypoint(matches);\n\n\n\n let gossip_host = 
parse_gossip_host(matches, entrypoint_addr);\n", "file_path": "gossip/src/main.rs", "rank": 15, "score": 239627.27394459178 }, { "content": "fn main() -> Result<(), std::io::Error> {\n\n let manifest_dir = std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n let out_dir = manifest_dir.join(\"../proto\");\n\n let googleapis = manifest_dir.join(\"googleapis\");\n\n\n\n println!(\"Google API directory: {}\", googleapis.display());\n\n println!(\"output directory: {}\", out_dir.display());\n\n\n\n tonic_build::configure()\n\n .build_client(true)\n\n .build_server(false)\n\n .out_dir(&out_dir)\n\n .compile(\n\n &[googleapis.join(\"google/bigtable/v2/bigtable.proto\")],\n\n &[googleapis],\n\n )\n\n}\n", "file_path": "storage-bigtable/build-proto/src/main.rs", "rank": 16, "score": 239416.9110498651 }, { "content": "fn num_accounts_arg<'a, 'b>() -> Arg<'a, 'b> {\n\n Arg::with_name(\"num_accounts\")\n\n .long(\"num-accounts\")\n\n .required(true)\n\n .takes_value(true)\n\n .value_name(\"NUMBER\")\n\n .help(\"Number of derived stake accounts\")\n\n}\n\n\n\npub(crate) fn get_matches<'a, I, T>(args: I) -> ArgMatches<'a>\n\nwhere\n\n I: IntoIterator<Item = T>,\n\n T: Into<OsString> + Clone,\n\n{\n\n let default_config_file = CONFIG_FILE.as_ref().unwrap();\n\n App::new(\"solana-stake-accounts\")\n\n .about(\"about\")\n\n .version(\"version\")\n\n .arg(\n\n Arg::with_name(\"config_file\")\n", "file_path": "stake-accounts/src/arg_parser.rs", "rank": 17, "score": 235250.81133460326 }, { "content": "/// Remove the snapshot directory for this slot\n\npub fn remove_bank_snapshot<P>(slot: Slot, bank_snapshots_dir: P) -> Result<()>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let bank_snapshot_dir = get_bank_snapshots_dir(&bank_snapshots_dir, slot);\n\n fs::remove_dir_all(bank_snapshot_dir)?;\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct BankFromArchiveTimings {\n\n pub rebuild_bank_from_snapshots_us: u64,\n\n pub full_snapshot_untar_us: u64,\n\n pub 
incremental_snapshot_untar_us: u64,\n\n pub verify_snapshot_bank_us: u64,\n\n}\n\n\n\n// From testing, 4 seems to be a sweet spot for ranges of 60M-360M accounts and 16-64 cores. This may need to be tuned later.\n\nconst PARALLEL_UNTAR_READERS_DEFAULT: usize = 4;\n\n\n\n/// Rebuild bank from snapshot archives. Handles either just a full snapshot, or both a full\n\n/// snapshot and an incremental snapshot.\n", "file_path": "runtime/src/snapshot_utils.rs", "rank": 18, "score": 233391.69703659846 }, { "content": "pub fn recv_from(batch: &mut PacketBatch, socket: &UdpSocket, max_wait_ms: u64) -> Result<usize> {\n\n let mut i = 0;\n\n //DOCUMENTED SIDE-EFFECT\n\n //Performance out of the IO without poll\n\n // * block on the socket until it's readable\n\n // * set the socket to non blocking\n\n // * read until it fails\n\n // * set it back to blocking before returning\n\n socket.set_nonblocking(false)?;\n\n trace!(\"receiving on {}\", socket.local_addr().unwrap());\n\n let start = Instant::now();\n\n loop {\n\n batch.packets.resize(\n\n std::cmp::min(i + NUM_RCVMMSGS, PACKETS_PER_BATCH),\n\n Packet::default(),\n\n );\n\n match recv_mmsg(socket, &mut batch.packets[i..]) {\n\n Err(_) if i > 0 => {\n\n if start.elapsed().as_millis() as u64 > max_wait_ms {\n\n break;\n", "file_path": "streamer/src/packet.rs", "rank": 19, "score": 228781.84808045937 }, { "content": "fn create_payers<T: 'static + BenchTpsClient + Send + Sync>(\n\n valid_blockhash: bool,\n\n size: usize,\n\n client: Option<&Arc<T>>,\n\n) -> Vec<Option<Keypair>> {\n\n // Assume that if we use valid blockhash, we also have a payer\n\n if valid_blockhash {\n\n // each payer is used to fund transaction\n\n // transactions are built to be invalid so the the amount here is arbitrary\n\n let funding_key = Keypair::new();\n\n let funding_key = Arc::new(funding_key);\n\n let res =\n\n generate_and_fund_keypairs(client.unwrap().clone(), &funding_key, size, 1_000_000)\n\n .unwrap_or_else(|e| {\n\n eprintln!(\"Error could 
not fund keys: {:?}\", e);\n\n exit(1);\n\n });\n\n res.into_iter().map(Some).collect()\n\n } else {\n\n std::iter::repeat_with(|| None).take(size).collect()\n\n }\n\n}\n\n\n", "file_path": "dos/src/main.rs", "rank": 20, "score": 228433.0429352417 }, { "content": "fn run_dos<T: 'static + BenchTpsClient + Send + Sync>(\n\n nodes: &[ContactInfo],\n\n iterations: usize,\n\n client: Option<Arc<T>>,\n\n params: DosClientParameters,\n\n) {\n\n let target = get_target(nodes, params.mode, params.entrypoint_addr);\n\n\n\n if params.mode == Mode::Rpc {\n\n // creating rpc_client because get_account, get_program_accounts are not implemented for BenchTpsClient\n\n let rpc_client =\n\n get_rpc_client(nodes, params.entrypoint_addr).expect(\"Failed to get rpc client\");\n\n // existence of data_input is checked at cli level\n\n run_dos_rpc_mode(\n\n rpc_client,\n\n iterations,\n\n params.data_type,\n\n &params.data_input.unwrap(),\n\n );\n\n } else if params.data_type == DataType::Transaction\n", "file_path": "dos/src/main.rs", "rank": 21, "score": 228433.0429352417 }, { "content": "fn run_dos_transactions<T: 'static + BenchTpsClient + Send + Sync>(\n\n target: SocketAddr,\n\n iterations: usize,\n\n client: Option<Arc<T>>,\n\n transaction_params: TransactionParams,\n\n) {\n\n let socket = UdpSocket::bind(\"0.0.0.0:0\").unwrap();\n\n\n\n // Number of payers is the number of generating threads, for now it is 1\n\n // Later, we will create a new payer for each thread since Keypair is not clonable\n\n let payers: Vec<Option<Keypair>> =\n\n create_payers(transaction_params.valid_blockhash, 1, client.as_ref());\n\n let payer = payers[0].as_ref();\n\n\n\n // Generate n=1000 unique keypairs\n\n // The number of chunks is described by binomial coefficient\n\n // and hence this choice of n provides large enough number of permutations\n\n let mut keypairs_flat: Vec<Keypair> = Vec::new();\n\n // 1000 is arbitrary number. 
In case of permutation_size > 1,\n\n // this guaranties large enough set of unique permutations\n", "file_path": "dos/src/main.rs", "rank": 22, "score": 224591.17613383918 }, { "content": "fn unpack_snapshot_local<T: 'static + Read + std::marker::Send, F: Fn() -> T>(\n\n reader: F,\n\n ledger_dir: &Path,\n\n account_paths: &[PathBuf],\n\n parallel_archivers: usize,\n\n) -> Result<UnpackedAppendVecMap> {\n\n assert!(parallel_archivers > 0);\n\n // a shared 'reader' that reads the decompressed stream once, keeps some history, and acts as a reader for multiple parallel archive readers\n\n let shared_buffer = SharedBuffer::new(reader());\n\n\n\n // allocate all readers before any readers start reading\n\n let readers = (0..parallel_archivers)\n\n .into_iter()\n\n .map(|_| SharedBufferReader::new(&shared_buffer))\n\n .collect::<Vec<_>>();\n\n\n\n // create 'parallel_archivers' # of parallel workers, each responsible for 1/parallel_archivers of all the files to extract.\n\n let all_unpacked_append_vec_map = readers\n\n .into_par_iter()\n\n .enumerate()\n", "file_path": "runtime/src/snapshot_utils.rs", "rank": 23, "score": 222756.5030306594 }, { "content": "fn check_num_system_accounts(accounts: &[u8], num: usize) -> Result<(), ParseInstructionError> {\n\n check_num_accounts(accounts, num, ParsableProgram::System)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use {\n\n super::*,\n\n solana_sdk::{message::Message, pubkey::Pubkey, system_instruction, sysvar},\n\n };\n\n\n\n #[test]\n\n fn test_parse_system_create_account_ix() {\n\n let lamports = 55;\n\n let space = 128;\n\n let from_pubkey = Pubkey::new_unique();\n\n let to_pubkey = Pubkey::new_unique();\n\n let owner_pubkey = Pubkey::new_unique();\n\n\n\n let instruction = system_instruction::create_account(\n", "file_path": "transaction-status/src/parse_system.rs", "rank": 24, "score": 221869.05747483668 }, { "content": "fn check_num_vote_accounts(accounts: &[u8], num: usize) -> Result<(), ParseInstructionError> {\n\n 
check_num_accounts(accounts, num, ParsableProgram::Vote)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use {\n\n super::*,\n\n solana_sdk::{hash::Hash, message::Message, pubkey::Pubkey, sysvar},\n\n solana_vote_program::{\n\n vote_instruction,\n\n vote_state::{Vote, VoteAuthorize, VoteInit},\n\n },\n\n };\n\n\n\n #[test]\n\n fn test_parse_vote_initialize_ix() {\n\n let lamports = 55;\n\n\n\n let commission = 10;\n", "file_path": "transaction-status/src/parse_vote.rs", "rank": 25, "score": 221869.05747483668 }, { "content": "fn check_num_stake_accounts(accounts: &[u8], num: usize) -> Result<(), ParseInstructionError> {\n\n check_num_accounts(accounts, num, ParsableProgram::Stake)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use {\n\n super::*,\n\n solana_sdk::{\n\n message::Message,\n\n pubkey::Pubkey,\n\n stake::{\n\n config,\n\n instruction::{self, LockupArgs},\n\n state::{Authorized, Lockup, StakeAuthorize},\n\n },\n\n sysvar,\n\n },\n\n };\n\n\n", "file_path": "transaction-status/src/parse_stake.rs", "rank": 26, "score": 221869.05747483668 }, { "content": "fn check_num_token_accounts(accounts: &[u8], num: usize) -> Result<(), ParseInstructionError> {\n\n check_num_accounts(accounts, num, ParsableProgram::SplToken)\n\n}\n\n\n", "file_path": "transaction-status/src/parse_token.rs", "rank": 27, "score": 221869.05747483668 }, { "content": "fn kill_process(process: &mut Child) -> Result<(), io::Error> {\n\n if let Ok(()) = process.kill() {\n\n process.wait()?;\n\n } else {\n\n println!(\"Process {} has already exited\", process.id());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "install/src/stop_process.rs", "rank": 28, "score": 221765.63013255855 }, { "content": "/// Load a value from a file in YAML format.\n\n///\n\n/// Despite the name, this function is generic YAML file deserializer, a thin\n\n/// wrapper around serde.\n\n///\n\n/// Most callers should instead use [`Config::load`].\n\n///\n\n/// # Errors\n\n///\n\n/// This function may return typical file I/O 
errors.\n\npub fn load_config_file<T, P>(config_file: P) -> Result<T, io::Error>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n P: AsRef<Path>,\n\n{\n\n let file = File::open(config_file)?;\n\n let config = serde_yaml::from_reader(file)\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, format!(\"{:?}\", err)))?;\n\n Ok(config)\n\n}\n\n\n", "file_path": "cli-config/src/lib.rs", "rank": 29, "score": 220794.4566826851 }, { "content": "fn send_messages(\n\n client: &RpcClient,\n\n db: &mut PickleDb,\n\n allocations: &[Allocation],\n\n args: &DistributeTokensArgs,\n\n exit: Arc<AtomicBool>,\n\n messages: Vec<Message>,\n\n stake_extras: StakeExtras,\n\n) -> Result<(), Error> {\n\n for ((allocation, message), (new_stake_account_keypair, lockup_date)) in\n\n allocations.iter().zip(messages).zip(stake_extras)\n\n {\n\n if exit.load(Ordering::SeqCst) {\n\n db.dump()?;\n\n return Err(Error::ExitSignal);\n\n }\n\n let new_stake_account_address = new_stake_account_keypair.pubkey();\n\n\n\n let mut signers = vec![&*args.fee_payer, &*args.sender_keypair];\n\n if let Some(stake_args) = &args.stake_args {\n", "file_path": "tokens/src/commands.rs", "rank": 31, "score": 217518.19193274775 }, { "content": "pub fn parse_decode_transaction(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let blob = value_t_or_exit!(matches, \"transaction\", String);\n\n let binary_encoding = match matches.value_of(\"encoding\").unwrap() {\n\n \"base58\" => TransactionBinaryEncoding::Base58,\n\n \"base64\" => TransactionBinaryEncoding::Base64,\n\n _ => unreachable!(),\n\n };\n\n\n\n let encoded_transaction = EncodedTransaction::Binary(blob, binary_encoding);\n\n if let Some(transaction) = encoded_transaction.decode() {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::DecodeTransaction(transaction),\n\n signers: vec![],\n\n })\n\n } else {\n\n Err(CliError::BadParameter(\n\n \"Unable to decode transaction\".to_string(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "cli/src/wallet.rs", 
"rank": 32, "score": 216706.4722301311 }, { "content": "pub fn parse_supply(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let print_accounts = matches.is_present(\"print_accounts\");\n\n Ok(CliCommandInfo {\n\n command: CliCommand::Supply { print_accounts },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 33, "score": 216706.4722301311 }, { "content": "fn write_return_data<W: io::Write>(\n\n w: &mut W,\n\n return_data: Option<&TransactionReturnData>,\n\n prefix: &str,\n\n) -> io::Result<()> {\n\n if let Some(return_data) = return_data {\n\n if !return_data.data.is_empty() {\n\n use pretty_hex::*;\n\n writeln!(\n\n w,\n\n \"{}Return Data from Program {}:\",\n\n prefix, return_data.program_id\n\n )?;\n\n writeln!(w, \"{} {:?}\", prefix, return_data.data.hex_dump())?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "cli-output/src/display.rs", "rank": 34, "score": 216097.75036958256 }, { "content": "pub fn fund_keypairs<T: 'static + BenchTpsClient + Send + Sync>(\n\n client: Arc<T>,\n\n funding_key: &Keypair,\n\n keypairs: &[Keypair],\n\n extra: u64,\n\n lamports_per_account: u64,\n\n) -> Result<()> {\n\n let rent = client.get_minimum_balance_for_rent_exemption(0)?;\n\n info!(\"Get lamports...\");\n\n\n\n // Sample the first keypair, to prevent lamport loss on repeated solana-bench-tps executions\n\n let first_key = keypairs[0].pubkey();\n\n let first_keypair_balance = client.get_balance(&first_key).unwrap_or(0);\n\n\n\n // Sample the last keypair, to check if funding was already completed\n\n let last_key = keypairs[keypairs.len() - 1].pubkey();\n\n let last_keypair_balance = client.get_balance(&last_key).unwrap_or(0);\n\n\n\n // Repeated runs will eat up keypair balances from transaction fees. 
In order to quickly\n\n // start another bench-tps run without re-funding all of the keypairs, check if the\n", "file_path": "bench-tps/src/bench.rs", "rank": 35, "score": 214738.4774759162 }, { "content": "/// fund the dests keys by spending all of the source keys into MAX_SPENDS_PER_TX\n\n/// on every iteration. This allows us to replay the transfers because the source is either empty,\n\n/// or full\n\npub fn fund_keys<T: 'static + BenchTpsClient + Send + Sync>(\n\n client: Arc<T>,\n\n source: &Keypair,\n\n dests: &[Keypair],\n\n total: u64,\n\n max_fee: u64,\n\n lamports_per_account: u64,\n\n) {\n\n let mut funded: Vec<&Keypair> = vec![source];\n\n let mut funded_funds = total;\n\n let mut not_funded: Vec<&Keypair> = dests.iter().collect();\n\n while !not_funded.is_empty() {\n\n // Build to fund list and prepare funding sources for next iteration\n\n let mut new_funded: Vec<&Keypair> = vec![];\n\n let mut to_fund: Vec<(&Keypair, Vec<(Pubkey, u64)>)> = vec![];\n\n let to_lamports = (funded_funds - lamports_per_account - max_fee) / MAX_SPENDS_PER_TX;\n\n for f in funded {\n\n let start = not_funded.len() - MAX_SPENDS_PER_TX as usize;\n\n let dests: Vec<_> = not_funded.drain(start..).collect();\n\n let spends: Vec<_> = dests.iter().map(|k| (k.pubkey(), to_lamports)).collect();\n", "file_path": "bench-tps/src/bench.rs", "rank": 36, "score": 214738.4774759162 }, { "content": "/// Save a value to a file in YAML format.\n\n///\n\n/// Despite the name, this function is a generic YAML file serializer, a thin\n\n/// wrapper around serde.\n\n///\n\n/// If the file's directory does not exist, it will be created. 
If the file\n\n/// already exists, it will be overwritten.\n\n///\n\n/// Most callers should instead use [`Config::save`].\n\n///\n\n/// # Errors\n\n///\n\n/// This function may return typical file I/O errors.\n\npub fn save_config_file<T, P>(config: &T, config_file: P) -> Result<(), io::Error>\n\nwhere\n\n T: serde::ser::Serialize,\n\n P: AsRef<Path>,\n\n{\n\n let serialized = serde_yaml::to_string(config)\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, format!(\"{:?}\", err)))?;\n\n\n\n if let Some(outdir) = config_file.as_ref().parent() {\n\n create_dir_all(outdir)?;\n\n }\n\n let mut file = File::create(config_file)?;\n\n file.write_all(&serialized.into_bytes())?;\n\n\n\n Ok(())\n\n}\n", "file_path": "cli-config/src/lib.rs", "rank": 37, "score": 213778.10993186283 }, { "content": "pub fn parse_total_supply(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::TotalSupply,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 38, "score": 212570.01947441208 }, { "content": "pub fn parse_get_slot(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetSlot,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 39, "score": 212570.01947441208 }, { "content": "pub fn parse_get_block(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let slot = value_of(matches, \"slot\");\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetBlock { slot },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 40, "score": 212570.01947441208 }, { "content": "pub fn parse_get_epoch(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetEpoch,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 41, "score": 212570.01947441208 }, { 
"content": "pub fn parse_largest_accounts(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let filter = if matches.is_present(\"circulating\") {\n\n Some(RpcLargestAccountsFilter::Circulating)\n\n } else if matches.is_present(\"non_circulating\") {\n\n Some(RpcLargestAccountsFilter::NonCirculating)\n\n } else {\n\n None\n\n };\n\n Ok(CliCommandInfo {\n\n command: CliCommand::LargestAccounts { filter },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 42, "score": 212570.01947441208 }, { "content": "pub fn parse_show_stake_history(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let use_lamports_unit = matches.is_present(\"lamports\");\n\n let limit_results = value_of(matches, \"limit\").unwrap();\n\n Ok(CliCommandInfo {\n\n command: CliCommand::ShowStakeHistory {\n\n use_lamports_unit,\n\n limit_results,\n\n },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/stake.rs", "rank": 43, "score": 212570.01947441208 }, { "content": "pub fn parse_show_validators(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let use_lamports_unit = matches.is_present(\"lamports\");\n\n let number_validators = matches.is_present(\"number\");\n\n let reverse_sort = matches.is_present(\"reverse\");\n\n let keep_unstaked_delinquents = matches.is_present(\"keep_unstaked_delinquents\");\n\n let delinquent_slot_distance = value_of(matches, \"delinquent_slot_distance\");\n\n\n\n let sort_order = match value_t_or_exit!(matches, \"sort\", String).as_str() {\n\n \"delinquent\" => CliValidatorsSortOrder::Delinquent,\n\n \"commission\" => CliValidatorsSortOrder::Commission,\n\n \"credits\" => CliValidatorsSortOrder::EpochCredits,\n\n \"identity\" => CliValidatorsSortOrder::Identity,\n\n \"last-vote\" => CliValidatorsSortOrder::LastVote,\n\n \"root\" => CliValidatorsSortOrder::Root,\n\n \"skip-rate\" => CliValidatorsSortOrder::SkipRate,\n\n \"stake\" => CliValidatorsSortOrder::Stake,\n\n 
\"vote-account\" => CliValidatorsSortOrder::VoteAccount,\n\n \"version\" => CliValidatorsSortOrder::Version,\n\n _ => unreachable!(),\n\n };\n", "file_path": "cli/src/cluster_query.rs", "rank": 44, "score": 212570.01947441208 }, { "content": "pub fn parse_leader_schedule(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let epoch = value_of(matches, \"epoch\");\n\n Ok(CliCommandInfo {\n\n command: CliCommand::LeaderSchedule { epoch },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 45, "score": 212570.01947441208 }, { "content": "#[cfg(not(windows))]\n\npub fn stop_process(process: &mut Child) -> Result<(), io::Error> {\n\n use {\n\n nix::{\n\n errno::Errno::{EINVAL, EPERM, ESRCH},\n\n sys::signal::{kill, Signal},\n\n unistd::Pid,\n\n },\n\n std::{\n\n io::ErrorKind,\n\n thread,\n\n time::{Duration, Instant},\n\n },\n\n };\n\n\n\n let nice_wait = Duration::from_secs(5);\n\n let pid = Pid::from_raw(process.id() as i32);\n\n match kill(pid, Signal::SIGINT) {\n\n Ok(()) => {\n\n let expire = Instant::now() + nice_wait;\n\n while let Ok(None) = process.try_wait() {\n", "file_path": "install/src/stop_process.rs", "rank": 46, "score": 212530.8813769727 }, { "content": "// Lock on each iteration. 
Slowest.\n\nfn bench_arc_mutex_poh_hash(bencher: &mut Bencher) {\n\n let poh = Arc::new(Mutex::new(Poh::new(Hash::default(), None)));\n\n bencher.iter(|| {\n\n for _ in 0..NUM_HASHES {\n\n poh.lock().unwrap().hash(1);\n\n }\n\n })\n\n}\n\n\n\n#[bench]\n", "file_path": "poh/benches/poh.rs", "rank": 47, "score": 211533.75370930706 }, { "content": "pub fn generate_and_fund_keypairs<T: 'static + BenchTpsClient + Send + Sync>(\n\n client: Arc<T>,\n\n funding_key: &Keypair,\n\n keypair_count: usize,\n\n lamports_per_account: u64,\n\n) -> Result<Vec<Keypair>> {\n\n let rent = client.get_minimum_balance_for_rent_exemption(0)?;\n\n let lamports_per_account = lamports_per_account + rent;\n\n\n\n info!(\"Creating {} keypairs...\", keypair_count);\n\n let (mut keypairs, extra) = generate_keypairs(funding_key, keypair_count as u64);\n\n fund_keypairs(client, funding_key, &keypairs, extra, lamports_per_account)?;\n\n\n\n // 'generate_keypairs' generates extra keys to be able to have size-aligned funding batches for fund_keys.\n\n keypairs.truncate(keypair_count);\n\n\n\n Ok(keypairs)\n\n}\n\n\n", "file_path": "bench-tps/src/bench.rs", "rank": 48, "score": 211296.3244434974 }, { "content": "#[cfg(not(feature = \"hidapi\"))]\n\npub fn initialize_wallet_manager() -> Result<Arc<RemoteWalletManager>, RemoteWalletError> {\n\n Err(RemoteWalletError::Hid(\n\n \"hidapi crate compilation disabled in solana-remote-wallet.\".to_string(),\n\n ))\n\n}\n\n\n", "file_path": "remote-wallet/src/remote_wallet.rs", "rank": 49, "score": 210884.66597455856 }, { "content": "pub fn write_transaction_log<P: AsRef<Path>>(db: &PickleDb, path: &P) -> Result<(), io::Error> {\n\n let mut wtr = csv::WriterBuilder::new().from_path(path).unwrap();\n\n let mut transaction_infos = read_transaction_infos(db);\n\n transaction_infos.sort_by(compare_transaction_infos);\n\n for info in transaction_infos {\n\n let signed_info = SignedTransactionInfo {\n\n recipient: info.recipient.to_string(),\n\n amount: 
info.amount,\n\n new_stake_account_address: info\n\n .new_stake_account_address\n\n .map(|x| x.to_string())\n\n .unwrap_or_else(|| \"\".to_string()),\n\n finalized_date: info.finalized_date,\n\n signature: info.transaction.signatures[0].to_string(),\n\n };\n\n wtr.serialize(&signed_info)?;\n\n }\n\n wtr.flush()\n\n}\n\n\n", "file_path": "tokens/src/db.rs", "rank": 50, "score": 210104.31526150968 }, { "content": "fn first_err(results: &[Result<()>]) -> Result<()> {\n\n for r in results {\n\n if r.is_err() {\n\n return r.clone();\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "ledger/src/blockstore_processor.rs", "rank": 51, "score": 208966.4647596976 }, { "content": "pub fn parse_get_transaction_count(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetTransactionCount,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 52, "score": 208635.7117738967 }, { "content": "pub fn parse_get_epoch_info(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetEpochInfo,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 53, "score": 208635.7117738967 }, { "content": "pub fn parse_get_block_time(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let slot = value_of(matches, \"slot\");\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetBlockTime { slot },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 54, "score": 208635.7117738967 }, { "content": "pub fn parse_get_block_height(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetBlockHeight,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 55, "score": 208635.7117738967 }, { "content": "pub fn parse_show_block_production(matches: &ArgMatches<'_>) -> 
Result<CliCommandInfo, CliError> {\n\n let epoch = value_t!(matches, \"epoch\", Epoch).ok();\n\n let slot_limit = value_t!(matches, \"slot_limit\", u64).ok();\n\n\n\n Ok(CliCommandInfo {\n\n command: CliCommand::ShowBlockProduction { epoch, slot_limit },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 56, "score": 208635.7117738967 }, { "content": "/// Defines a Geyser plugin, to stream data from the runtime.\n\n/// Geyser plugins must describe desired behavior for load and unload,\n\n/// as well as how they will handle streamed data.\n\npub trait GeyserPlugin: Any + Send + Sync + std::fmt::Debug {\n\n fn name(&self) -> &'static str;\n\n\n\n /// The callback called when a plugin is loaded by the system,\n\n /// used for doing whatever initialization is required by the plugin.\n\n /// The _config_file contains the name of the\n\n /// of the config file. The config must be in JSON format and\n\n /// include a field \"libpath\" indicating the full path\n\n /// name of the shared library implementing this interface.\n\n fn on_load(&mut self, _config_file: &str) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n /// The callback called right before a plugin is unloaded by the system\n\n /// Used for doing cleanup before unload.\n\n fn on_unload(&mut self) {}\n\n\n\n /// Called when an account is updated at a slot.\n\n /// When `is_startup` is true, it indicates the account is loaded from\n\n /// snapshots when the validator starts up. 
When `is_startup` is false,\n", "file_path": "geyser-plugin-interface/src/geyser_plugin_interface.rs", "rank": 57, "score": 208630.3665278984 }, { "content": "// Acquire lock every NUM_HASHES_PER_BATCH iterations.\n\n// Speed should be close to bench_poh_hash() if NUM_HASHES_PER_BATCH is set well.\n\nfn bench_arc_mutex_poh_batched_hash(bencher: &mut Bencher) {\n\n let poh = Arc::new(Mutex::new(Poh::new(Hash::default(), Some(NUM_HASHES))));\n\n //let exit = Arc::new(AtomicBool::new(false));\n\n let exit = Arc::new(AtomicBool::new(true));\n\n\n\n bencher.iter(|| {\n\n // NOTE: This block attempts to look as close as possible to `PohService::tick_producer()`\n\n loop {\n\n if poh.lock().unwrap().hash(DEFAULT_HASHES_PER_BATCH) {\n\n poh.lock().unwrap().tick().unwrap();\n\n if exit.load(Ordering::Relaxed) {\n\n break;\n\n }\n\n }\n\n }\n\n })\n\n}\n\n\n\n#[bench]\n", "file_path": "poh/benches/poh.rs", "rank": 58, "score": 207473.80270120798 }, { "content": "#[bench]\n\n#[ignore]\n\nfn bench_bank_sync_process_builtin_transactions(bencher: &mut Bencher) {\n\n do_bench_transactions(bencher, &sync_bencher, &create_builtin_transactions);\n\n}\n\n\n", "file_path": "runtime/benches/bank.rs", "rank": 59, "score": 207467.7092431887 }, { "content": "#[bench]\n\nfn bench_process_entries_with_order_shuffeling(bencher: &mut Bencher) {\n\n bench_process_entries(true, bencher);\n\n}\n", "file_path": "core/benches/banking_stage.rs", "rank": 60, "score": 207465.65216173275 }, { "content": "#[cfg(windows)]\n\nfn get_windows_path_var() -> Result<Option<String>, String> {\n\n use winreg::{\n\n enums::{HKEY_CURRENT_USER, KEY_READ, KEY_WRITE},\n\n RegKey,\n\n };\n\n\n\n let root = RegKey::predef(HKEY_CURRENT_USER);\n\n let environment = root\n\n .open_subkey_with_flags(\"Environment\", KEY_READ | KEY_WRITE)\n\n .map_err(|err| format!(\"Unable to open HKEY_CURRENT_USER\\\\Environment: {}\", err))?;\n\n\n\n let reg_value = environment.get_raw_value(\"PATH\");\n\n match reg_value {\n\n Ok(val) 
=> {\n\n if let Some(s) = string_from_winreg_value(&val) {\n\n Ok(Some(s))\n\n } else {\n\n println!(\"the registry key HKEY_CURRENT_USER\\\\Environment\\\\PATH does not contain valid Unicode. Not modifying the PATH variable\");\n\n Ok(None)\n\n }\n\n }\n\n Err(ref e) if e.kind() == io::ErrorKind::NotFound => Ok(Some(String::new())),\n\n Err(e) => Err(e.to_string()),\n\n }\n\n}\n\n\n", "file_path": "install/src/command.rs", "rank": 61, "score": 205887.8619361023 }, { "content": "pub fn parse_args<I, T>(args: I) -> Result<Args, Box<dyn Error>>\n\nwhere\n\n I: IntoIterator<Item = T>,\n\n T: Into<OsString> + Clone,\n\n{\n\n let matches = get_matches(args);\n\n let config_file = matches.value_of(\"config_file\").unwrap().to_string();\n\n let url = matches.value_of(\"url\").map(|x| x.to_string());\n\n\n\n let command = match matches.subcommand() {\n\n (\"distribute-tokens\", Some(matches)) => {\n\n Command::DistributeTokens(parse_distribute_tokens_args(matches)?)\n\n }\n\n (\"create-stake\", Some(matches)) => {\n\n Command::DistributeTokens(parse_create_stake_args(matches)?)\n\n }\n\n (\"distribute-stake\", Some(matches)) => {\n\n Command::DistributeTokens(parse_distribute_stake_args(matches)?)\n\n }\n\n (\"distribute-spl-tokens\", Some(matches)) => {\n", "file_path": "tokens/src/arg_parser.rs", "rank": 62, "score": 205675.49651153453 }, { "content": "pub fn gc(config_file: &str) -> Result<(), String> {\n\n let config = Config::load(config_file)?;\n\n\n\n let entries = fs::read_dir(&config.releases_dir)\n\n .map_err(|err| format!(\"Unable to read {}: {}\", config.releases_dir.display(), err))?;\n\n\n\n let mut releases = entries\n\n .filter_map(|entry| entry.ok())\n\n .filter_map(|entry| {\n\n entry\n\n .metadata()\n\n .ok()\n\n .map(|metadata| (entry.path(), metadata))\n\n })\n\n .filter_map(|(release_path, metadata)| {\n\n if metadata.is_dir() {\n\n Some((release_path, metadata))\n\n } else {\n\n None\n\n }\n", "file_path": "install/src/command.rs", "rank": 63, 
"score": 205481.09864144336 }, { "content": "fn parse_balances_args(matches: &ArgMatches<'_>) -> Result<BalancesArgs, Box<dyn Error>> {\n\n let mut wallet_manager = maybe_wallet_manager()?;\n\n let spl_token_args =\n\n pubkey_of_signer(matches, \"mint_address\", &mut wallet_manager)?.map(|mint| SplTokenArgs {\n\n mint,\n\n ..SplTokenArgs::default()\n\n });\n\n Ok(BalancesArgs {\n\n input_csv: value_t_or_exit!(matches, \"input_csv\", String),\n\n spl_token_args,\n\n })\n\n}\n\n\n", "file_path": "tokens/src/arg_parser.rs", "rank": 64, "score": 204705.63544811937 }, { "content": "#[bench]\n\n#[ignore]\n\nfn bench_bank_sync_process_native_loader_transactions(bencher: &mut Bencher) {\n\n do_bench_transactions(bencher, &sync_bencher, &create_native_loader_transactions);\n\n}\n\n\n", "file_path": "runtime/benches/bank.rs", "rank": 65, "score": 203626.42415472647 }, { "content": "#[bench]\n\nfn bench_process_entries_without_order_shuffeling(bencher: &mut Bencher) {\n\n bench_process_entries(false, bencher);\n\n}\n\n\n", "file_path": "core/benches/banking_stage.rs", "rank": 66, "score": 203624.4243327754 }, { "content": "#[doc(hidden)]\n\n#[cfg(debug_assertions)]\n\npub trait AutoTraitBreakSendSync: Send + Sync {}\n\n\n\n// Unused `solana_sdk::program_stubs!()` macro retained for source backwards compatibility with older programs\n\n#[macro_export]\n\n#[deprecated(\n\n since = \"1.4.3\",\n\n note = \"program_stubs macro is obsolete and can be safely removed\"\n\n)]\n\nmacro_rules! program_stubs {\n\n () => {};\n\n}\n\n\n\n/// Convenience macro for `AddAssign` with saturating arithmetic.\n\n/// Replace by `std::num::Saturating` once stable\n\n#[macro_export]\n\nmacro_rules! 
saturating_add_assign {\n\n ($i:expr, $v:expr) => {{\n\n $i = $i.saturating_add($v)\n\n }};\n\n}\n", "file_path": "sdk/src/lib.rs", "rank": 67, "score": 201736.6622711983 }, { "content": "pub fn maybe_wallet_manager() -> Result<Option<Arc<RemoteWalletManager>>, RemoteWalletError> {\n\n let wallet_manager = initialize_wallet_manager()?;\n\n let device_count = wallet_manager.update_devices()?;\n\n if device_count > 0 {\n\n Ok(Some(wallet_manager))\n\n } else {\n\n drop(wallet_manager);\n\n Ok(None)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_locator() {\n\n let pubkey = solana_sdk::pubkey::new_rand();\n\n let locator = Locator {\n\n manufacturer: Manufacturer::Ledger,\n", "file_path": "remote-wallet/src/remote_wallet.rs", "rank": 68, "score": 201731.77266635175 }, { "content": "fn semver_of(string: &str) -> Result<semver::Version, String> {\n\n if string.starts_with('v') {\n\n semver::Version::parse(string.split_at(1).1)\n\n } else {\n\n semver::Version::parse(string)\n\n }\n\n .map_err(|err| err.to_string())\n\n}\n\n\n", "file_path": "install/src/command.rs", "rank": 69, "score": 201410.94663952035 }, { "content": "fn load_elf(name: &str) -> Result<Vec<u8>, std::io::Error> {\n\n let path = create_bpf_path(name);\n\n let mut file = File::open(&path).expect(&format!(\"Unable to open {:?}\", path));\n\n let mut elf = Vec::new();\n\n file.read_to_end(&mut elf).unwrap();\n\n Ok(elf)\n\n}\n\n\n", "file_path": "programs/bpf/benches/bpf_loader.rs", "rank": 70, "score": 200883.54179071262 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn batch_send<S, T>(sock: &UdpSocket, packets: &[(T, S)]) -> Result<(), SendPktsError>\n\nwhere\n\n S: Borrow<SocketAddr>,\n\n T: AsRef<[u8]>,\n\n{\n\n let size = packets.len();\n\n #[allow(clippy::uninit_assumed_init)]\n\n let mut iovs = vec![unsafe { std::mem::MaybeUninit::uninit().assume_init() }; size];\n\n let mut addrs = vec![unsafe { std::mem::zeroed() }; size];\n\n let mut 
hdrs = vec![unsafe { std::mem::zeroed() }; size];\n\n for ((pkt, dest), hdr, iov, addr) in izip!(packets, &mut hdrs, &mut iovs, &mut addrs) {\n\n mmsghdr_for_packet(pkt.as_ref(), dest.borrow(), iov, addr, hdr);\n\n }\n\n sendmmsg_retry(sock, &mut hdrs)\n\n}\n\n\n", "file_path": "streamer/src/sendmmsg.rs", "rank": 71, "score": 200754.50187630672 }, { "content": "/// Helper function for programs to get the return data after calling [`GetMinimumDelegation`]\n\n///\n\n/// This fn handles calling [`get_return_data()`], ensures the result is from the correct\n\n/// program, and returns the correct type.\n\n///\n\n/// [`GetMinimumDelegation`]: super::instruction::StakeInstruction::GetMinimumDelegation\n\n/// [`get_return_data()`]: crate::program::get_return_data\n\nfn get_minimum_delegation_return_data() -> Result<u64, ProgramError> {\n\n crate::program::get_return_data()\n\n .ok_or(ProgramError::InvalidInstructionData)\n\n .and_then(|(program_id, return_data)| {\n\n (program_id == super::program::id())\n\n .then(|| return_data)\n\n .ok_or(ProgramError::IncorrectProgramId)\n\n })\n\n .and_then(|return_data| {\n\n return_data\n\n .try_into()\n\n .or(Err(ProgramError::InvalidInstructionData))\n\n })\n\n .map(u64::from_le_bytes)\n\n}\n\n\n", "file_path": "sdk/program/src/stake/tools.rs", "rank": 72, "score": 199945.69976097476 }, { "content": "#[cfg_attr(not(target_os = \"linux\"), allow(dead_code))]\n\nfn parse_udp_stats(reader: &mut impl BufRead) -> Result<UdpStats, String> {\n\n let mut udp_lines = Vec::default();\n\n for line in reader.lines() {\n\n let line = line.map_err(|e| e.to_string())?;\n\n if line.starts_with(\"Udp:\") {\n\n udp_lines.push(line);\n\n if udp_lines.len() == 2 {\n\n break;\n\n }\n\n }\n\n }\n\n if udp_lines.len() != 2 {\n\n return Err(format!(\n\n \"parse error, expected 2 lines, num lines: {}\",\n\n udp_lines.len()\n\n ));\n\n }\n\n\n\n let pairs: Vec<_> = udp_lines[0]\n\n .split_ascii_whitespace()\n", "file_path": 
"core/src/system_monitor_service.rs", "rank": 73, "score": 199096.71995664306 }, { "content": "/// Reads the supported TARGET triple for the given release\n\nfn load_release_target(release_dir: &Path) -> Result<String, String> {\n\n let mut version_yml = PathBuf::from(release_dir);\n\n version_yml.push(\"solana-release\");\n\n version_yml.push(\"version.yml\");\n\n\n\n let version = load_release_version(&version_yml)?;\n\n Ok(version.target)\n\n}\n\n\n", "file_path": "install/src/command.rs", "rank": 74, "score": 197102.79895223217 }, { "content": "fn no_outfile_arg<'a>() -> Arg<'a> {\n\n Arg::new(NO_OUTFILE_ARG.name)\n\n .long(NO_OUTFILE_ARG.long)\n\n .help(NO_OUTFILE_ARG.help)\n\n}\n\n\n", "file_path": "keygen/src/keygen.rs", "rank": 75, "score": 196409.5374711239 }, { "content": "fn language_arg<'a>() -> Arg<'a> {\n\n Arg::new(LANGUAGE_ARG.name)\n\n .long(LANGUAGE_ARG.long)\n\n .possible_values(&[\n\n \"english\",\n\n \"chinese-simplified\",\n\n \"chinese-traditional\",\n\n \"japanese\",\n\n \"spanish\",\n\n \"korean\",\n\n \"french\",\n\n \"italian\",\n\n ])\n\n .default_value(\"english\")\n\n .value_name(\"LANGUAGE\")\n\n .takes_value(true)\n\n .help(LANGUAGE_ARG.help)\n\n}\n\n\n", "file_path": "keygen/src/keygen.rs", "rank": 76, "score": 196409.5374711239 }, { "content": "fn no_passphrase_arg<'a>() -> Arg<'a> {\n\n Arg::new(NO_PASSPHRASE_ARG.name)\n\n .long(NO_PASSPHRASE_ARG.long)\n\n .alias(\"no-passphrase\")\n\n .help(NO_PASSPHRASE_ARG.help)\n\n}\n\n\n", "file_path": "keygen/src/keygen.rs", "rank": 77, "score": 196409.5374711239 }, { "content": "fn do_bench_shrink_packets(bencher: &mut Bencher, mut batches: Vec<PacketBatch>) {\n\n // verify packets\n\n bencher.iter(|| {\n\n let _ans = sigverify::shrink_batches(&mut batches);\n\n batches.iter_mut().for_each(|b| {\n\n b.packets\n\n .iter_mut()\n\n .for_each(|p| p.meta.set_discard(thread_rng().gen()))\n\n });\n\n });\n\n}\n\n\n", "file_path": "perf/benches/shrink.rs", "rank": 78, "score": 196352.35318494233 }, 
{ "content": "fn do_bench_dedup_packets(bencher: &mut Bencher, mut batches: Vec<PacketBatch>) {\n\n // verify packets\n\n let mut deduper = sigverify::Deduper::new(1_000_000, Duration::from_millis(2_000));\n\n bencher.iter(|| {\n\n let _ans = deduper.dedup_packets(&mut batches);\n\n deduper.reset();\n\n batches\n\n .iter_mut()\n\n .for_each(|b| b.packets.iter_mut().for_each(|p| p.meta.set_discard(false)));\n\n });\n\n}\n\n\n", "file_path": "perf/benches/dedup.rs", "rank": 79, "score": 196352.35318494233 }, { "content": "type Result<T> = std::result::Result<T, TpuSenderError>;\n\n\n\n/// Default number of slots used to build TPU socket fanout set\n\npub const DEFAULT_FANOUT_SLOTS: u64 = 12;\n\n\n\n/// Maximum number of slots used to build TPU socket fanout set\n\npub const MAX_FANOUT_SLOTS: u64 = 100;\n\n\n\n/// Config params for `TpuClient`\n\n#[derive(Clone, Debug)]\n\npub struct TpuClientConfig {\n\n /// The range of upcoming slots to include when determining which\n\n /// leaders to send transactions to (min: 1, max: `MAX_FANOUT_SLOTS`)\n\n pub fanout_slots: u64,\n\n}\n\n\n\nimpl Default for TpuClientConfig {\n\n fn default() -> Self {\n\n Self {\n\n fanout_slots: DEFAULT_FANOUT_SLOTS,\n", "file_path": "client/src/tpu_client.rs", "rank": 80, "score": 195569.20732186636 }, { "content": "type Result<T> = std::result::Result<T, PohRecorderError>;\n\n\n\npub type WorkingBankEntry = (Arc<Bank>, (Entry, u64));\n\n\n\n#[derive(Clone)]\n\npub struct BankStart {\n\n pub working_bank: Arc<Bank>,\n\n pub bank_creation_time: Arc<Instant>,\n\n}\n\n\n\nimpl BankStart {\n\n fn get_working_bank_if_not_expired(&self) -> Option<&Arc<Bank>> {\n\n if Bank::should_bank_still_be_processing_txs(\n\n &self.bank_creation_time,\n\n self.working_bank.ns_per_slot,\n\n ) {\n\n Some(&self.working_bank)\n\n } else {\n\n None\n\n }\n", "file_path": "poh/src/poh_recorder.rs", "rank": 81, "score": 195569.20732186636 }, { "content": "type InterceptedRequestResult = 
std::result::Result<Request<()>, Status>;\n\n\n\n#[derive(Clone)]\n\npub struct BigTableConnection {\n\n access_token: Option<AccessToken>,\n\n channel: tonic::transport::Channel,\n\n table_prefix: String,\n\n timeout: Option<Duration>,\n\n}\n\n\n\nimpl BigTableConnection {\n\n /// Establish a connection to the BigTable instance named `instance_name`. If read-only access\n\n /// is required, the `read_only` flag should be used to reduce the requested OAuth2 scope.\n\n ///\n\n /// The GOOGLE_APPLICATION_CREDENTIALS environment variable will be used to determine the\n\n /// program name that contains the BigTable instance in addition to access credentials.\n\n ///\n\n /// The BIGTABLE_EMULATOR_HOST environment variable is also respected.\n\n ///\n\n pub async fn new(\n", "file_path": "storage-bigtable/src/bigtable.rs", "rank": 83, "score": 194097.2339684274 }, { "content": "type Result<T> = std::result::Result<T, SigVerifyServiceError>;\n\n\n\npub struct SigVerifyStage {\n\n thread_hdl: JoinHandle<()>,\n\n}\n\n\n", "file_path": "core/src/sigverify_stage.rs", "rank": 84, "score": 193327.76976807223 }, { "content": "fn load_release_version(version_yml: &Path) -> Result<ReleaseVersion, String> {\n\n let file = File::open(&version_yml)\n\n .map_err(|err| format!(\"Unable to open {:?}: {:?}\", version_yml, err))?;\n\n let version: ReleaseVersion = serde_yaml::from_reader(file)\n\n .map_err(|err| format!(\"Unable to parse {:?}: {:?}\", version_yml, err))?;\n\n Ok(version)\n\n}\n\n\n", "file_path": "install/src/command.rs", "rank": 85, "score": 193263.46799085813 }, { "content": "fn word_count_arg<'a>() -> Arg<'a> {\n\n Arg::new(WORD_COUNT_ARG.name)\n\n .long(WORD_COUNT_ARG.long)\n\n .possible_values(&[\"12\", \"15\", \"18\", \"21\", \"24\"])\n\n .default_value(\"12\")\n\n .value_name(\"NUMBER\")\n\n .takes_value(true)\n\n .help(WORD_COUNT_ARG.help)\n\n}\n\n\n", "file_path": "keygen/src/keygen.rs", "rank": 86, "score": 193010.37220377024 }, { "content": "pub fn 
ed25519_verify_disabled(batches: &mut [PacketBatch]) {\n\n use rayon::prelude::*;\n\n let packet_count = count_packets_in_batches(batches);\n\n debug!(\"disabled ECDSA for {}\", packet_count);\n\n batches.into_par_iter().for_each(|batch| {\n\n batch\n\n .packets\n\n .par_iter_mut()\n\n .for_each(|p| p.meta.set_discard(false))\n\n });\n\n inc_new_counter_debug!(\"ed25519_verify_disabled\", packet_count);\n\n}\n\n\n", "file_path": "perf/src/sigverify.rs", "rank": 87, "score": 191969.13032579113 }, { "content": "fn verify<T: Pod + Verifiable>(invoke_context: &mut InvokeContext) -> Result<(), InstructionError> {\n\n let transaction_context = &invoke_context.transaction_context;\n\n let instruction_context = transaction_context.get_current_instruction_context()?;\n\n let instruction_data = instruction_context.get_instruction_data();\n\n let instruction = ProofInstruction::decode_data::<T>(instruction_data);\n\n\n\n let proof = instruction.ok_or_else(|| {\n\n ic_msg!(invoke_context, \"invalid proof data\");\n\n InstructionError::InvalidInstructionData\n\n })?;\n\n\n\n proof.verify().map_err(|err| {\n\n ic_msg!(invoke_context, \"proof verification failed: {:?}\", err);\n\n InstructionError::InvalidInstructionData\n\n })\n\n}\n\n\n", "file_path": "programs/zk-token-proof/src/lib.rs", "rank": 88, "score": 191405.58542673715 }, { "content": "pub fn read_u16(current: &mut usize, data: &[u8]) -> Result<u16, SanitizeError> {\n\n if data.len() < *current + 2 {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let mut fixed_data = [0u8; 2];\n\n fixed_data.copy_from_slice(&data[*current..*current + 2]);\n\n let e = u16::from_le_bytes(fixed_data);\n\n *current += 2;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 89, "score": 190151.75390508433 }, { "content": "pub fn load_genesis_accounts(file: &str, genesis_config: &mut GenesisConfig) -> io::Result<u64> {\n\n let mut lamports = 0;\n\n let accounts_file = File::open(file)?;\n\n\n\n let 
genesis_accounts: HashMap<String, Base64Account> =\n\n serde_yaml::from_reader(accounts_file)\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, format!(\"{:?}\", err)))?;\n\n\n\n for (key, account_details) in genesis_accounts {\n\n let pubkey = pubkey_from_str(key.as_str()).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Invalid pubkey/keypair {}: {:?}\", key, err),\n\n )\n\n })?;\n\n\n\n let owner_program_id = Pubkey::from_str(account_details.owner.as_str()).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Invalid owner: {}: {:?}\", account_details.owner, err),\n", "file_path": "genesis/src/main.rs", "rank": 90, "score": 190151.75390508433 }, { "content": "pub fn read_pubkey(current: &mut usize, data: &[u8]) -> Result<Pubkey, SanitizeError> {\n\n let len = std::mem::size_of::<Pubkey>();\n\n if data.len() < *current + len {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let e = Pubkey::new(&data[*current..*current + len]);\n\n *current += len;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 91, "score": 190151.75390508433 }, { "content": "pub fn read_u8(current: &mut usize, data: &[u8]) -> Result<u8, SanitizeError> {\n\n if data.len() < *current + 1 {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let e = data[*current];\n\n *current += 1;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 92, "score": 190151.75390508433 }, { "content": "fn signer_arg<'a>() -> Arg<'a> {\n\n Arg::new(SIGNER_ARG.name)\n\n .long(SIGNER_ARG.long)\n\n .takes_value(true)\n\n .value_name(\"PUBKEY=SIGNATURE\")\n\n .validator(|s| is_pubkey_sig(s))\n\n .requires(BLOCKHASH_ARG.name)\n\n .multiple_occurrences(true)\n\n .multiple_values(true)\n\n .help(SIGNER_ARG.help)\n\n}\n\n\n", "file_path": "clap-v3-utils/src/offline.rs", "rank": 93, "score": 189796.60454774444 }, { "content": "fn nonce_arg<'a>() -> Arg<'a> {\n\n Arg::new(NONCE_ARG.name)\n\n 
.long(NONCE_ARG.long)\n\n .takes_value(true)\n\n .value_name(\"PUBKEY\")\n\n .requires(BLOCKHASH_ARG.name)\n\n .validator(|s| is_valid_pubkey(s))\n\n .help(NONCE_ARG.help)\n\n}\n\n\n", "file_path": "clap-v3-utils/src/nonce.rs", "rank": 94, "score": 189796.60454774444 }, { "content": "fn run_bench_packet_discard(num_ips: usize, bencher: &mut Bencher) {\n\n solana_logger::setup();\n\n let len = 30 * 1000;\n\n let chunk_size = 1024;\n\n let tx = test_tx();\n\n let mut batches = to_packet_batches(&vec![tx; len], chunk_size);\n\n\n\n let mut total = 0;\n\n\n\n let ips: Vec<_> = (0..num_ips)\n\n .into_iter()\n\n .map(|_| {\n\n let mut addr = [0u16; 8];\n\n thread_rng().fill(&mut addr);\n\n std::net::IpAddr::from(addr)\n\n })\n\n .collect();\n\n\n\n for batch in batches.iter_mut() {\n\n total += batch.packets.len();\n", "file_path": "core/benches/sigverify_stage.rs", "rank": 95, "score": 189726.4829163511 }, { "content": "use std::fmt;\n\n\n\n#[derive(Default)]\n\npub struct Exit {\n\n exited: bool,\n\n exits: Vec<Box<dyn FnOnce() + Send + Sync>>,\n\n}\n\n\n\nimpl Exit {\n\n pub fn register_exit(&mut self, exit: Box<dyn FnOnce() + Send + Sync>) {\n\n if self.exited {\n\n exit();\n\n } else {\n\n self.exits.push(exit);\n\n }\n\n }\n\n\n\n pub fn exit(&mut self) {\n\n self.exited = true;\n\n for exit in self.exits.drain(..) 
{\n", "file_path": "sdk/src/exit.rs", "rank": 96, "score": 47.87611383412493 }, { "content": " return;\n\n }\n\n for slot in 0..num_cached_slots {\n\n db.flush_slot_cache(slot);\n\n }\n\n flush_done_sender.send(()).unwrap();\n\n })\n\n .unwrap()\n\n };\n\n\n\n let exit = Arc::new(AtomicBool::new(false));\n\n\n\n let t_spurious_signal = {\n\n let db = db.clone();\n\n let exit = exit.clone();\n\n std::thread::Builder::new()\n\n .name(\"account-cache-flush\".to_string())\n\n .spawn(move || loop {\n\n if exit.load(Ordering::Relaxed) {\n\n return;\n", "file_path": "runtime/src/accounts_db.rs", "rank": 97, "score": 44.709156567816294 }, { "content": "use {\n\n crossbeam_channel::{Receiver, RecvTimeoutError},\n\n std::{\n\n result::Result,\n\n sync::{\n\n atomic::{AtomicBool, Ordering},\n\n Arc,\n\n },\n\n thread::{self, Builder, JoinHandle},\n\n time::Duration,\n\n },\n\n};\n\n\n\npub struct StatsReporterService {\n\n thread_hdl: JoinHandle<()>,\n\n}\n\n\n\nimpl StatsReporterService {\n\n pub fn new(\n\n reporting_receiver: Receiver<Box<dyn FnOnce() + Send>>,\n", "file_path": "core/src/stats_reporter_service.rs", "rank": 98, "score": 44.16814044855598 }, { "content": " verified_receiver: Receiver<Vec<PacketBatch>>,\n\n shred_filter: F,\n\n bank_forks: Arc<RwLock<BankForks>>,\n\n retransmit_sender: Sender<Vec<Shred>>,\n\n ) -> JoinHandle<()>\n\n where\n\n F: 'static\n\n + Fn(&Pubkey, &Shred, Option<Arc<Bank>>, u64) -> bool\n\n + std::marker::Send\n\n + std::marker::Sync,\n\n {\n\n let mut stats = ReceiveWindowStats::default();\n\n Builder::new()\n\n .name(\"solana-window\".to_string())\n\n .spawn(move || {\n\n let _exit = Finalizer::new(exit.clone());\n\n trace!(\"{}: RECV_WINDOW started\", id);\n\n let thread_pool = rayon::ThreadPoolBuilder::new()\n\n .num_threads(get_thread_count())\n\n .build()\n", "file_path": "core/src/window_service.rs", "rank": 99, "score": 44.09120044386976 } ]
Rust
src/rate_limiter/src/persist.rs
parampavar/firecracker
a97b3f6c5c048c84a49ee501c48833deb8189c18
use super::*; use snapshot::Persist; use versionize::{VersionMap, Versionize, VersionizeResult}; use versionize_derive::Versionize; #[derive(Versionize)] pub struct TokenBucketState { size: u64, one_time_burst: Option<u64>, refill_time: u64, budget: u64, elapsed_ns: u64, } impl Persist<'_> for TokenBucket { type State = TokenBucketState; type ConstructorArgs = (); type Error = (); fn save(&self) -> Self::State { TokenBucketState { size: self.size, one_time_burst: self.one_time_burst, refill_time: self.refill_time, budget: self.budget, elapsed_ns: self.last_update.elapsed().as_nanos() as u64, } } fn restore(_: Self::ConstructorArgs, state: &Self::State) -> Result<Self, Self::Error> { let now = Instant::now(); let last_update = now .checked_sub(Duration::from_nanos(state.elapsed_ns)) .unwrap_or(now); let mut token_bucket = TokenBucket::new(state.size, state.one_time_burst, state.refill_time); token_bucket.budget = state.budget; token_bucket.last_update = last_update; Ok(token_bucket) } } #[derive(Versionize)] pub struct RateLimiterState { ops: Option<TokenBucketState>, bandwidth: Option<TokenBucketState>, } impl Persist<'_> for RateLimiter { type State = RateLimiterState; type ConstructorArgs = (); type Error = io::Error; fn save(&self) -> Self::State { RateLimiterState { ops: self.ops.as_ref().map(|ops| ops.save()), bandwidth: self.bandwidth.as_ref().map(|bw| bw.save()), } } fn restore(_: Self::ConstructorArgs, state: &Self::State) -> Result<Self, Self::Error> { let rate_limiter = RateLimiter { ops: state .ops .as_ref() .map(|ops| TokenBucket::restore((), ops).unwrap()), bandwidth: state .bandwidth .as_ref() .map(|bw| TokenBucket::restore((), bw).unwrap()), timer_fd: TimerFd::new_custom(ClockId::Monotonic, true, true)?, timer_active: false, }; Ok(rate_limiter) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_token_bucket_persistence() { let mut tb = TokenBucket::new(1000, Some(2000), 3000); let restored_tb = TokenBucket::restore((), &tb.save()).unwrap(); 
assert!(tb.partial_eq(&restored_tb)); tb.reduce(100); let restored_tb = TokenBucket::restore((), &tb.save()).unwrap(); assert!(tb.partial_eq(&restored_tb)); tb.replenish(100); let restored_tb = TokenBucket::restore((), &tb.save()).unwrap(); assert!(tb.partial_eq(&restored_tb)); let mut mem = vec![0; 4096]; let version_map = VersionMap::new(); tb.save() .serialize(&mut mem.as_mut_slice(), &version_map, 1) .unwrap(); let restored_tb = TokenBucket::restore( (), &TokenBucketState::deserialize(&mut mem.as_slice(), &version_map, 1).unwrap(), ) .unwrap(); assert!(tb.partial_eq(&restored_tb)); } #[test] fn test_rate_limiter_persistence() { let refill_time = 100_000; let mut rate_limiter = RateLimiter::new(100, None, refill_time, 10, None, refill_time).unwrap(); let restored_rate_limiter = RateLimiter::restore((), &rate_limiter.save()).expect("Unable to restore rate limiter"); assert!(rate_limiter .ops() .unwrap() .partial_eq(&restored_rate_limiter.ops().unwrap())); assert!(rate_limiter .bandwidth() .unwrap() .partial_eq(&restored_rate_limiter.bandwidth().unwrap())); assert_eq!( restored_rate_limiter.timer_fd.get_state(), TimerState::Disarmed ); rate_limiter.consume(10, TokenType::Bytes); rate_limiter.consume(10, TokenType::Ops); let restored_rate_limiter = RateLimiter::restore((), &rate_limiter.save()).expect("Unable to restore rate limiter"); assert!(rate_limiter .ops() .unwrap() .partial_eq(&restored_rate_limiter.ops().unwrap())); assert!(rate_limiter .bandwidth() .unwrap() .partial_eq(&restored_rate_limiter.bandwidth().unwrap())); assert_eq!( restored_rate_limiter.timer_fd.get_state(), TimerState::Disarmed ); rate_limiter.consume(1000, TokenType::Bytes); let restored_rate_limiter = RateLimiter::restore((), &rate_limiter.save()).expect("Unable to restore rate limiter"); assert!(rate_limiter .ops() .unwrap() .partial_eq(&restored_rate_limiter.ops().unwrap())); assert!(rate_limiter .bandwidth() .unwrap() .partial_eq(&restored_rate_limiter.bandwidth().unwrap())); let mut 
mem = vec![0; 4096]; let version_map = VersionMap::new(); rate_limiter .save() .serialize(&mut mem.as_mut_slice(), &version_map, 1) .unwrap(); let restored_rate_limiter = RateLimiter::restore( (), &RateLimiterState::deserialize(&mut mem.as_slice(), &version_map, 1).unwrap(), ) .unwrap(); assert!(rate_limiter .ops() .unwrap() .partial_eq(&restored_rate_limiter.ops().unwrap())); assert!(rate_limiter .bandwidth() .unwrap() .partial_eq(&restored_rate_limiter.bandwidth().unwrap())); } }
use super::*; use snapshot::Persist; use versionize::{VersionMap, Versionize, VersionizeResult}; use versionize_derive::Versionize; #[derive(Versionize)] pub struct TokenBucketState { size: u64, one_time_burst: Option<u64>,
.partial_eq(&restored_rate_limiter.ops().unwrap())); assert!(rate_limiter .bandwidth() .unwrap() .partial_eq(&restored_rate_limiter.bandwidth().unwrap())); assert_eq!( restored_rate_limiter.timer_fd.get_state(), TimerState::Disarmed ); rate_limiter.consume(10, TokenType::Bytes); rate_limiter.consume(10, TokenType::Ops); let restored_rate_limiter = RateLimiter::restore((), &rate_limiter.save()).expect("Unable to restore rate limiter"); assert!(rate_limiter .ops() .unwrap() .partial_eq(&restored_rate_limiter.ops().unwrap())); assert!(rate_limiter .bandwidth() .unwrap() .partial_eq(&restored_rate_limiter.bandwidth().unwrap())); assert_eq!( restored_rate_limiter.timer_fd.get_state(), TimerState::Disarmed ); rate_limiter.consume(1000, TokenType::Bytes); let restored_rate_limiter = RateLimiter::restore((), &rate_limiter.save()).expect("Unable to restore rate limiter"); assert!(rate_limiter .ops() .unwrap() .partial_eq(&restored_rate_limiter.ops().unwrap())); assert!(rate_limiter .bandwidth() .unwrap() .partial_eq(&restored_rate_limiter.bandwidth().unwrap())); let mut mem = vec![0; 4096]; let version_map = VersionMap::new(); rate_limiter .save() .serialize(&mut mem.as_mut_slice(), &version_map, 1) .unwrap(); let restored_rate_limiter = RateLimiter::restore( (), &RateLimiterState::deserialize(&mut mem.as_slice(), &version_map, 1).unwrap(), ) .unwrap(); assert!(rate_limiter .ops() .unwrap() .partial_eq(&restored_rate_limiter.ops().unwrap())); assert!(rate_limiter .bandwidth() .unwrap() .partial_eq(&restored_rate_limiter.bandwidth().unwrap())); } }
refill_time: u64, budget: u64, elapsed_ns: u64, } impl Persist<'_> for TokenBucket { type State = TokenBucketState; type ConstructorArgs = (); type Error = (); fn save(&self) -> Self::State { TokenBucketState { size: self.size, one_time_burst: self.one_time_burst, refill_time: self.refill_time, budget: self.budget, elapsed_ns: self.last_update.elapsed().as_nanos() as u64, } } fn restore(_: Self::ConstructorArgs, state: &Self::State) -> Result<Self, Self::Error> { let now = Instant::now(); let last_update = now .checked_sub(Duration::from_nanos(state.elapsed_ns)) .unwrap_or(now); let mut token_bucket = TokenBucket::new(state.size, state.one_time_burst, state.refill_time); token_bucket.budget = state.budget; token_bucket.last_update = last_update; Ok(token_bucket) } } #[derive(Versionize)] pub struct RateLimiterState { ops: Option<TokenBucketState>, bandwidth: Option<TokenBucketState>, } impl Persist<'_> for RateLimiter { type State = RateLimiterState; type ConstructorArgs = (); type Error = io::Error; fn save(&self) -> Self::State { RateLimiterState { ops: self.ops.as_ref().map(|ops| ops.save()), bandwidth: self.bandwidth.as_ref().map(|bw| bw.save()), } } fn restore(_: Self::ConstructorArgs, state: &Self::State) -> Result<Self, Self::Error> { let rate_limiter = RateLimiter { ops: state .ops .as_ref() .map(|ops| TokenBucket::restore((), ops).unwrap()), bandwidth: state .bandwidth .as_ref() .map(|bw| TokenBucket::restore((), bw).unwrap()), timer_fd: TimerFd::new_custom(ClockId::Monotonic, true, true)?, timer_active: false, }; Ok(rate_limiter) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_token_bucket_persistence() { let mut tb = TokenBucket::new(1000, Some(2000), 3000); let restored_tb = TokenBucket::restore((), &tb.save()).unwrap(); assert!(tb.partial_eq(&restored_tb)); tb.reduce(100); let restored_tb = TokenBucket::restore((), &tb.save()).unwrap(); assert!(tb.partial_eq(&restored_tb)); tb.replenish(100); let restored_tb = TokenBucket::restore((), 
&tb.save()).unwrap(); assert!(tb.partial_eq(&restored_tb)); let mut mem = vec![0; 4096]; let version_map = VersionMap::new(); tb.save() .serialize(&mut mem.as_mut_slice(), &version_map, 1) .unwrap(); let restored_tb = TokenBucket::restore( (), &TokenBucketState::deserialize(&mut mem.as_slice(), &version_map, 1).unwrap(), ) .unwrap(); assert!(tb.partial_eq(&restored_tb)); } #[test] fn test_rate_limiter_persistence() { let refill_time = 100_000; let mut rate_limiter = RateLimiter::new(100, None, refill_time, 10, None, refill_time).unwrap(); let restored_rate_limiter = RateLimiter::restore((), &rate_limiter.save()).expect("Unable to restore rate limiter"); assert!(rate_limiter .ops() .unwrap()
random
[ { "content": "/// Returns the memory address where the initrd could be loaded.\n\npub fn initrd_load_addr(guest_mem: &GuestMemoryMmap, initrd_size: usize) -> super::Result<u64> {\n\n let round_to_pagesize = |size| (size + (super::PAGE_SIZE - 1)) & !(super::PAGE_SIZE - 1);\n\n match GuestAddress(get_fdt_addr(&guest_mem)).checked_sub(round_to_pagesize(initrd_size) as u64)\n\n {\n\n Some(offset) => {\n\n if guest_mem.address_in_range(offset) {\n\n return Ok(offset.raw_value());\n\n } else {\n\n return Err(Error::InitrdAddress);\n\n }\n\n }\n\n None => return Err(Error::InitrdAddress),\n\n }\n\n}\n\n\n", "file_path": "src/arch/src/aarch64/mod.rs", "rank": 0, "score": 171057.72119777498 }, { "content": "/// Returns the memory address where the initrd could be loaded.\n\npub fn initrd_load_addr(guest_mem: &GuestMemoryMmap, initrd_size: usize) -> super::Result<u64> {\n\n let first_region = guest_mem\n\n .find_region(GuestAddress::new(0))\n\n .ok_or(Error::InitrdAddress)?;\n\n // It's safe to cast to usize because the size of a region can't be greater than usize.\n\n let lowmem_size = first_region.len() as usize;\n\n\n\n if lowmem_size < initrd_size {\n\n return Err(Error::InitrdAddress);\n\n }\n\n\n\n let align_to_pagesize = |address| address & !(super::PAGE_SIZE - 1);\n\n Ok(align_to_pagesize(lowmem_size - initrd_size) as u64)\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/mod.rs", "rank": 1, "score": 171057.72119777498 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct BusRange(u64, u64);\n\n\n\nimpl Eq for BusRange {}\n\n\n\nimpl PartialEq for BusRange {\n\n fn eq(&self, other: &BusRange) -> bool {\n\n self.0 == other.0\n\n }\n\n}\n\n\n\nimpl Ord for BusRange {\n\n fn cmp(&self, other: &BusRange) -> Ordering {\n\n self.0.cmp(&other.0)\n\n }\n\n}\n\n\n\nimpl PartialOrd for BusRange {\n\n fn partial_cmp(&self, other: &BusRange) -> Option<Ordering> {\n\n self.0.partial_cmp(&other.0)\n\n }\n", "file_path": "src/devices/src/bus.rs", "rank": 2, "score": 
167970.22611033375 }, { "content": "pub fn build_config_space(disk_size: u64) -> Vec<u8> {\n\n // We only support disk size, which uses the first two words of the configuration space.\n\n // If the image is not a multiple of the sector size, the tail bits are not exposed.\n\n // The config space is little endian.\n\n if disk_size % SECTOR_SIZE != 0 {\n\n warn!(\n\n \"Disk size {} is not a multiple of sector size {}; \\\n\n the remainder will not be visible to the guest.\",\n\n disk_size, SECTOR_SIZE\n\n );\n\n }\n\n let mut config = Vec::with_capacity(CONFIG_SPACE_SIZE);\n\n let num_sectors = disk_size >> SECTOR_SHIFT;\n\n for i in 0..CONFIG_SPACE_SIZE {\n\n config.push((num_sectors >> (8 * i)) as u8);\n\n }\n\n config\n\n}\n\n\n", "file_path": "src/devices/src/virtio/block/device.rs", "rank": 3, "score": 144544.47316644242 }, { "content": "/// Returns a timestamp in nanoseconds from a monotonic clock.\n\n///\n\n/// Uses `_rdstc` on `x86_64` and [`get_time`](fn.get_time.html) on other architectures.\n\npub fn timestamp_cycles() -> u64 {\n\n #[cfg(target_arch = \"x86_64\")]\n\n // Safe because there's nothing that can go wrong with this call.\n\n unsafe {\n\n std::arch::x86_64::_rdtsc() as u64\n\n }\n\n #[cfg(not(target_arch = \"x86_64\"))]\n\n {\n\n get_time(ClockType::Monotonic)\n\n }\n\n}\n\n\n", "file_path": "src/utils/src/time.rs", "rank": 4, "score": 142060.28591252354 }, { "content": "/// Returns the memory address where the kernel could be loaded.\n\npub fn get_kernel_start() -> u64 {\n\n layout::DRAM_MEM_START\n\n}\n\n\n", "file_path": "src/arch/src/aarch64/mod.rs", "rank": 5, "score": 136633.42111208773 }, { "content": "/// Returns the memory address where the kernel could be loaded.\n\npub fn get_kernel_start() -> u64 {\n\n layout::HIMEM_START\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/mod.rs", "rank": 6, "score": 136633.42111208773 }, { "content": "#[derive(Clone, Debug, Default, Versionize)]\n\nstruct Test {\n\n a: Vec<Dummy>,\n\n #[version(start = 
1)]\n\n b: u64,\n\n #[version(start = 2)]\n\n c: u64,\n\n #[version(start = 3)]\n\n d: u32,\n\n #[version(start = 4)]\n\n e: Vec<u64>,\n\n}\n\n\n", "file_path": "src/snapshot/benches/version_map.rs", "rank": 7, "score": 126825.568048319 }, { "content": "#[derive(Clone, Debug, Default, Versionize)]\n\nstruct Dummy {\n\n a: String,\n\n #[version(start = 2)]\n\n b: [u64; 32],\n\n}\n\n\n", "file_path": "src/snapshot/benches/version_map.rs", "rank": 8, "score": 126825.568048319 }, { "content": "/// Returns a timestamp in nanoseconds based on the provided clock type.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `clock_type` - Identifier of the Linux Kernel clock on which to act.\n\npub fn get_time(clock_type: ClockType) -> u64 {\n\n let mut time_struct = libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n };\n\n // Safe because the parameters are valid.\n\n unsafe { libc::clock_gettime(clock_type.into(), &mut time_struct) };\n\n seconds_to_nanoseconds(time_struct.tv_sec).expect(\"Time conversion overflow\") as u64\n\n + (time_struct.tv_nsec as u64)\n\n}\n\n\n", "file_path": "src/utils/src/time.rs", "rank": 9, "score": 121523.5093555788 }, { "content": "/// Read the MPIDR - Multiprocessor Affinity Register.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu` - Structure for the VCPU that holds the VCPU's fd.\n\npub fn read_mpidr(vcpu: &VcpuFd) -> Result<u64> {\n\n vcpu.get_one_reg(MPIDR_EL1).map_err(Error::GetSysRegister)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use aarch64::{arch_memory_regions, layout};\n\n use kvm_ioctls::Kvm;\n\n\n\n #[test]\n\n fn test_setup_regs() {\n\n let kvm = Kvm::new().unwrap();\n\n let vm = kvm.create_vm().unwrap();\n\n let vcpu = vm.create_vcpu(0).unwrap();\n\n let regions = arch_memory_regions(layout::FDT_MAX_SIZE + 0x1000);\n\n let mem = GuestMemoryMmap::from_ranges(&regions).expect(\"Cannot initialize memory\");\n\n\n\n match setup_regs(&vcpu, 0, 0x0, &mem).unwrap_err() {\n\n Error::SetCoreRegister(ref e) => 
assert_eq!(e.errno(), libc::ENOEXEC),\n", "file_path": "src/arch/src/aarch64/regs.rs", "rank": 10, "score": 115421.71543872195 }, { "content": "/// Replaces the `cpuid` entries corresponding to `function` with the entries from the host's cpuid.\n\npub fn use_host_cpuid_function(\n\n cpuid: &mut CpuId,\n\n function: u32,\n\n use_count: bool,\n\n) -> Result<(), Error> {\n\n // copy all the CpuId entries, except for the ones with the provided function\n\n cpuid.retain(|entry| entry.function != function);\n\n\n\n // add all the host leaves with the provided function\n\n let mut count: u32 = 0;\n\n while let Ok(entry) = get_cpuid(function, count) {\n\n if count > 0 && !use_count {\n\n break;\n\n }\n\n\n\n cpuid\n\n .push(kvm_cpuid_entry2 {\n\n function,\n\n index: count,\n\n flags: 0,\n", "file_path": "src/cpuid/src/transformer/common.rs", "rank": 11, "score": 110043.76441999515 }, { "content": "/// Returns a Vec of the valid memory addresses.\n\n/// These should be used to configure the GuestMemoryMmap structure for the platform.\n\n/// For x86_64 all addresses are valid from the start of the kernel except a\n\n/// carve out at the end of 32bit address space.\n\npub fn arch_memory_regions(size: usize) -> Vec<(GuestAddress, usize)> {\n\n // It's safe to cast MMIO_MEM_START to usize because it fits in a u32 variable\n\n // (It points to an address in the 32 bit space).\n\n match size.checked_sub(MMIO_MEM_START as usize) {\n\n // case1: guest memory fits before the gap\n\n None | Some(0) => vec![(GuestAddress(0), size)],\n\n // case2: guest memory extends beyond the gap\n\n Some(remaining) => vec![\n\n (GuestAddress(0), MMIO_MEM_START as usize),\n\n (GuestAddress(FIRST_ADDR_PAST_32BITS), remaining),\n\n ],\n\n }\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/mod.rs", "rank": 12, "score": 108253.55254839337 }, { "content": "/// Returns a Vec of the valid memory addresses for aarch64.\n\n/// See [`layout`](layout) module for a drawing of the specific memory model for this 
platform.\n\npub fn arch_memory_regions(size: usize) -> Vec<(GuestAddress, usize)> {\n\n let dram_size = min(size as u64, layout::DRAM_MEM_MAX_SIZE) as usize;\n\n vec![(GuestAddress(layout::DRAM_MEM_START), dram_size)]\n\n}\n\n\n", "file_path": "src/arch/src/aarch64/mod.rs", "rank": 13, "score": 108248.71589113324 }, { "content": "/// Configure base registers for a given CPU.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu` - Structure for the VCPU that holds the VCPU's fd.\n\n/// * `boot_ip` - Starting instruction pointer.\n\npub fn setup_regs(vcpu: &VcpuFd, boot_ip: u64) -> Result<()> {\n\n let regs: kvm_regs = kvm_regs {\n\n rflags: 0x0000_0000_0000_0002u64,\n\n rip: boot_ip,\n\n // Frame pointer. It gets a snapshot of the stack pointer (rsp) so that when adjustments are\n\n // made to rsp (i.e. reserving space for local variables or pushing values on to the stack),\n\n // local variables and function parameters are still accessible from a constant offset from rbp.\n\n rsp: super::layout::BOOT_STACK_POINTER as u64,\n\n // Starting stack pointer.\n\n rbp: super::layout::BOOT_STACK_POINTER as u64,\n\n // Must point to zero page address per Linux ABI. 
This is x86_64 specific.\n\n rsi: super::layout::ZERO_PAGE_START as u64,\n\n ..Default::default()\n\n };\n\n\n\n vcpu.set_regs(&regs).map_err(Error::SetBaseRegisters)\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/regs.rs", "rank": 14, "score": 108106.72587798483 }, { "content": "/// Automatically build the kvm struct for SET_SREGS from the kernel bit fields.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `entry` - The gdt entry.\n\n/// * `table_index` - Index of the entry in the gdt table.\n\npub fn kvm_segment_from_gdt(entry: u64, table_index: u8) -> kvm_segment {\n\n kvm_segment {\n\n base: get_base(entry),\n\n limit: get_limit(entry),\n\n selector: u16::from(table_index * 8),\n\n type_: get_type(entry),\n\n present: get_p(entry),\n\n dpl: get_dpl(entry),\n\n db: get_db(entry),\n\n s: get_s(entry),\n\n l: get_l(entry),\n\n g: get_g(entry),\n\n avl: get_avl(entry),\n\n padding: 0,\n\n unusable: match get_p(entry) {\n\n 0 => 1,\n\n _ => 0,\n\n },\n\n }\n\n}\n", "file_path": "src/arch/src/x86_64/gdt.rs", "rank": 15, "score": 106296.3909427888 }, { "content": "/// Constructor for a conventional segment GDT (or LDT) entry. 
Derived from the kernel's segment.h.\n\npub fn gdt_entry(flags: u16, base: u32, limit: u32) -> u64 {\n\n (((u64::from(base) & 0xff00_0000u64) << (56 - 24))\n\n | ((u64::from(flags) & 0x0000_f0ffu64) << 40)\n\n | ((u64::from(limit) & 0x000f_0000u64) << (48 - 16))\n\n | ((u64::from(base) & 0x00ff_ffffu64) << 16)\n\n | (u64::from(limit) & 0x0000_ffffu64))\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/gdt.rs", "rank": 16, "score": 102688.52616891146 }, { "content": "fn build_magic_id(format_version: u16) -> u64 {\n\n BASE_MAGIC_ID | format_version as u64\n\n}\n\n\n\nimpl Snapshot {\n\n /// Creates a new instance which can only be used to save a new snapshot.\n\n pub fn new(version_map: VersionMap, target_version: u16) -> Snapshot {\n\n Snapshot {\n\n version_map,\n\n hdr: SnapshotHdr::default(),\n\n format_version: SNAPSHOT_FORMAT_VERSION,\n\n target_version,\n\n }\n\n }\n\n\n\n /// Attempts to load an existing snapshot.\n\n pub fn load<T, O>(mut reader: &mut T, version_map: VersionMap) -> Result<O, Error>\n\n where\n\n T: Read,\n\n O: Versionize,\n", "file_path": "src/snapshot/src/lib.rs", "rank": 17, "score": 101806.34142330795 }, { "content": "// Euclid's two-thousand-year-old algorithm for finding the greatest common divisor.\n\nfn gcd(x: u64, y: u64) -> u64 {\n\n let mut x = x;\n\n let mut y = y;\n\n while y != 0 {\n\n let t = y;\n\n y = x % y;\n\n x = t;\n\n }\n\n x\n\n}\n\n\n\n/// TokenBucket provides a lower level interface to rate limiting with a\n\n/// configurable capacity, refill-rate and initial burst.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct TokenBucket {\n\n // Bucket defining traits.\n\n size: u64,\n\n // Initial burst size (number of free initial tokens, that can be consumed at no cost)\n\n one_time_burst: Option<u64>,\n\n // Complete refill time in milliseconds.\n", "file_path": "src/rate_limiter/src/lib.rs", "rank": 18, "score": 101766.51054518542 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let mut snapshot_mem = 
vec![0u8; 1024 * 1024 * 128];\n\n let mut vm = VersionMap::new();\n\n\n\n vm.new_version()\n\n .set_type_version(Test::type_id(), 2)\n\n .new_version()\n\n .set_type_version(Test::type_id(), 3)\n\n .new_version()\n\n .set_type_version(Test::type_id(), 4)\n\n .set_type_version(Dummy::type_id(), 2);\n\n\n\n let mut slice = &mut snapshot_mem.as_mut_slice();\n\n save(&mut slice, vm.clone());\n\n let snapshot_len = slice.as_ptr() as usize - snapshot_mem.as_slice().as_ptr() as usize;\n\n println!(\"Snapshot length: {} bytes\", snapshot_len);\n\n\n\n c.bench_function(\"Serialize in vspace=4\", |b| {\n\n b.iter(|| {\n\n save(\n", "file_path": "src/snapshot/benches/version_map.rs", "rank": 19, "score": 101418.27456224605 }, { "content": "/// Create a GIC device.\n\n///\n\n/// It will try to create by default a GICv3 device. If that fails it will try\n\n/// to fall-back to a GICv2 device.\n\npub fn create_gic(vm: &VmFd, vcpu_count: u64) -> Result<Box<dyn GICDevice>> {\n\n GICv3::new(vm, vcpu_count).or_else(|_| GICv2::new(vm, vcpu_count))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use kvm_ioctls::Kvm;\n\n\n\n #[test]\n\n fn test_create_gic() {\n\n let kvm = Kvm::new().unwrap();\n\n let vm = kvm.create_vm().unwrap();\n\n assert!(create_gic(&vm, 1).is_ok());\n\n }\n\n}\n", "file_path": "src/arch/src/aarch64/gic.rs", "rank": 20, "score": 93762.32999290185 }, { "content": "// Parse a magic_id and return the format version.\n\nfn get_format_version(magic_id: u64) -> Result<u16, Error> {\n\n let magic_arch = magic_id & BASE_MAGIC_ID_MASK;\n\n if magic_arch == BASE_MAGIC_ID {\n\n return Ok((magic_id & !BASE_MAGIC_ID_MASK) as u16);\n\n }\n\n Err(Error::InvalidMagic(magic_id))\n\n}\n\n\n", "file_path": "src/snapshot/src/lib.rs", "rank": 21, "score": 92995.58451088931 }, { "content": "fn get_base(entry: u64) -> u64 {\n\n ((((entry) & 0xFF00_0000_0000_0000) >> 32)\n\n | (((entry) & 0x0000_00FF_0000_0000) >> 16)\n\n | (((entry) & 0x0000_0000_FFFF_0000) >> 
16))\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/gdt.rs", "rank": 22, "score": 91351.54140701066 }, { "content": "/// Configure core registers for a given CPU.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu` - Structure for the VCPU that holds the VCPU's fd.\n\n/// * `cpu_id` - Index of current vcpu.\n\n/// * `boot_ip` - Starting instruction pointer.\n\n/// * `mem` - Reserved DRAM for current VM.\n\npub fn setup_regs(vcpu: &VcpuFd, cpu_id: u8, boot_ip: u64, mem: &GuestMemoryMmap) -> Result<()> {\n\n // Get the register index of the PSTATE (Processor State) register.\n\n vcpu.set_one_reg(arm64_core_reg!(pstate), PSTATE_FAULT_BITS_64)\n\n .map_err(Error::SetCoreRegister)?;\n\n\n\n // Other vCPUs are powered off initially awaiting PSCI wakeup.\n\n if cpu_id == 0 {\n\n // Setting the PC (Processor Counter) to the current program address (kernel address).\n\n vcpu.set_one_reg(arm64_core_reg!(pc), boot_ip)\n\n .map_err(Error::SetCoreRegister)?;\n\n\n\n // Last mandatory thing to set -> the address pointing to the FDT (also called DTB).\n\n // \"The device tree blob (dtb) must be placed on an 8-byte boundary and must\n\n // not exceed 2 megabytes in size.\" -> https://www.kernel.org/doc/Documentation/arm64/booting.txt.\n\n // We are choosing to place it the end of DRAM. 
See `get_fdt_addr`.\n\n vcpu.set_one_reg(arm64_core_reg!(regs), get_fdt_addr(mem) as u64)\n\n .map_err(Error::SetCoreRegister)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/arch/src/aarch64/regs.rs", "rank": 23, "score": 87597.57500376861 }, { "content": " def size(self):\n\n \"\"\"Return the size of the filesystem.\"\"\"\n", "file_path": "tests/host_tools/drive.py", "rank": 24, "score": 85408.62739730984 }, { "content": "#[derive(Clone, Debug, Default, Versionize)]\n\nstruct Test {\n\n dummy: Vec<Dummy>,\n\n field_x: u64,\n\n field0: u64,\n\n field1: u32,\n\n #[version(start = 2, default_fn = \"field2_default\")]\n\n field2: u64,\n\n #[version(\n\n start = 3,\n\n default_fn = \"field3_default\",\n\n ser_fn = \"field3_serialize\",\n\n de_fn = \"field3_deserialize\"\n\n )]\n\n field3: String,\n\n #[version(\n\n start = 4,\n\n default_fn = \"field4_default\",\n\n ser_fn = \"field4_serialize\",\n\n de_fn = \"field4_deserialize\"\n\n )]\n\n field4: Vec<u64>,\n\n}\n\n\n", "file_path": "src/snapshot/benches/main.rs", "rank": 25, "score": 84207.58431347972 }, { "content": "#[derive(Clone, Debug, Default, Versionize)]\n\nstruct Dummy {\n\n dummy: u64,\n\n string: String,\n\n}\n\n\n\nimpl Test {\n\n fn field2_default(_: u16) -> u64 {\n\n 20\n\n }\n\n fn field3_default(_: u16) -> String {\n\n \"default\".to_owned()\n\n }\n\n fn field4_default(_: u16) -> Vec<u64> {\n\n vec![1, 2, 3, 4]\n\n }\n\n\n\n fn field4_serialize(&mut self, target_version: u16) -> VersionizeResult<()> {\n\n // Fail if semantic serialization is called for the latest version.\n\n assert_ne!(target_version, Test::version());\n\n self.field0 = self.field4.iter().sum();\n", "file_path": "src/snapshot/benches/main.rs", "rank": 26, "score": 84207.58431347972 }, { "content": "#[derive(Default, Debug, Versionize)]\n\nstruct SnapshotHdr {\n\n /// Snapshot data version (firecracker version).\n\n data_version: u16,\n\n}\n\n\n\n/// The `Snapshot` API manages serialization and deserialization of collections of 
objects\n\n/// that implement the `Versionize` trait.\n\n#[derive(Debug)]\n\npub struct Snapshot {\n\n hdr: SnapshotHdr,\n\n format_version: u16,\n\n version_map: VersionMap,\n\n // Required for serialization.\n\n target_version: u16,\n\n}\n\n\n", "file_path": "src/snapshot/src/lib.rs", "rank": 27, "score": 82459.84808847084 }, { "content": "#[repr(C)]\n\nstruct sock_fprog {\n\n pub len: ::std::os::raw::c_ushort,\n\n pub filter: *const sock_filter,\n\n}\n\n\n\n/// Program made up of a sequence of BPF instructions.\n\npub type BpfProgram = Vec<sock_filter>;\n\n/// Reference to program made up of a sequence of BPF instructions.\n\npub type BpfProgramRef<'a> = &'a [sock_filter];\n\n/// Slice of BPF instructions.\n\npub type BpfInstructionSlice = [sock_filter];\n\n\n\nimpl SeccompCondition {\n\n /// Creates a new [`SeccompCondition`].\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `arg_number` - The index of the argument in the system call.\n\n /// * `arg_len` - The length of the argument value. See `SeccompCmpArgLen`.\n\n /// * `operator` - The comparison operator. 
See `SeccompCmpOp`.\n", "file_path": "src/seccomp/src/lib.rs", "rank": 28, "score": 82453.85091707081 }, { "content": "#[repr(C)]\n\n#[derive(Default, Clone, Copy)]\n\nstruct Descriptor {\n\n addr: u64,\n\n len: u32,\n\n flags: u16,\n\n next: u16,\n\n}\n\n\n\nunsafe impl ByteValued for Descriptor {}\n\n\n\n/// A virtio descriptor chain.\n\npub struct DescriptorChain<'a> {\n\n desc_table: GuestAddress,\n\n queue_size: u16,\n\n ttl: u16, // used to prevent infinite chain cycles\n\n\n\n /// Reference to guest memory\n\n pub mem: &'a GuestMemoryMmap,\n\n\n\n /// Index into the descriptor table\n\n pub index: u16,\n", "file_path": "src/devices/src/virtio/queue.rs", "rank": 29, "score": 82453.85091707081 }, { "content": "struct StatusLine {\n\n http_version: Version,\n\n status_code: StatusCode,\n\n}\n\n\n\nimpl StatusLine {\n\n fn new(http_version: Version, status_code: StatusCode) -> Self {\n\n Self {\n\n http_version,\n\n status_code,\n\n }\n\n }\n\n\n\n fn write_all<T: Write>(&self, mut buf: T) -> Result<(), WriteError> {\n\n buf.write_all(self.http_version.raw())?;\n\n buf.write_all(&[SP])?;\n\n buf.write_all(self.status_code.raw())?;\n\n buf.write_all(&[SP, CR, LF])?;\n\n\n\n Ok(())\n", "file_path": "src/micro_http/src/response.rs", "rank": 30, "score": 80833.78842570951 }, { "content": "/// MSR range\n\nstruct MsrRange {\n\n /// Base MSR address\n\n base: u32,\n\n /// Number of MSRs\n\n nmsrs: u32,\n\n}\n\n\n\nimpl MsrRange {\n\n /// Returns whether `msr` is contained in this MSR range.\n\n fn contains(&self, msr: u32) -> bool {\n\n self.base <= msr && msr < self.base + self.nmsrs\n\n }\n\n}\n\n\n\n/// Base MSR for APIC\n\nconst APIC_BASE_MSR: u32 = 0x800;\n\n\n\n/// Number of APIC MSR indexes\n\nconst APIC_MSR_INDEXES: u32 = 0x400;\n\n\n", "file_path": "src/arch/src/x86_64/msr.rs", "rank": 31, "score": 80833.78842570951 }, { "content": "#[derive(Clone, Copy, Eq, Hash, PartialEq)]\n\n#[cfg_attr(test, derive(Debug))]\n\nstruct ConnectionTuple {\n\n remote_addr: 
Ipv4Addr,\n\n remote_port: u16,\n\n}\n\n\n\nimpl ConnectionTuple {\n\n fn new(remote_addr: Ipv4Addr, remote_port: u16) -> Self {\n\n ConnectionTuple {\n\n remote_addr,\n\n remote_port,\n\n }\n\n }\n\n}\n\n\n\n/// Implements a minimalist TCP over IPv4 listener.\n\n///\n\n/// Forwards incoming TCP segments to the appropriate connection object, based on the associated\n\n/// tuple, or attempts to establish new connections (when receiving `SYN` segments). Aside from\n\n/// constructors, the handler operation is based on three methods:\n\n///\n", "file_path": "src/dumbo/src/tcp/handler.rs", "rank": 32, "score": 80833.78842570951 }, { "content": "#[inline]\n\npub fn bench_restore_v1(mut snapshot_mem: &[u8], vm: VersionMap, crc: bool) {\n\n if crc {\n\n Snapshot::load_with_crc64::<&[u8], Test>(&mut snapshot_mem, vm).unwrap();\n\n } else {\n\n Snapshot::load::<&[u8], Test>(&mut snapshot_mem, vm).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/snapshot/benches/main.rs", "rank": 33, "score": 80700.03875112106 }, { "content": "/// Sets up the cpuid entries for a given VCPU following a C3 template.\n\nstruct C3CpuidTransformer {}\n\n\n\nimpl CpuidTransformer for C3CpuidTransformer {\n\n fn entry_transformer_fn(&self, entry: &mut kvm_cpuid_entry2) -> Option<EntryTransformerFn> {\n\n match entry.function {\n\n leaf_0x1::LEAF_NUM => Some(update_feature_info_entry),\n\n leaf_0x7::LEAF_NUM => Some(update_structured_extended_entry),\n\n leaf_0xd::LEAF_NUM => Some(update_xsave_features_entry),\n\n leaf_0x80000001::LEAF_NUM => Some(update_extended_feature_info_entry),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/cpuid/src/template/c3.rs", "rank": 34, "score": 79327.95550854914 }, { "content": "#[derive(Debug, Deserialize, Serialize)]\n\n#[serde(deny_unknown_fields)]\n\nstruct ActionBody {\n\n action_type: ActionType,\n\n}\n\n\n", "file_path": "src/api_server/src/request/actions.rs", "rank": 35, "score": 79327.95550854914 }, { "content": "/// Sets up the cpuid entries for a 
given VCPU following a T2 template.\n\nstruct T2CpuidTransformer {}\n\n\n\nimpl CpuidTransformer for T2CpuidTransformer {\n\n fn entry_transformer_fn(&self, entry: &mut kvm_cpuid_entry2) -> Option<EntryTransformerFn> {\n\n match entry.function {\n\n leaf_0x1::LEAF_NUM => Some(update_feature_info_entry),\n\n leaf_0x7::LEAF_NUM => Some(update_structured_extended_entry),\n\n leaf_0xd::LEAF_NUM => Some(update_xsave_features_entry),\n\n leaf_0x80000001::LEAF_NUM => Some(update_extended_feature_info_entry),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/cpuid/src/template/t2.rs", "rank": 36, "score": 79327.95550854914 }, { "content": "#[derive(Default)]\n\nstruct SerializeToUtcTimestampMs;\n\n\n\nimpl Serialize for SerializeToUtcTimestampMs {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n serializer.serialize_i64(\n\n utils::time::get_time(utils::time::ClockType::Monotonic) as i64 / 1_000_000,\n\n )\n\n }\n\n}\n\n\n\n/// Structure storing all metrics while enforcing serialization support on them.\n\n#[derive(Default, Serialize)]\n\npub struct FirecrackerMetrics {\n\n utc_timestamp_ms: SerializeToUtcTimestampMs,\n\n /// API Server related metrics.\n\n pub api_server: ApiServerMetrics,\n\n /// A block device's related metrics.\n\n pub block: BlockDeviceMetrics,\n\n /// Metrics related to API GET requests.\n\n pub get_api_requests: GetRequestsMetrics,\n", "file_path": "src/logger/src/metrics.rs", "rank": 37, "score": 79327.95550854914 }, { "content": "struct PatchDrivePayload {\n\n // Leaving `fields` pub because ownership on it needs to be yielded to the\n\n // Request enum object. 
A getter couldn't move `fields` out of the borrowed\n\n // PatchDrivePayload object.\n\n pub fields: Value,\n\n}\n\n\n\nimpl PatchDrivePayload {\n\n /// Checks that `field_key` exists and that the value has the type Value::String.\n\n fn check_field_is_string(map: &Map<String, Value>, field_key: &str) -> Result<(), String> {\n\n match map.get(field_key) {\n\n None => {\n\n return Err(format!(\n\n \"Required key {} not present in the json.\",\n\n field_key\n\n ));\n\n }\n\n Some(id) => {\n\n // Check that field is a string.\n\n if id.as_str().is_none() {\n", "file_path": "src/api_server/src/request/drive.rs", "rank": 38, "score": 77924.68220458768 }, { "content": "struct ApiServerAdapter {\n\n api_event_fd: EventFd,\n\n from_api: Receiver<ApiRequest>,\n\n to_api: Sender<ApiResponse>,\n\n controller: RuntimeApiController,\n\n}\n\n\n\nimpl ApiServerAdapter {\n\n /// Runs the vmm to completion, while any arising control events are deferred\n\n /// to a `RuntimeApiController`.\n\n fn run_microvm(\n\n api_event_fd: EventFd,\n\n from_api: Receiver<ApiRequest>,\n\n to_api: Sender<ApiResponse>,\n\n vm_config: VmConfig,\n\n vmm: Arc<Mutex<Vmm>>,\n\n event_manager: &mut EventManager,\n\n ) {\n\n let api_adapter = Arc::new(Mutex::new(Self {\n\n api_event_fd,\n", "file_path": "src/firecracker/src/api_server_adapter.rs", "rank": 39, "score": 77924.68220458768 }, { "content": "struct VsockAndUnixPath {\n\n vsock: MutexVsockUnix,\n\n uds_path: String,\n\n}\n\n\n\n/// A builder of Vsock with Unix backend from 'VsockDeviceConfig'.\n\n#[derive(Default)]\n\npub struct VsockBuilder {\n\n inner: Option<VsockAndUnixPath>,\n\n}\n\n\n\nimpl VsockBuilder {\n\n /// Creates an empty Vsock with Unix backend Store.\n\n pub fn new() -> Self {\n\n Self { inner: None }\n\n }\n\n\n\n /// Inserts a Unix backend Vsock in the store.\n\n /// If an entry already exists, it will overwrite it.\n\n pub fn insert(&mut self, cfg: VsockDeviceConfig) -> Result<()> {\n", "file_path": 
"src/vmm/src/vmm_config/vsock.rs", "rank": 40, "score": 77924.68220458768 }, { "content": "/// Used for defining new types of metrics that can be either incremented with an unit\n\n/// or an arbitrary amount of units.\n\n// This trait helps with writing less code. It has to be in scope (via an use directive) in order\n\n// for its methods to be available to call on structs that implement it.\n\npub trait Metric {\n\n /// Adds `value` to the current counter.\n\n fn add(&self, value: usize);\n\n /// Increments by 1 unit the current counter.\n\n fn inc(&self) {\n\n self.add(1);\n\n }\n\n /// Returns current value of the counter.\n\n fn count(&self) -> usize;\n\n}\n\n\n\n/// Representation of a metric that is expected to be incremented from more than one thread, so more\n\n/// synchronization is necessary.\n\n// It's currently used for vCPU metrics. An alternative here would be\n\n// to have one instance of every metric for each thread, and to\n\n// aggregate them when writing. However this probably overkill unless we have a lot of vCPUs\n\n// incrementing metrics very often. 
Still, it's there if we ever need it :-s\n\n#[derive(Default)]\n\n// We will be keeping two values for each metric for being able to reset\n\n// counters on each metric.\n", "file_path": "src/logger/src/metrics.rs", "rank": 41, "score": 77344.38433817617 }, { "content": "/// Trait that helps in upcasting an object to Any\n\npub trait AsAny {\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn as_mut_any(&mut self) -> &mut dyn Any;\n\n}\n\nimpl<T: Any> AsAny for T {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn as_mut_any(&mut self) -> &mut dyn Any {\n\n self\n\n }\n\n}\n", "file_path": "src/devices/src/virtio/mod.rs", "rank": 42, "score": 77333.68488593245 }, { "content": "/// A set of RX indications (`PendingRx` items).\n\nstruct PendingRxSet {\n\n data: u16,\n\n}\n\n\n\nimpl PendingRxSet {\n\n /// Insert an item into the set.\n\n fn insert(&mut self, it: PendingRx) {\n\n self.data |= it.into_mask();\n\n }\n\n\n\n /// Remove an item from the set and return:\n\n /// - true, if the item was in the set; or\n\n /// - false, if the item wasn't in the set.\n\n fn remove(&mut self, it: PendingRx) -> bool {\n\n let ret = self.contains(it);\n\n self.data &= !it.into_mask();\n\n ret\n\n }\n\n\n\n /// Check if an item is present in this set.\n", "file_path": "src/devices/src/virtio/vsock/csm/mod.rs", "rank": 43, "score": 76613.83584253641 }, { "content": "/// A trait to express the ability to respond to I/O event readiness\n\n/// using callbacks.\n\npub trait Subscriber {\n\n /// Callback called when an event is available.\n\n ///\n\n /// # Arguments\n\n /// * event - the available `EpollEvent` ready for processing\n\n /// * event_manager - Reference to the `EventManager` that gives the implementor\n\n /// the possibility to directly call the required update operations.\n\n /// The only functions safe to call on this `EventManager` reference\n\n /// are `register`, `unregister` and `modify` which correspond to\n\n /// the `libc::epoll_ctl` operations.\n\n fn 
process(&mut self, event: &EpollEvent, event_manager: &mut EventManager);\n\n\n\n /// Returns a list of `EpollEvent` that this subscriber is interested in.\n\n fn interest_list(&self) -> Vec<EpollEvent>;\n\n}\n\n\n\n/// Manages I/O notifications using epoll mechanism.\n\npub struct EventManager {\n\n epoll: Epoll,\n\n subscribers: HashMap<RawFd, Arc<Mutex<dyn Subscriber>>>,\n", "file_path": "src/polly/src/event_manager.rs", "rank": 44, "score": 75723.1609135401 }, { "content": "/// Builds and starts a microVM based on the current Firecracker VmResources configuration.\n\n///\n\n/// This is the default build recipe, one could build other microVM flavors by using the\n\n/// independent functions in this module instead of calling this recipe.\n\n///\n\n/// An `Arc` reference of the built `Vmm` is also plugged in the `EventManager`, while another\n\n/// is returned.\n\npub fn build_microvm(\n\n vm_resources: &super::resources::VmResources,\n\n event_manager: &mut EventManager,\n\n seccomp_filter: BpfProgramRef,\n\n) -> std::result::Result<Arc<Mutex<Vmm>>, StartMicrovmError> {\n\n let boot_config = vm_resources\n\n .boot_source()\n\n .ok_or(StartMicrovmError::MissingKernelConfig)?;\n\n\n\n // Timestamp for measuring microVM boot duration.\n\n let request_ts = TimestampUs::default();\n\n\n\n let guest_memory = create_guest_memory(\n\n vm_resources\n\n .vm_config()\n\n .mem_size_mib\n\n .ok_or(StartMicrovmError::MissingMemSizeConfig)?,\n\n )?;\n\n let vcpu_config = vm_resources.vcpu_config();\n\n let track_dirty_pages = vm_resources.track_dirty_pages();\n", "file_path": "src/vmm/src/builder.rs", "rank": 45, "score": 75721.96496685264 }, { "content": "/// Wrapper over `HttpConnection` which keeps track of yielded\n\n/// requests and absorbed responses.\n\nstruct ClientConnection<T> {\n\n /// The `HttpConnection` object which handles data exchange.\n\n connection: HttpConnection<T>,\n\n /// The state of the connection in the `epoll` structure.\n\n state: 
ClientConnectionState,\n\n /// Represents the difference between yielded requests and\n\n /// absorbed responses.\n\n /// This has to be `0` if we want to drop the connection.\n\n in_flight_response_count: u32,\n\n}\n\n\n\nimpl<T: Read + Write> ClientConnection<T> {\n\n fn new(connection: HttpConnection<T>) -> Self {\n\n Self {\n\n connection,\n\n state: ClientConnectionState::AwaitingIncoming,\n\n in_flight_response_count: 0,\n\n }\n\n }\n\n\n", "file_path": "src/micro_http/src/server.rs", "rank": 46, "score": 74861.46769757598 }, { "content": "#[derive(Clone, Copy)]\n\nstruct MuxerKillQItem {\n\n key: ConnMapKey,\n\n kill_time: Instant,\n\n}\n\n\n\n/// The connection kill queue: a FIFO structure, storing the connections that are scheduled for\n\n/// termination.\n\npub struct MuxerKillQ {\n\n /// The kill queue contents.\n\n q: VecDeque<MuxerKillQItem>,\n\n\n\n /// The kill queue sync status:\n\n /// - when true, all connections that are awaiting termination are guaranteed to be in this\n\n /// queue;\n\n /// - when false, some connections may have been left out.\n\n synced: bool,\n\n}\n\n\n\nimpl MuxerKillQ {\n\n const SIZE: usize = defs::MUXER_KILLQ_SIZE;\n", "file_path": "src/devices/src/virtio/vsock/unix/muxer_killq.rs", "rank": 47, "score": 74235.15475307044 }, { "content": "/// Configures the system and should be called once per vm before starting vcpu threads.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `guest_mem` - The memory to be used by the guest.\n\n/// * `cmdline_addr` - Address in `guest_mem` where the kernel command line was loaded.\n\n/// * `cmdline_size` - Size of the kernel command line in bytes including the null terminator.\n\n/// * `initrd` - Information about where the ramdisk image was loaded in the `guest_mem`.\n\n/// * `num_cpus` - Number of virtual CPUs the guest will have.\n\npub fn configure_system(\n\n guest_mem: &GuestMemoryMmap,\n\n cmdline_addr: GuestAddress,\n\n cmdline_size: usize,\n\n initrd: &Option<InitrdConfig>,\n\n num_cpus: 
u8,\n\n) -> super::Result<()> {\n\n const KERNEL_BOOT_FLAG_MAGIC: u16 = 0xaa55;\n\n const KERNEL_HDR_MAGIC: u32 = 0x5372_6448;\n\n const KERNEL_LOADER_OTHER: u8 = 0xff;\n\n const KERNEL_MIN_ALIGNMENT_BYTES: u32 = 0x0100_0000; // Must be non-zero.\n\n let first_addr_past_32bits = GuestAddress(FIRST_ADDR_PAST_32BITS);\n\n let end_32bit_gap_start = GuestAddress(MMIO_MEM_START);\n\n\n\n let himem_start = GuestAddress(layout::HIMEM_START);\n\n\n\n // Note that this puts the mptable at the last 1k of Linux's 640k base RAM\n\n mptable::setup_mptable(guest_mem, num_cpus).map_err(Error::MpTableSetup)?;\n\n\n\n let mut params: BootParamsWrapper = BootParamsWrapper(boot_params::default());\n", "file_path": "src/arch/src/x86_64/mod.rs", "rank": 48, "score": 74224.76648422083 }, { "content": "/// Creates GuestMemory of `mem_size_mib` MiB in size.\n\npub fn create_guest_memory(\n\n mem_size_mib: usize,\n\n) -> std::result::Result<GuestMemoryMmap, StartMicrovmError> {\n\n let mem_size = mem_size_mib << 20;\n\n let arch_mem_regions = arch::arch_memory_regions(mem_size);\n\n\n\n Ok(GuestMemoryMmap::from_ranges(&arch_mem_regions)\n\n .map_err(StartMicrovmError::GuestMemoryMmap)?)\n\n}\n\n\n", "file_path": "src/vmm/src/builder.rs", "rank": 49, "score": 74222.13387674215 }, { "content": "/// Trait for objects that need custom initialization and teardown during the Vmm lifetime.\n\npub trait VmmEventsObserver {\n\n /// This function will be called during microVm boot.\n\n fn on_vmm_boot(&mut self) -> std::result::Result<(), utils::errno::Error> {\n\n Ok(())\n\n }\n\n /// This function will be called on microVm teardown.\n\n fn on_vmm_stop(&mut self) -> std::result::Result<(), utils::errno::Error> {\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Shorthand result type for internal VMM commands.\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// Shorthand type for KVM dirty page bitmap.\n\npub type DirtyBitmap = HashMap<usize, Vec<u64>>;\n\n\n\n/// Contains the state and associated 
methods required for the Firecracker VMM.\n\npub struct Vmm {\n\n events_observer: Option<Box<dyn VmmEventsObserver>>,\n", "file_path": "src/vmm/src/lib.rs", "rank": 50, "score": 74215.48196213634 }, { "content": "/// Configures the system for booting Linux.\n\npub fn configure_system_for_boot(\n\n vmm: &Vmm,\n\n vcpus: &mut [Vcpu],\n\n _vcpu_config: VcpuConfig,\n\n entry_addr: GuestAddress,\n\n initrd: &Option<InitrdConfig>,\n\n kernel_cmdline: KernelCmdline,\n\n) -> std::result::Result<(), StartMicrovmError> {\n\n #[cfg(target_arch = \"x86_64\")]\n\n {\n\n for vcpu in vcpus.iter_mut() {\n\n vcpu.configure_x86_64_for_boot(vmm.guest_memory(), entry_addr, &_vcpu_config)\n\n .map_err(Error::Vcpu)\n\n .map_err(StartMicrovmError::Internal)?;\n\n }\n\n\n\n // Write the kernel command line to guest memory. This is x86_64 specific, since on\n\n // aarch64 the command line will be specified through the FDT.\n\n kernel::loader::load_cmdline(\n\n vmm.guest_memory(),\n", "file_path": "src/vmm/src/builder.rs", "rank": 51, "score": 74215.48196213634 }, { "content": "/// Sets up the serial device.\n\npub fn setup_serial_device(\n\n event_manager: &mut EventManager,\n\n input: Box<dyn devices::legacy::ReadableFd + Send>,\n\n out: Box<dyn io::Write + Send>,\n\n) -> std::result::Result<Arc<Mutex<Serial>>, StartMicrovmError> {\n\n let interrupt_evt = EventFd::new(libc::EFD_NONBLOCK)\n\n .map_err(Error::EventFd)\n\n .map_err(StartMicrovmError::Internal)?;\n\n let serial = Arc::new(Mutex::new(Serial::new_in_out(interrupt_evt, input, out)));\n\n if let Err(e) = event_manager.add_subscriber(serial.clone()) {\n\n // TODO: We just log this message, and immediately return Ok, instead of returning the\n\n // actual error because this operation always fails with EPERM when adding a fd which\n\n // has been redirected to /dev/null via dup2 (this may happen inside the jailer).\n\n // Find a better solution to this (and think about the state of the serial device\n\n // while we're at it).\n\n 
warn!(\"Could not add serial input event to epoll: {:?}\", e);\n\n }\n\n Ok(serial)\n\n}\n\n\n", "file_path": "src/vmm/src/builder.rs", "rank": 52, "score": 74215.48196213634 }, { "content": "/// Trait containing helper methods for bit operations.\n\npub trait BitHelper {\n\n /// Reads the value of the bit at position `pos`\n\n fn read_bit(&self, pos: u32) -> bool;\n\n\n\n /// Changes the value of the bit at position `pos` to `val`\n\n fn write_bit(&mut self, pos: u32, val: bool) -> &mut Self;\n\n\n\n /// Reads the value stored within the specified range of bits\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// #[macro_use]\n\n /// extern crate cpuid;\n\n /// use cpuid::bit_helper::*;\n\n ///\n\n /// fn main() {\n\n /// let val: u32 = 0b000010001000;\n\n /// let range = BitRange {\n\n /// msb_index: 7,\n", "file_path": "src/cpuid/src/bit_helper.rs", "rank": 53, "score": 74215.48196213634 }, { "content": "#[cfg(target_arch = \"aarch64\")]\n\npub fn setup_interrupt_controller(\n\n vm: &mut Vm,\n\n vcpu_count: u8,\n\n) -> std::result::Result<(), StartMicrovmError> {\n\n vm.setup_irqchip(vcpu_count)\n\n .map_err(Error::Vm)\n\n .map_err(StartMicrovmError::Internal)\n\n}\n\n\n", "file_path": "src/vmm/src/builder.rs", "rank": 54, "score": 74215.48196213634 }, { "content": "/// Generic trait that provides methods for transforming the cpuid\n\npub trait CpuidTransformer {\n\n /// Trait main function. It processes the cpuid and makes the desired transformations.\n\n /// The default logic can be overwritten if needed. 
For example see `AmdCpuidTransformer`.\n\n fn process_cpuid(&self, cpuid: &mut CpuId, vm_spec: &VmSpec) -> Result<(), Error> {\n\n self.process_entries(cpuid, vm_spec)\n\n }\n\n\n\n /// Iterates through all the cpuid entries and calls the associated transformer for each one.\n\n fn process_entries(&self, cpuid: &mut CpuId, vm_spec: &VmSpec) -> Result<(), Error> {\n\n for entry in cpuid.as_mut_slice().iter_mut() {\n\n let maybe_transformer_fn = self.entry_transformer_fn(entry);\n\n\n\n if let Some(transformer_fn) = maybe_transformer_fn {\n\n transformer_fn(entry, vm_spec)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n", "file_path": "src/cpuid/src/transformer/mod.rs", "rank": 55, "score": 74215.48196213634 }, { "content": "/// Trait for GIC devices.\n\npub trait GICDevice {\n\n /// Returns the file descriptor of the GIC device\n\n fn device_fd(&self) -> &DeviceFd;\n\n\n\n /// Returns an array with GIC device properties\n\n fn device_properties(&self) -> &[u64];\n\n\n\n /// Returns the number of vCPUs this GIC handles\n\n fn vcpu_count(&self) -> u64;\n\n\n\n /// Returns the fdt compatibility property of the device\n\n fn fdt_compatibility(&self) -> &str;\n\n\n\n /// Returns the maint_irq fdt property of the device\n\n fn fdt_maint_irq(&self) -> u32;\n\n\n\n /// Returns the GIC version of the device\n\n fn version() -> u32\n\n where\n\n Self: Sized;\n", "file_path": "src/arch/src/aarch64/gic.rs", "rank": 56, "score": 74215.48196213634 }, { "content": "/// Writes the command line string to the given memory slice.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `guest_mem` - A u8 slice that will be partially overwritten by the command line.\n\n/// * `guest_addr` - The address in `guest_mem` at which to load the command line.\n\n/// * `cmdline` - The kernel command line as CString.\n\npub fn load_cmdline(\n\n guest_mem: &GuestMemoryMmap,\n\n guest_addr: GuestAddress,\n\n cmdline: &CString,\n\n) -> std::result::Result<(), CmdlineError> {\n\n let raw_cmdline = 
cmdline.as_bytes_with_nul();\n\n if raw_cmdline.len() <= 1 {\n\n return Ok(());\n\n }\n\n\n\n let cmdline_last_addr = guest_addr\n\n .checked_add(raw_cmdline.len() as u64 - 1)\n\n .ok_or(CmdlineError::CommandLineOverflow)?; // Extra for null termination.\n\n\n\n if cmdline_last_addr > guest_mem.last_addr() {\n\n return Err(CmdlineError::CommandLineOverflow);\n\n }\n\n\n\n guest_mem\n\n .write_slice(raw_cmdline, guest_addr)\n", "file_path": "src/kernel/src/loader/mod.rs", "rank": 57, "score": 74215.48196213634 }, { "content": "/// An abstract interface for saving/restoring a component using a specific state.\n\npub trait Persist<'a>\n\nwhere\n\n Self: Sized,\n\n{\n\n /// The type of the object representing the state of the component.\n\n type State;\n\n /// The type of the object holding the constructor arguments.\n\n type ConstructorArgs;\n\n /// The type of the error that can occur while constructing the object.\n\n type Error;\n\n\n\n /// Returns the current state of the component.\n\n fn save(&self) -> Self::State;\n\n /// Constructs a component from a specified state.\n\n fn restore(\n\n constructor_args: Self::ConstructorArgs,\n\n state: &Self::State,\n\n ) -> std::result::Result<Self, Self::Error>;\n\n}\n", "file_path": "src/snapshot/src/persist.rs", "rank": 58, "score": 73866.79920801635 }, { "content": "pub fn restore_stdin() {\n\n let stdin = io::stdin();\n\n stdin.lock().set_canon_mode().unwrap();\n\n}\n\n\n", "file_path": "src/vmm/tests/test_utils/mod.rs", "rank": 59, "score": 72815.66195940404 }, { "content": "/// Trait for devices to be added to the Flattened Device Tree.\n\npub trait DeviceInfoForFDT {\n\n /// Returns the address where this device will be loaded.\n\n fn addr(&self) -> u64;\n\n /// Returns the associated interrupt for this device.\n\n fn irq(&self) -> u32;\n\n /// Returns the amount of memory that needs to be reserved for this device.\n\n fn length(&self) -> u64;\n\n}\n\n\n\n/// Errors thrown while configuring the Flattened Device Tree 
for aarch64.\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// Failed to append node to the FDT.\n\n AppendFDTNode(io::Error),\n\n /// Failed to append a property to the FDT.\n\n AppendFDTProperty(io::Error),\n\n /// Syscall for creating FDT failed.\n\n CreateFDT(io::Error),\n\n /// Failed to obtain a C style string.\n\n CstringFDTTransform(NulError),\n\n /// Failure in calling syscall for terminating this FDT.\n\n FinishFDTReserveMap(io::Error),\n\n /// Failure in writing FDT in memory.\n\n WriteFDTToMemory(GuestMemoryError),\n\n}\n", "file_path": "src/arch/src/aarch64/fdt.rs", "rank": 60, "score": 72815.66195940404 }, { "content": "/// Any channel that handles vsock packet traffic: sending and receiving packets. Since we're\n\n/// implementing the device model here, our responsibility is to always process the sending of\n\n/// packets (i.e. the TX queue). So, any locally generated data, addressed to the driver (e.g.\n\n/// a connection response or RST), will have to be queued, until we get to processing the RX queue.\n\n///\n\n/// Note: `recv_pkt()` and `send_pkt()` are named analogous to `Read::read()` and `Write::write()`,\n\n/// respectively. 
I.e.\n\n/// - `recv_pkt(&mut pkt)` will read data from the channel, and place it into `pkt`; and\n\n/// - `send_pkt(&pkt)` will fetch data from `pkt`, and place it into the channel.\n\npub trait VsockChannel {\n\n /// Read/receive an incoming packet from the channel.\n\n fn recv_pkt(&mut self, pkt: &mut VsockPacket) -> Result<()>;\n\n\n\n /// Write/send a packet through the channel.\n\n fn send_pkt(&mut self, pkt: &VsockPacket) -> Result<()>;\n\n\n\n /// Checks whether there is pending incoming data inside the channel, meaning that a subsequent\n\n /// call to `recv_pkt()` won't fail.\n\n fn has_pending_rx(&self) -> bool;\n\n}\n\n\n", "file_path": "src/devices/src/virtio/vsock/mod.rs", "rank": 61, "score": 72815.66195940404 }, { "content": "/// Configures the logger as described in `logger_cfg`.\n\npub fn init_logger(\n\n logger_cfg: LoggerConfig,\n\n firecracker_version: &str,\n\n) -> std::result::Result<(), LoggerConfigError> {\n\n LOGGER\n\n .set_max_level(logger_cfg.level.into())\n\n .set_include_origin(logger_cfg.show_log_origin, logger_cfg.show_log_origin)\n\n .set_include_level(logger_cfg.show_level);\n\n\n\n let writer = FcLineWriter::new(\n\n open_file_nonblock(&logger_cfg.log_path)\n\n .map_err(|e| LoggerConfigError::InitializationFailure(e.to_string()))?,\n\n );\n\n LOGGER\n\n .init(\n\n format!(\"Running {} v{}\", \"Firecracker\", firecracker_version),\n\n Box::new(writer),\n\n )\n\n .map_err(|e| LoggerConfigError::InitializationFailure(e.to_string()))\n\n}\n", "file_path": "src/vmm/src/vmm_config/logger.rs", "rank": 62, "score": 72815.66195940404 }, { "content": "pub fn run_with_api(\n\n seccomp_filter: BpfProgram,\n\n config_json: Option<String>,\n\n bind_path: PathBuf,\n\n instance_info: InstanceInfo,\n\n start_time_us: Option<u64>,\n\n start_time_cpu_us: Option<u64>,\n\n) {\n\n // FD to notify of API events. 
This is a blocking eventfd by design.\n\n // It is used in the config/pre-boot loop which is a simple blocking loop\n\n // which only consumes API events.\n\n let api_event_fd = EventFd::new(0).expect(\"Cannot create API Eventfd.\");\n\n // Channels for both directions between Vmm and Api threads.\n\n let (to_vmm, from_api) = channel();\n\n let (to_api, from_vmm) = channel();\n\n\n\n // MMDS only supported with API.\n\n let mmds_info = MMDS.clone();\n\n let api_shared_info = Arc::new(RwLock::new(instance_info));\n\n let vmm_shared_info = api_shared_info.clone();\n", "file_path": "src/firecracker/src/api_server_adapter.rs", "rank": 63, "score": 72815.66195940404 }, { "content": "pub fn sigrtmin() -> c_int {\n\n unsafe { __libc_current_sigrtmin() }\n\n}\n\n\n", "file_path": "src/utils/src/signal.rs", "rank": 64, "score": 72245.11253569492 }, { "content": "pub fn sigrtmax() -> c_int {\n\n unsafe { __libc_current_sigrtmax() }\n\n}\n", "file_path": "src/utils/src/signal.rs", "rank": 65, "score": 72245.11253569492 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct BootParamsWrapper(boot_params);\n\n\n\n// It is safe to initialize BootParamsWrap which is a wrapper over `boot_params` (a series of ints).\n\nunsafe impl ByteValued for BootParamsWrapper {}\n\n\n\n/// Errors thrown while configuring x86_64 system.\n\n#[derive(Debug, PartialEq)]\n\npub enum Error {\n\n /// Invalid e820 setup params.\n\n E820Configuration,\n\n /// Error writing MP table to memory.\n\n MpTableSetup(mptable::Error),\n\n /// Error writing the zero page of guest memory.\n\n ZeroPageSetup,\n\n /// Failed to compute initrd address.\n\n InitrdAddress,\n\n}\n\n\n\n// Where BIOS/VGA magic would live on a real PC.\n\nconst EBDA_START: u64 = 0x9fc00;\n\nconst FIRST_ADDR_PAST_32BITS: u64 = (1 << 32);\n\nconst MEM_32BIT_GAP_SIZE: u64 = (768 << 20);\n\n/// The start of the memory area reserved for MMIO devices.\n\npub const MMIO_MEM_START: u64 = FIRST_ADDR_PAST_32BITS - MEM_32BIT_GAP_SIZE;\n\n\n", 
"file_path": "src/arch/src/x86_64/mod.rs", "rank": 66, "score": 72147.34803156325 }, { "content": "pub fn set_panic_hook() {\n\n panic::set_hook(Box::new(move |_| {\n\n restore_stdin();\n\n unsafe {\n\n libc::exit(VMM_ERR_EXIT);\n\n }\n\n }));\n\n}\n", "file_path": "src/vmm/tests/test_utils/mod.rs", "rank": 67, "score": 71508.04144604565 }, { "content": "pub fn update_feature_info_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n use cpu_leaf::leaf_0x1::*;\n\n\n\n let max_cpus_per_package = u32::from(common::get_max_cpus_per_package(vm_spec.cpu_count)?);\n\n\n\n // X86 hypervisor feature\n\n entry.ecx.write_bit(ecx::HYPERVISOR_BITINDEX, true);\n\n\n\n entry\n\n .ebx\n\n .write_bits_in_range(&ebx::APICID_BITRANGE, u32::from(vm_spec.cpu_id))\n\n .write_bits_in_range(&ebx::CLFLUSH_SIZE_BITRANGE, EBX_CLFLUSH_CACHELINE)\n\n .write_bits_in_range(&ebx::CPU_COUNT_BITRANGE, max_cpus_per_package);\n\n\n\n // A value of 1 for HTT indicates the value in CPUID.1.EBX[23:16]\n\n // (the Maximum number of addressable IDs for logical processors in this package)\n\n // is valid for the package\n\n entry\n\n .edx\n\n .write_bit(edx::HTT_BITINDEX, vm_spec.cpu_count > 1);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cpuid/src/transformer/common.rs", "rank": 68, "score": 71508.04144604565 }, { "content": "pub fn update_structured_extended_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n _vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n use cpu_leaf::leaf_0x7::index0::*;\n\n\n\n // according to the EPYC PPR, only the leaf 0x7 with index 0 contains the\n\n // structured extended feature identifiers\n\n if entry.index == 0 {\n\n // KVM sets this bit no matter what but this feature is not supported by hardware\n\n entry.edx.write_bit(edx::ARCH_CAPABILITIES_BITINDEX, false);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cpuid/src/transformer/amd.rs", "rank": 69, "score": 71508.04144604565 }, { "content": "pub fn 
update_cache_parameters_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n use cpu_leaf::leaf_cache_parameters::*;\n\n\n\n match entry.eax.read_bits_in_range(&eax::CACHE_LEVEL_BITRANGE) {\n\n // L1 & L2 Cache\n\n 1 | 2 => {\n\n // The L1 & L2 cache is shared by at most 2 hyperthreads\n\n entry.eax.write_bits_in_range(\n\n &eax::MAX_CPUS_PER_CORE_BITRANGE,\n\n (vm_spec.cpu_count > 1 && vm_spec.ht_enabled) as u32,\n\n );\n\n }\n\n // L3 Cache\n\n 3 => {\n\n // The L3 cache is shared among all the logical threads\n\n entry.eax.write_bits_in_range(\n\n &eax::MAX_CPUS_PER_CORE_BITRANGE,\n\n u32::from(vm_spec.cpu_count - 1),\n\n );\n\n }\n\n _ => (),\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cpuid/src/transformer/common.rs", "rank": 70, "score": 71508.04144604565 }, { "content": "pub fn update_feature_info_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n use cpu_leaf::leaf_0x1::*;\n\n\n\n common::update_feature_info_entry(entry, vm_spec)?;\n\n\n\n entry.ecx.write_bit(ecx::TSC_DEADLINE_TIMER_BITINDEX, true);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cpuid/src/transformer/intel.rs", "rank": 71, "score": 71508.04144604565 }, { "content": "pub fn update_brand_string_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n let brand_string = &vm_spec.brand_string;\n\n entry.eax = brand_string.get_reg_for_leaf(entry.function, BsReg::EAX);\n\n entry.ebx = brand_string.get_reg_for_leaf(entry.function, BsReg::EBX);\n\n entry.ecx = brand_string.get_reg_for_leaf(entry.function, BsReg::ECX);\n\n entry.edx = brand_string.get_reg_for_leaf(entry.function, BsReg::EDX);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cpuid/src/transformer/common.rs", "rank": 72, "score": 71508.04144604565 }, { "content": "pub fn parse_put_snapshot(\n\n body: &Body,\n\n request_type_from_path: Option<&&str>,\n\n) -> Result<ParsedRequest, Error> {\n\n match 
request_type_from_path {\n\n Some(&request_type) => match request_type {\n\n \"create\" => Ok(ParsedRequest::Sync(VmmAction::CreateSnapshot(\n\n serde_json::from_slice::<CreateSnapshotParams>(body.raw())\n\n .map_err(Error::SerdeJson)?,\n\n ))),\n\n \"load\" => Ok(ParsedRequest::Sync(VmmAction::LoadSnapshot(\n\n serde_json::from_slice::<LoadSnapshotParams>(body.raw())\n\n .map_err(Error::SerdeJson)?,\n\n ))),\n\n _ => Err(Error::InvalidPathMethod(\n\n format!(\"/snapshot/{}\", request_type),\n\n Method::Put,\n\n )),\n\n },\n\n None => Err(Error::Generic(\n\n StatusCode::BadRequest,\n\n \"Missing snapshot operation type.\".to_string(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/api_server/src/request/snapshot.rs", "rank": 73, "score": 71508.04144604565 }, { "content": "pub fn update_amd_features_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n use cpu_leaf::leaf_0x80000008::*;\n\n\n\n // We don't support more then 64 threads right now.\n\n // It's safe to put them all on the same processor.\n\n entry\n\n .ecx\n\n .write_bits_in_range(&ecx::THREAD_ID_SIZE_BITRANGE, THREAD_ID_MAX_SIZE)\n\n .write_bits_in_range(&ecx::NUM_THREADS_BITRANGE, u32::from(vm_spec.cpu_count - 1));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cpuid/src/transformer/amd.rs", "rank": 74, "score": 71508.04144604565 }, { "content": "pub fn parse_put_mmds(\n\n body: &Body,\n\n path_second_token: Option<&&str>,\n\n) -> Result<ParsedRequest, Error> {\n\n match path_second_token {\n\n Some(config_path) => match *config_path {\n\n \"config\" => Ok(ParsedRequest::Sync(SetMmdsConfiguration(\n\n serde_json::from_slice::<MmdsConfig>(body.raw()).map_err(Error::SerdeJson)?,\n\n ))),\n\n _ => Err(Error::Generic(\n\n StatusCode::BadRequest,\n\n format!(\"Unrecognized PUT request path `{}`.\", *config_path),\n\n )),\n\n },\n\n None => Ok(ParsedRequest::PutMMDS(\n\n serde_json::from_slice(body.raw()).map_err(Error::SerdeJson)?,\n\n )),\n\n }\n\n}\n\n\n", 
"file_path": "src/api_server/src/request/mmds.rs", "rank": 75, "score": 71508.04144604565 }, { "content": "// Wrapper over io::Stdin that implements `Serial::ReadableFd` and `vmm::VmmEventsObserver`.\n\nstruct SerialStdin(io::Stdin);\n\nimpl SerialStdin {\n\n /// Returns a `SerialStdin` wrapper over `io::stdin`.\n\n pub fn get() -> Self {\n\n SerialStdin(io::stdin())\n\n }\n\n}\n\n\n\nimpl io::Read for SerialStdin {\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n self.0.read(buf)\n\n }\n\n}\n\n\n\nimpl AsRawFd for SerialStdin {\n\n fn as_raw_fd(&self) -> RawFd {\n\n self.0.as_raw_fd()\n\n }\n\n}\n\n\n", "file_path": "src/vmm/src/builder.rs", "rank": 76, "score": 71331.85640185543 }, { "content": "pub fn update_extended_feature_info_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n _vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n use cpu_leaf::leaf_0x80000001::*;\n\n\n\n // set the Topology Extension bit since we use the Extended Cache Topology leaf\n\n entry.ecx.write_bit(ecx::TOPOEXT_INDEX, true);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cpuid/src/transformer/amd.rs", "rank": 77, "score": 70283.80173154673 }, { "content": "pub fn update_largest_extended_fn_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n _vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n use cpu_leaf::leaf_0x80000000::*;\n\n\n\n // KVM sets the largest extended function to 0x80000000. 
Change it to 0x8000001f\n\n // Since we also use the leaf 0x8000001d (Extended Cache Topology).\n\n entry\n\n .eax\n\n .write_bits_in_range(&eax::LARGEST_EXTENDED_FN_BITRANGE, LARGEST_EXTENDED_FN);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cpuid/src/transformer/amd.rs", "rank": 78, "score": 70283.80173154673 }, { "content": "pub fn update_extended_apic_id_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n use cpu_leaf::leaf_0x8000001e::*;\n\n\n\n let mut core_id = u32::from(vm_spec.cpu_id);\n\n // When hyper-threading is enabled each pair of 2 consecutive logical CPUs\n\n // will have the same core id since they represent 2 threads in the same core.\n\n // For Example:\n\n // logical CPU 0 -> core id: 0\n\n // logical CPU 1 -> core id: 0\n\n // logical CPU 2 -> core id: 1\n\n // logical CPU 3 -> core id: 1\n\n if vm_spec.ht_enabled {\n\n core_id /= 2;\n\n }\n\n\n\n entry\n\n .eax\n", "file_path": "src/cpuid/src/transformer/amd.rs", "rank": 79, "score": 70283.80173154673 }, { "content": "pub fn update_extended_cache_topology_entry(\n\n entry: &mut kvm_cpuid_entry2,\n\n vm_spec: &VmSpec,\n\n) -> Result<(), Error> {\n\n entry.flags |= KVM_CPUID_FLAG_SIGNIFCANT_INDEX;\n\n\n\n common::update_cache_parameters_entry(entry, vm_spec)\n\n}\n\n\n", "file_path": "src/cpuid/src/transformer/amd.rs", "rank": 80, "score": 70283.80173154673 }, { "content": "/// Generates pseudo random u32 numbers based on the current timestamp.\n\npub fn xor_rng_u32() -> u32 {\n\n let mut t: u32 = time::timestamp_cycles() as u32;\n\n // Taken from https://en.wikipedia.org/wiki/Xorshift.\n\n t ^= t << 13;\n\n t ^= t >> 17;\n\n t ^ (t << 5)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_xor_rng_u32() {\n\n for _ in 0..1000 {\n\n assert_ne!(xor_rng_u32(), xor_rng_u32());\n\n }\n\n }\n\n}\n", "file_path": "src/utils/src/rand.rs", "rank": 81, "score": 69343.16530495709 }, { "content": "#[cfg(target_arch = 
\"aarch64\")]\n\npub fn load_kernel<F>(\n\n guest_mem: &GuestMemoryMmap,\n\n kernel_image: &mut F,\n\n start_address: u64,\n\n) -> Result<GuestAddress>\n\nwhere\n\n F: Read + Seek,\n\n{\n\n /* Kernel boot protocol is specified in the kernel docs\n\n Documentation/arm/Booting and Documentation/arm64/booting.txt.\n\n\n\n ======aarch64 kernel header========\n\n u32 code0;\t\t\t/* Executable code */\n\n u32 code1;\t\t\t/* Executable code */\n\n u64 text_offset;\t\t/* Image load offset, little endian */\n\n u64 image_size;\t\t/* Effective Image size, little endian */\n\n u64 flags;\t\t\t/* kernel flags, little endian */\n\n u64 res2\t= 0;\t\t/* reserved */\n\n u64 res3\t= 0;\t\t/* reserved */\n\n u64 res4\t= 0;\t\t/* reserved */\n", "file_path": "src/kernel/src/loader/mod.rs", "rank": 82, "score": 69343.16530495709 }, { "content": "#[inline]\n\npub fn bench_snapshot_v1<W: std::io::Write>(mut snapshot_mem: &mut W, vm: VersionMap, crc: bool) {\n\n let state = Test {\n\n dummy: vec![\n\n Dummy {\n\n dummy: 123,\n\n string: \"xxx\".to_owned()\n\n };\n\n 100\n\n ],\n\n field0: 0,\n\n field1: 1,\n\n field2: 2,\n\n field3: \"test\".to_owned(),\n\n field4: vec![4; 1024 * 10],\n\n field_x: 0,\n\n };\n\n\n\n let mut snapshot = Snapshot::new(vm.clone(), 4);\n\n if crc {\n\n snapshot.save_with_crc64(&mut snapshot_mem, &state).unwrap();\n\n } else {\n\n snapshot.save(&mut snapshot_mem, &state).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/snapshot/benches/main.rs", "rank": 83, "score": 68893.36877063617 }, { "content": "/// Trait containing helper methods for [`BitRange`](struct.BitRange.html)\n\n///\n\n/// The methods are needed for:\n\n/// - checking if the `BitRange` is valid for a type `T`\n\n/// - creating masks for a type `T`\n\npub trait BitRangeExt<T> {\n\n /// Returns a value of type `T` that has all the bits in the specified bit range set to 1.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// #[macro_use]\n\n /// extern crate cpuid;\n\n /// use 
cpuid::bit_helper::*;\n\n ///\n\n /// fn main() {\n\n /// let range = BitRange {\n\n /// msb_index: 7,\n\n /// lsb_index: 3,\n\n /// };\n\n /// println!(\"binary value: {:b}\", range.get_mask());\n\n /// }\n\n /// ```\n\n /// The code above will print:\n\n /// ```bash\n", "file_path": "src/cpuid/src/bit_helper.rs", "rank": 84, "score": 68040.58103127762 }, { "content": "#[allow(unused_variables)]\n\npub trait BusDevice: AsAny + Send {\n\n /// Reads at `offset` from this device\n\n fn read(&mut self, offset: u64, data: &mut [u8]) {}\n\n /// Writes at `offset` into this device\n\n fn write(&mut self, offset: u64, data: &[u8]) {}\n\n /// Triggers the `irq_mask` interrupt on this device\n\n fn interrupt(&self, irq_mask: u32) -> io::Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// The insertion failed because the new device overlapped with an old device.\n\n Overlap,\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use self::Error::*;\n\n\n\n match *self {\n\n Overlap => write!(f, \"New device overlaps with an old device.\"),\n\n }\n\n }\n\n}\n\n\n\npub type Result<T> = result::Result<T, Error>;\n\n\n", "file_path": "src/devices/src/bus.rs", "rank": 85, "score": 67929.72863366384 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct MpcCpuWrapper(mpspec::mpc_cpu);\n", "file_path": "src/arch/src/x86_64/mptable.rs", "rank": 86, "score": 67390.47686982242 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct MpcIntsrcWrapper(mpspec::mpc_intsrc);\n", "file_path": "src/arch/src/x86_64/mptable.rs", "rank": 87, "score": 67390.47686982242 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct MpcIoapicWrapper(mpspec::mpc_ioapic);\n", "file_path": "src/arch/src/x86_64/mptable.rs", "rank": 88, "score": 67390.47686982242 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct MpfIntelWrapper(mpspec::mpf_intel);\n\n\n\n// These `mpspec` wrapper types are only data, reading 
them from data is a safe initialization.\n\nunsafe impl ByteValued for MpcBusWrapper {}\n\nunsafe impl ByteValued for MpcCpuWrapper {}\n\nunsafe impl ByteValued for MpcIntsrcWrapper {}\n\nunsafe impl ByteValued for MpcIoapicWrapper {}\n\nunsafe impl ByteValued for MpcTableWrapper {}\n\nunsafe impl ByteValued for MpcLintsrcWrapper {}\n\nunsafe impl ByteValued for MpfIntelWrapper {}\n\n\n\n// MPTABLE, describing VCPUS.\n\nconst MPTABLE_START: u64 = 0x9fc00;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Error {\n\n /// There was too little guest memory to store the entire MP table.\n\n NotEnoughMemory,\n\n /// The MP table has too little address space to be stored.\n\n AddressOverflow,\n", "file_path": "src/arch/src/x86_64/mptable.rs", "rank": 89, "score": 67390.47686982242 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct MpcTableWrapper(mpspec::mpc_table);\n", "file_path": "src/arch/src/x86_64/mptable.rs", "rank": 90, "score": 67390.47686982242 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct MpcBusWrapper(mpspec::mpc_bus);\n", "file_path": "src/arch/src/x86_64/mptable.rs", "rank": 91, "score": 67390.47686982242 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct MpcLintsrcWrapper(mpspec::mpc_lintsrc);\n", "file_path": "src/arch/src/x86_64/mptable.rs", "rank": 92, "score": 67390.47686982242 }, { "content": "fn get_s(entry: u64) -> u8 {\n\n ((entry & 0x0000_1000_0000_0000) >> 44) as u8\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/gdt.rs", "rank": 93, "score": 67051.23267524458 }, { "content": "fn get_p(entry: u64) -> u8 {\n\n ((entry & 0x0000_8000_0000_0000) >> 47) as u8\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/gdt.rs", "rank": 94, "score": 67051.23267524458 }, { "content": "fn get_l(entry: u64) -> u8 {\n\n ((entry & 0x0020_0000_0000_0000) >> 53) as u8\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/gdt.rs", "rank": 95, "score": 67051.23267524458 }, { "content": "fn get_g(entry: u64) -> u8 {\n\n ((entry & 0x0080_0000_0000_0000) >> 
55) as u8\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/gdt.rs", "rank": 96, "score": 67051.23267524458 }, { "content": "/// Trait for virtio devices to be driven by a virtio transport.\n\n///\n\n/// The lifecycle of a virtio device is to be moved to a virtio transport, which will then query the\n\n/// device. The virtio devices needs to create queues, events and event fds for interrupts and expose\n\n/// them to the transport via get_queues/get_queue_events/get_interrupt/get_interrupt_status fns.\n\npub trait VirtioDevice: AsAny + Send {\n\n /// Get the available features offered by device.\n\n fn avail_features(&self) -> u64;\n\n\n\n /// Get acknowledged features of the driver.\n\n fn acked_features(&self) -> u64;\n\n\n\n /// Set acknowledged features of the driver.\n\n /// This function must maintain the following invariant:\n\n /// - self.avail_features() & self.acked_features() = self.get_acked_features()\n\n fn set_acked_features(&mut self, acked_features: u64);\n\n\n\n /// The virtio device type.\n\n fn device_type(&self) -> u32;\n\n\n\n /// Returns the device queues.\n\n fn queues(&self) -> &[Queue];\n\n\n\n /// Returns a mutable reference to the device queues.\n\n fn queues_mut(&mut self) -> &mut [Queue];\n", "file_path": "src/devices/src/virtio/device.rs", "rank": 97, "score": 66529.90863093153 }, { "content": "fn build_response(http_version: Version, status_code: StatusCode, body: Body) -> Response {\n\n let mut response = Response::new(http_version, status_code);\n\n response.set_body(body);\n\n response\n\n}\n\n\n", "file_path": "src/mmds/src/lib.rs", "rank": 98, "score": 66193.30982877282 } ]
Rust
cli/src/run.rs
russellwmy/tract
e53430a65eac501f3145ff7fbaa80d6aac8c9e40
use crate::CliResult; use crate::{Model, Parameters}; use tract_hir::internal::*; #[cfg(feature = "pulse")] use tract_pulse::internal::*; pub fn handle(params: &Parameters, options: &clap::ArgMatches) -> CliResult<()> { let dump = options.is_present("dump"); #[cfg(feature = "pulse")] let outputs = if let Some(pulse) = params.tract_model.downcast_ref::<PulsedModel>() { run_pulse_t(pulse, &params)? } else { dispatch_model!(&*params.tract_model, |m| run_regular(m, &params, options))? }; #[cfg(not(feature = "pulse"))] let outputs = dispatch_model!(&*params.tract_model, |m| run_regular(m, &params, options))?; if dump { for (ix, output) in outputs.iter().enumerate() { println!("output #{}\n{}\n", ix, output.dump(true)?); } } if let Some(asserts) = &params.assertions.assert_outputs { crate::utils::check_outputs(&*outputs, &asserts)?; } if let Some(facts) = &params.assertions.assert_output_facts { let outputs: Vec<InferenceFact> = outputs.iter().map(|t| InferenceFact::dt_shape(t.datum_type(), t.shape())).collect(); crate::utils::check_inferred(&*outputs, &*facts)?; } Ok(()) } fn run_regular( tract: &dyn Model, params: &Parameters, options: &clap::ArgMatches, ) -> CliResult<TVec<Arc<Tensor>>> { let steps = options.is_present("steps"); let assert_sane_floats = options.is_present("assert-sane-floats"); let mut inputs: TVec<Tensor> = tvec!(); for (ix, input) in tract.input_outlets().iter().enumerate() { if let Some(input) = params.input_values.get(ix).and_then(|x| x.as_ref()) { inputs.push(input.clone().into_tensor()) } else { let fact = tract.outlet_typedfact(*input)?; inputs.push(crate::tensor::tensor_for_fact(&fact, None)?); } } dispatch_model!(tract, |m| { let plan = SimplePlan::new(m)?; let mut state = SimpleState::new(plan)?; Ok(state.run_plan_with_eval(inputs, |session_state, state, node, input| { if steps { eprintln!("{}: <{:?}", node, input); } let r = tract_core::plan::eval(session_state, state, node, input); if steps { eprintln!("{}: >{:?}", node, r); } let r = r?; 
if assert_sane_floats { for (ix, o) in r.iter().enumerate() { if let Ok(floats) = o.as_slice::<f32>() { if let Some(pos) = floats.iter().position(|f| !f.is_finite()) { eprintln!("{:?}", floats); tract_core::anyhow::bail!( "Found {} in output {} of {}", floats[pos], ix, node ); } } } } Ok(r) })?) }) } #[cfg(feature = "pulse")] fn run_pulse_t(model: &PulsedModel, params: &Parameters) -> CliResult<TVec<Arc<Tensor>>> { let input_fact = model.input_fact(0)?; let output_fact = model.output_fact(0)?; let output_pulse = output_fact.pulse(); let axis = input_fact.axis; let input: &Tensor = &params.input_values[0].as_ref().unwrap(); let input_dim = input.shape()[axis]; let output_dim = output_fact .dim .eval(&SymbolValues::default().with(stream_symbol(), input_dim as i64)) .to_usize()?; let mut output_shape = output_fact.shape.to_vec(); output_shape[output_fact.axis] = (output_dim as usize + output_fact.delay + 4 * output_fact.pulse()).to_dim(); let output_shape: TVec<usize> = output_shape.iter().map(|d| d.to_usize().unwrap()).collect(); let plan = SimplePlan::new(model)?; let mut state = ::tract_core::plan::SimpleState::new(&plan)?; let pulse = input_fact.pulse(); let mut result = tract_ndarray::ArrayD::<f32>::default(&*output_shape); let input = input.to_array_view::<f32>()?; for ix in 0..input_dim.div_ceil(pulse) { let chunk = input.slice_axis(tract_ndarray::Axis(axis), (ix * pulse..(ix + 1) * pulse).into()); let input = if chunk.shape()[input_fact.axis] < pulse { let mut chunk_shape = chunk.shape().to_vec(); chunk_shape[input_fact.axis] = pulse; let mut padded_chunk = tract_ndarray::ArrayD::<f32>::default(chunk_shape); padded_chunk .slice_axis_mut( tract_ndarray::Axis(input_fact.axis), (..chunk.shape()[input_fact.axis]).into(), ) .assign(&chunk); padded_chunk } else { chunk.to_owned() }; let outputs = state.run(tvec!(input.into()))?; let result_chunk = outputs[0].to_array_view::<f32>()?; result .slice_axis_mut( tract_ndarray::Axis(output_fact.axis), ((output_pulse * 
ix)..(output_pulse * (ix + 1))).into(), ) .assign(&result_chunk); } result.slice_axis_inplace(tract_ndarray::Axis(output_fact.axis), (output_fact.delay..).into()); result .slice_axis_inplace(tract_ndarray::Axis(output_fact.axis), (..output_dim as usize).into()); Ok(tvec!(result.into_arc_tensor())) }
use crate::CliResult; use crate::{Model, Parameters}; use tract_hir::internal::*; #[cfg(feature = "pulse")] use tract_pulse::internal::*; pub fn handle(params: &Parameters, options: &clap::ArgMatches) -> CliResult<()> { let dump = options.is_present("dump"); #[cfg(feature = "pulse")] let outputs = if let Some(pulse) = params.tract_model.downcast_ref::<PulsedModel>() { run_pulse_t(pulse, &params)? } else { dispatch_model!(&*params.tract_model, |m| run_regular(m, &params, options))? }; #[cfg(not(feature = "pulse"))] let outputs = dispatch_model!(&*params.tract_model, |m| run_regular(m, &params, options))?; if dump { for (ix, output) in outputs.iter().enumerate() { println!("output #{}\n{}\n", ix, output.dump(true)?); } } if let Some(asserts) = &params.assertions.assert_outputs { crate::utils::check_outputs(&*outputs, &asserts)?; } if let Some(facts) = &params.assertions.assert_output_facts { let outputs: Vec<InferenceFact> = outputs.iter().map(|t| InferenceFact::dt_shape(t.datum_type(), t.shape())).collect(); crate::utils::check_inferred(&*outputs, &*facts)?; } Ok(()) } fn run_regular( tract: &dyn Model, params: &Parameters, options: &clap::ArgMatches, ) -> CliResult<TVec<Arc<Tensor>>> { let steps = options.is_present("steps"); let assert_sane_floats = options.is_present("assert-sane-floats"); let mut inputs: TVec<Tensor> = tvec!(); for (ix, input) in tract.input_outlets().iter().enumerate() { if let Some(input) = params.input_values.get(ix).and_then(|x| x.as_ref()) { inputs.push(input.clone().into_tensor()) } else { let fact = tract.outlet_typedfact(*input)?; inputs.push(crate::tensor::tensor_for_fact(&fact, None)?); } } dispatch_model!(tract, |m| { let plan = SimplePlan::new(m)?; let mut state = SimpleState::new(plan)?; Ok(state.run_plan_with_eval(inputs, |session_state, state, node, input| { if steps { eprintln!("{}: <{:?}", node, input); } let r = tract_core::plan::eval(session_state, state, node, input); if steps { eprintln!("{}: >{:?}", node, r); } let r = r?; 
if assert_sane_floats { for (ix, o) in r.iter().enumerate() { if let Ok(floats) = o.as_slice::<f32>() { if let Some(pos) = floats.iter().position(|f| !f.is_finite()) { eprintln!("{:?}", floats); tract_core::anyhow::bail!( "Found {} in output {} of {}", floats[pos], ix, node ); } } } } Ok(r) })?) }) } #[cfg(feature = "pulse")] fn run_pulse_t(model: &PulsedModel, params: &Parameters) -> CliResult<TVec<Arc<Tensor>>> { let input_fact = model.input_fact(0)?; let output_fact = model.output_fact(0)?; let output_pulse = output_fact.pulse(); let axis = input_fact.axis; let input: &Tensor = &params.input_values[0].as_ref().unwrap(); let input_dim = input.shape()[axis]; let output_dim = output_fact .dim .eval(&SymbolValues::default().
shape); let input = input.to_array_view::<f32>()?; for ix in 0..input_dim.div_ceil(pulse) { let chunk = input.slice_axis(tract_ndarray::Axis(axis), (ix * pulse..(ix + 1) * pulse).into()); let input = if chunk.shape()[input_fact.axis] < pulse { let mut chunk_shape = chunk.shape().to_vec(); chunk_shape[input_fact.axis] = pulse; let mut padded_chunk = tract_ndarray::ArrayD::<f32>::default(chunk_shape); padded_chunk .slice_axis_mut( tract_ndarray::Axis(input_fact.axis), (..chunk.shape()[input_fact.axis]).into(), ) .assign(&chunk); padded_chunk } else { chunk.to_owned() }; let outputs = state.run(tvec!(input.into()))?; let result_chunk = outputs[0].to_array_view::<f32>()?; result .slice_axis_mut( tract_ndarray::Axis(output_fact.axis), ((output_pulse * ix)..(output_pulse * (ix + 1))).into(), ) .assign(&result_chunk); } result.slice_axis_inplace(tract_ndarray::Axis(output_fact.axis), (output_fact.delay..).into()); result .slice_axis_inplace(tract_ndarray::Axis(output_fact.axis), (..output_dim as usize).into()); Ok(tvec!(result.into_arc_tensor())) }
with(stream_symbol(), input_dim as i64)) .to_usize()?; let mut output_shape = output_fact.shape.to_vec(); output_shape[output_fact.axis] = (output_dim as usize + output_fact.delay + 4 * output_fact.pulse()).to_dim(); let output_shape: TVec<usize> = output_shape.iter().map(|d| d.to_usize().unwrap()).collect(); let plan = SimplePlan::new(model)?; let mut state = ::tract_core::plan::SimpleState::new(&plan)?; let pulse = input_fact.pulse(); let mut result = tract_ndarray::ArrayD::<f32>::default(&*output_
random
[ { "content": "pub fn dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let lrn = node.op_as::<Lrn>().unwrap();\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\n\n \"tract_onnx_lrn\",\n\n &[input],\n\n &[\n\n (\"alpha\", numeric(lrn.alpha)),\n\n (\"beta\", numeric(lrn.beta)),\n\n (\"bias\", numeric(lrn.bias)),\n\n (\"size\", numeric(lrn.size)),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "onnx-opl/src/lrn.rs", "rank": 0, "score": 505705.0592507737 }, { "content": "pub fn dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<ElementWiseOp>().unwrap().0.downcast_ref::<IsInf>().unwrap();\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\n\n \"tract_onnx_isinf\",\n\n &[input],\n\n &[\n\n (\"detect_negative\", logical(op.detect_negative)),\n\n (\"detect_positive\", logical(op.detect_positive)),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "onnx-opl/src/is_inf.rs", "rank": 1, "score": 505705.05925077375 }, { "content": "pub fn make_inputs_for_model(model: &dyn Model) -> TractResult<TVec<TValue>> {\n\n make_inputs(\n\n &model\n\n .input_outlets()\n\n .iter()\n\n .map(|&t| model.outlet_typedfact(t))\n\n .collect::<TractResult<Vec<TypedFact>>>()?,\n\n )\n\n}\n\n\n", "file_path": "libcli/src/tensor.rs", "rank": 2, "score": 485771.2428039446 }, { "content": "pub fn one_hot_dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let one_hot = node.op_as::<OneHot>().unwrap();\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\n\n \"tract_core_one_hot\",\n\n &[input],\n\n &[\n\n (\"axis\", numeric(one_hot.axis)),\n\n (\"dim\", numeric(one_hot.dim)),\n\n (\"value_off\", numeric(one_hot.off.cast_to_scalar::<f32>()?)),\n\n (\"value_on\", numeric(one_hot.on.cast_to_scalar::<f32>()?)),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/one_hot.rs", "rank": 3, "score": 452367.4259778464 
}, { "content": "fn dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<Multinomial>().context(\"wrong op\")?;\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n\n\n let dtype = match op.dtype {\n\n DatumType::I32 => 6,\n\n DatumType::I64 => 7,\n\n dt => bail!(\"Unsupported datum type {:?} for ONNX Multinomial\", dt),\n\n };\n\n\n\n let inv = if let Some(seed) = op.seed {\n\n invocation(\n\n \"tract_onnx_multinomial\",\n\n &[input],\n\n &[\n\n (\"dtype\", numeric(dtype)),\n\n (\"sample_size\", numeric(op.sample_size)),\n\n (\"seed\", numeric(seed)),\n\n ],\n\n )\n", "file_path": "onnx-opl/src/multinomial.rs", "rank": 4, "score": 449138.2798430942 }, { "content": "fn dump(_ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<Random>().context(\"wrong op\")?;\n\n let mut named = vec![\n\n (\"datum_type\", string(format!(\"{:?}\", op.fact.datum_type))),\n\n (\"shape\", tdims(&op.fact.shape)),\n\n ];\n\n if let Some(seed) = op.seed {\n\n named.push((\"seed\", numeric(seed)));\n\n }\n\n match &op.dist {\n\n Dist::Uniform { low, high } => {\n\n named.push((\"dist\", string(\"uniform\")));\n\n named.push((\n\n \"parameters\",\n\n array(&[\n\n numeric(low.cast_to_scalar::<f32>()?),\n\n numeric(high.cast_to_scalar::<f32>()?),\n\n ]),\n\n ));\n\n }\n", "file_path": "onnx-opl/src/random.rs", "rank": 5, "score": 449138.2798430943 }, { "content": "pub fn extract_costs(annotations: &mut Annotations, model: &dyn Model) -> TractResult<()> {\n\n fn extract_costs_rec(\n\n annotations: &mut Annotations,\n\n model: &dyn Model,\n\n prefix: &[(usize, String)],\n\n multiplier: TDim,\n\n ) -> TractResult<()> {\n\n if let Some(model) = model.downcast_ref::<TypedModel>() {\n\n for node_id in 0..model.nodes().len() {\n\n let inputs = model.node_input_facts(node_id)?;\n\n let cost = model.node(node_id).op.cost(&inputs)?;\n\n annotations.node_mut(NodeQId(prefix.into(), node_id)).cost = 
cost\n\n .into_iter()\n\n .map(|(k, v)| (k, if k.is_compute() { v * &multiplier } else { v }))\n\n .collect();\n\n\n\n let nested_subs = model.nested_models(node_id);\n\n let nested_multis = (model as &dyn Model).nested_models_iters(node_id, &inputs);\n\n if let Some((name, sub)) = nested_subs {\n\n let mut prefix: TVec<_> = prefix.into();\n", "file_path": "libcli/src/profile.rs", "rank": 6, "score": 440077.78384786285 }, { "content": "pub fn read_tensor<R: std::io::Read>(mut reader: R) -> TractResult<Tensor> {\n\n unsafe {\n\n let mut header: Header = std::mem::zeroed();\n\n let buffer: &mut [u8; 128] = std::mem::transmute(&mut header);\n\n reader.read_exact(buffer)?;\n\n if header.magic != [0x4e, 0xef] {\n\n bail!(\"Wrong magic number\");\n\n };\n\n if header.version_maj != 1 && header.version_min != 0 {\n\n bail!(\"Wrong version number\");\n\n }\n\n if header.rank > 8 {\n\n bail!(\"Wrong tensor rank {}\", header.rank);\n\n }\n\n let shape: TVec<usize> =\n\n header.dims[0..header.rank as usize].iter().map(|d| *d as _).collect();\n\n let len = shape.iter().product::<usize>();\n\n\n\n if header.item_type == 5 {\n\n let expected_bit_size = len * header.bits_per_item as usize;\n", "file_path": "nnef/src/tensors.rs", "rank": 7, "score": 439462.7349081286 }, { "content": "fn dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<NonMaxSuppression>().context(\"wrong op\")?;\n\n let boxes = ast.mapping[&node.inputs[0]].clone();\n\n let scores = ast.mapping[&node.inputs[1]].clone();\n\n let max_output_boxes_per_class = ast.mapping[&node.inputs[2]].clone();\n\n let iou_threshold = ast.mapping[&node.inputs[3]].clone();\n\n let score_threshold = node.inputs.get(4).map(|v| ast.mapping[v].clone());\n\n\n\n let inv = if let Some(score_threshold) = score_threshold {\n\n invocation(\n\n \"tract_onnx_non_max_suppression\",\n\n &[boxes, scores, max_output_boxes_per_class, iou_threshold, score_threshold],\n\n 
&[(\"center_point_box\", numeric(op.center_point_box.into_i64()))],\n\n )\n\n } else {\n\n invocation(\n\n \"tract_onnx_non_max_suppression\",\n\n &[boxes, scores, max_output_boxes_per_class, iou_threshold],\n\n &[(\"center_point_box\", numeric(op.center_point_box.into_i64()))],\n\n )\n\n };\n\n\n\n Ok(Some(inv))\n\n}\n\n\n", "file_path": "onnx-opl/src/non_max_suppression.rs", "rank": 8, "score": 438331.1708609619 }, { "content": "fn dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<TreeEnsembleClassifier>().context(\"wrong op\")?;\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n let trees = ast.konst_variable(format!(\"{}_trees\", node.name), &op.ensemble.data.trees)?;\n\n let nodes = ast.konst_variable(format!(\"{}_nodes\", node.name), &op.ensemble.data.nodes)?;\n\n let leaves = ast.konst_variable(format!(\"{}_leaves\", node.name), &op.ensemble.data.leaves)?;\n\n let agg = match op.ensemble.aggregate_fn {\n\n Aggregate::Min => \"MIN\",\n\n Aggregate::Max => \"MAX\",\n\n Aggregate::Sum => \"SUM\",\n\n Aggregate::Avg => \"AVERAGE\",\n\n };\n\n Ok(Some(invocation(\n\n \"tract_onnx_ml_tree_ensemble_classifier\",\n\n &[input, trees, nodes, leaves],\n\n &[\n\n (\"max_used_feature\", numeric(op.ensemble.max_used_feature)),\n\n (\"n_classes\", numeric(op.ensemble.n_classes)),\n\n (\"aggregate_fn\", string(agg)),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "onnx-opl/src/ml/tree_ensemble_classifier.rs", "rank": 9, "score": 433221.8708540414 }, { "content": "pub fn ser(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let einsum = node.op_as::<EinSum>().unwrap();\n\n if einsum.q_params.is_some() {\n\n ser_einsum_q(ast, node)\n\n } else {\n\n ser_einsum(ast, node)\n\n }\n\n}\n\n\n", "file_path": "nnef/src/ops/core/einsum.rs", "rank": 10, "score": 430212.5102994032 }, { "content": "pub fn ser_einsum(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let 
einsum = node.op_as::<EinSum>().unwrap();\n\n let inputs = node.inputs.iter().map(|i| (*ast.mapping[i]).clone()).collect();\n\n Ok(Some(invocation(\n\n \"tract_core_einsum\",\n\n &[Arc::new(RValue::Array(inputs))],\n\n &[\n\n (\"expr\", string(einsum.axes.to_string())),\n\n (\"acc\", datum_type(einsum.operating_dt)),\n\n (\"output\", einsum.q_params.map(datum_type).unwrap_or_else(|| string(\"\"))),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/einsum.rs", "rank": 11, "score": 425112.18755497294 }, { "content": "pub fn leaky_relu(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<ops::element_wise::ElementWiseOp>().context(\"Wrong op\")?;\n\n let op = op.0.downcast_ref::<ops::nn::LeakyRelu>().context(\"Wrong op\")?;\n\n Ok(Some(invocation(\n\n \"leaky_relu\",\n\n &node.inputs.iter().map(|o| ast.mapping[o].clone()).collect::<TVec<_>>(),\n\n &[(\"alpha\", RValue::Literal(op.alpha.into()))],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/ser.rs", "rank": 12, "score": 425112.18755497294 }, { "content": "pub fn ser_einsum_q(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let einsum = node.op_as::<EinSum>().unwrap();\n\n let inputs = node.inputs.iter().map(|i| (*ast.mapping[i]).clone()).collect_vec();\n\n Ok(Some(invocation(\n\n \"tract_core_einsum_q\",\n\n &[Arc::new(RValue::Array(vec![inputs[0].clone(), inputs[1].clone()]))],\n\n &[\n\n (\"expr\", string(einsum.axes.to_string())),\n\n (\"acc\", datum_type(einsum.operating_dt)),\n\n (\"output\", einsum.q_params.map(datum_type).unwrap_or_else(|| string(\"\"))),\n\n (\"bias\", inputs[2].clone()),\n\n (\"a0\", inputs[3].clone()),\n\n (\"a_scale\", inputs[4].clone()),\n\n (\"b0\", inputs[5].clone()),\n\n (\"b_scale\", inputs[6].clone()),\n\n (\"c0\", inputs[7].clone()),\n\n (\"c_scale\", inputs[8].clone()),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/einsum.rs", "rank": 13, "score": 425112.187554973 }, { 
"content": "pub fn proto_from_reader<R: ::std::io::Read>(mut r: R) -> TractResult<TensorProto> {\n\n let mut v = vec![];\n\n r.read_to_end(&mut v)?;\n\n let b = bytes::Bytes::from(v);\n\n TensorProto::decode(b).context(\"Can not parse protobuf input\")\n\n}\n\n\n", "file_path": "onnx/src/tensor.rs", "rank": 14, "score": 408906.53877976874 }, { "content": "fn cast_dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<Cast>().unwrap();\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\"tract_core_cast\", &[input], &[(\"to\", datum_type(op.to))])))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/cast.rs", "rank": 15, "score": 406735.57081304386 }, { "content": "fn range_dump(_ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<Range>().unwrap();\n\n\n\n let start = op.start.to_scalar::<TDim>()?;\n\n let end = op.end.to_scalar::<TDim>()?;\n\n let step = op.step.to_scalar::<TDim>()?;\n\n\n\n Ok(Some(invocation(\n\n \"tract_core_range\",\n\n &[],\n\n &[(\"start\", tdim(start)), (\"end\", tdim(end)), (\"step\", tdim(step))],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/range.rs", "rank": 16, "score": 406735.57081304386 }, { "content": "fn external_dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<TypedSource>().unwrap();\n\n for dim in op.fact.shape.iter() {\n\n for sym in dim.symbols() {\n\n ast.ensure_symbol(&sym)?;\n\n }\n\n }\n\n let shape = tdims(&op.fact.shape);\n\n Ok(Some(invocation(\n\n \"tract_core_external\",\n\n &[],\n\n &[\n\n (\"shape\", shape),\n\n (\"datum_type\", string(format!(\"{:?}\", op.fact.datum_type.unquantized()))),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/source.rs", "rank": 17, "score": 406735.57081304386 }, { "content": "fn qconv_unary_dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = 
node.op_as::<ConvUnary>().unwrap();\n\n if op.q_params.is_none() || node.outputs[0].fact.datum_type.is_quantized() {\n\n return Ok(None);\n\n }\n\n let name = &node.name;\n\n let mut named_args = make_conv_named_args(node, &op.pool_spec, op.group, false, None)?;\n\n\n\n for (ix, name) in [\"a0\", \"a_scale\", \"b0\", \"b_scale\", \"c0\", \"c_scale\"].iter().enumerate() {\n\n named_args.push((name, (*ast.mapping[&node.inputs[1 + ix]]).clone()));\n\n }\n\n\n\n let ci = op\n\n .pool_spec\n\n .data_format\n\n .shape(&ast.model.outlet_fact(node.inputs[0])?.shape.to_tvec())?\n\n .c()\n\n .to_usize()?;\n\n let output_shape = op.pool_spec.data_format.shape(node.outputs[0].fact.shape.to_tvec())?;\n\n let co = output_shape.c().to_usize()?;\n", "file_path": "nnef/src/ops/core/qconv.rs", "rank": 18, "score": 401626.27080612327 }, { "content": "#[deprecated(note = \"Please use onnx().model_for_read(..)\")]\n\npub fn for_reader<R: std::io::Read>(mut r: R) -> TractResult<InferenceModel> {\n\n onnx().model_for_read(&mut r)\n\n}\n\n\n", "file_path": "onnx/src/lib.rs", "rank": 19, "score": 399532.9171999302 }, { "content": "fn dump_reverse_lookup(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n let op = node.op_as::<ReverseLookup>().context(\"wrong op\")?;\n\n let values = ast.konst_variable(format!(\"{}.keys\", node.name), &op.keys)?;\n\n Ok(Some(invocation(\n\n \"tract_onnx_ml_reverse_lookup\",\n\n &[input, values],\n\n &[(\"fallback\", numeric(op.fallback_value))],\n\n )))\n\n}\n\n\n", "file_path": "onnx-opl/src/ml/category_mapper.rs", "rank": 20, "score": 396698.98580862384 }, { "content": "fn dump_direct_lookup(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n let op = node.op_as::<DirectLookup>().context(\"wrong op\")?;\n\n let keys = ast.konst_variable(format!(\"{}.values\", node.name), &op.values)?;\n\n let 
fallback = ast.konst_variable(format!(\"{}.fallback\", node.name), &op.fallback_value)?;\n\n Ok(Some(invocation(\"tract_onnx_ml_direct_lookup\", &[input, keys, fallback], &[])))\n\n}\n\n\n", "file_path": "onnx-opl/src/ml/category_mapper.rs", "rank": 21, "score": 396698.98580862384 }, { "content": "pub fn from_reader<R: ::std::io::Read>(r: R) -> TractResult<Tensor> {\n\n proto_from_reader(r)?.try_into()\n\n}\n", "file_path": "onnx/src/tensor.rs", "rank": 22, "score": 385269.3917607808 }, { "content": "#[cfg(feature = \"complex\")]\n\npub fn reinterpret_complex_as_inner_dim(mut t: Tensor) -> anyhow::Result<Tensor> {\n\n unsafe {\n\n t.shape.push(2);\n\n t.set_datum_type(t.datum_type().decomplexify()?);\n\n t.update_strides_and_len();\n\n Ok(t)\n\n }\n\n}\n\n\n", "file_path": "data/src/tensor.rs", "rank": 23, "score": 381256.604468156 }, { "content": "#[cfg(feature = \"complex\")]\n\npub fn reinterpret_inner_dim_as_complex(mut t: Tensor) -> anyhow::Result<Tensor> {\n\n anyhow::ensure!(\n\n t.shape().last() == Some(&2),\n\n \"The last dimension in the tensor shape {:?} must be 2\",\n\n t.shape()\n\n );\n\n unsafe {\n\n t.shape.pop();\n\n t.set_datum_type(t.datum_type().complexify()?);\n\n t.update_strides_and_len();\n\n Ok(t)\n\n }\n\n}\n\n\n", "file_path": "data/src/tensor.rs", "rank": 24, "score": 381256.604468156 }, { "content": "pub fn to_axis_ops(input_orig: &[TDim], output_spec: &[TDim]) -> TractResult<TVec<AxisOp>> {\n\n let final_output = compute_shape(input_orig, output_spec)?;\n\n let mut stack: TVec<AxisOp> = tvec!();\n\n 'top: loop {\n\n let current_input =\n\n stack.iter().try_fold(TVec::from(input_orig), |mut shape, op| -> TractResult<_> {\n\n op.change_shape_array(&mut shape, false)?;\n\n Ok(shape)\n\n })?;\n\n if current_input == final_output {\n\n return Ok(stack);\n\n }\n\n if let Some(common) =\n\n current_input.iter().zip(final_output.iter()).position(|(a, b)| a != b)\n\n {\n\n if current_input[common].is_one() {\n\n 
stack.push(AxisOp::Rm(common));\n\n } else if final_output[common].is_one() {\n\n stack.push(AxisOp::Add(common));\n\n } else {\n", "file_path": "hir/src/ops/array/reshape.rs", "rank": 25, "score": 377375.24174640543 }, { "content": "fn ser(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<PulsePad>().unwrap();\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n let dt = ast.model.outlet_fact(node.inputs[0])?.datum_type;\n\n let (border, value) = tract_nnef::ops::nnef::ser::pad_mode(&op.mode, dt)?;\n\n let mut params = vec![\n\n (\"axis\", numeric(op.axis)),\n\n (\"before\", numeric(op.before)),\n\n (\"begin_input\", numeric(op.begin_input)),\n\n (\"overlap\", numeric(op.overlap)),\n\n (\"after\", tdim(&op.after)),\n\n (\"end_input\", tdim(&op.end_input)),\n\n ];\n\n params.push((\"border\", string(border)));\n\n if let Some(value) = value {\n\n params.push((\"value\", value));\n\n }\n\n Ok(Some(invocation(\"tract_pulse_pulse_pad\", &[wire], &params)))\n\n}\n\n\n", "file_path": "pulse-opl/src/pad.rs", "rank": 26, "score": 376914.5476410198 }, { "content": "fn ser_delay(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op().downcast_ref::<Delay>().unwrap();\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\n\n \"tract_pulse_delay\",\n\n &[wire],\n\n &[\n\n (\"axis\", numeric(op.axis)),\n\n (\"delay\", numeric(op.delay)),\n\n (\"overlap\", numeric(op.overlap)),\n\n ],\n\n )))\n\n}\n\n\n\nimpl PulsedOp for Delay {\n\n fn pulsed_output_facts(&self, inputs: &[&PulsedFact]) -> TractResult<TVec<PulsedFact>> {\n\n ensure!(inputs.len() == 1);\n\n let mut fact = inputs[0].clone();\n\n let mut stream = fact.stream.as_mut().unwrap();\n\n fact.shape.set(self.axis, fact.shape[self.axis].clone() + self.overlap);\n", "file_path": "pulse/src/ops/delay.rs", "rank": 27, "score": 372209.0936984414 }, { "content": "fn ser_trilu(ast: &mut IntoAst, node: 
&TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op().downcast_ref::<ops::array::Trilu>().unwrap();\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n let k = ast.mapping[&node.inputs[1]].clone();\n\n Ok(Some(invocation(\n\n \"tract_core_trilu\",\n\n &[input, k],\n\n &[\n\n (\"upper\", logical(op.upper)),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/trilu.rs", "rank": 28, "score": 367676.40412540245 }, { "content": "fn ser_reduce(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op().downcast_ref::<ops::nn::Reduce>().unwrap();\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n let oper = match op.reducer {\n\n ops::nn::Reducer::ArgMax(last) if last => \"tract_core_argmax_reduce_last\",\n\n ops::nn::Reducer::ArgMin(last) if last => \"tract_core_argmin_reduce_last\",\n\n ops::nn::Reducer::Prod => \"tract_core_product_reduce\",\n\n _ => return Ok(None),\n\n };\n\n Ok(Some(invocation(oper, &[wire], &[(\"axes\", ints(&op.axes))])))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/reduce.rs", "rank": 29, "score": 367676.40412540245 }, { "content": "fn ser_fft(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n let op = node.op_as::<Fft>().context(\"wrong op\")?;\n\n Ok(Some(invocation(\n\n \"tract_core_fft\",\n\n &[input],\n\n &[(\"axis\", numeric(op.axis)), (\"inverse\", logical(op.inverse))],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/fft.rs", "rank": 30, "score": 367676.40412540245 }, { "content": "fn ser_ctid(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\"tract_core_complex_to_inner_dim\", &[wire], &[])))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/complex.rs", "rank": 31, "score": 367676.40412540245 }, { "content": "fn ser_submodel(ast: &mut IntoAst, node: &TypedNode) -> 
TractResult<Option<Arc<RValue>>> {\n\n let op = node.op_as::<SubmodelOp>().unwrap();\n\n let input = tvec![ast.mapping[&node.inputs[0]].clone()];\n\n let invoke = invocation(\"tract_core_submodel\", &input, &[(\"label\", string(op.label()))]);\n\n ast.resources.insert(op.label().to_string(), Arc::new(TypedModelResource(op.model().clone())));\n\n Ok(Some(invoke))\n\n}\n", "file_path": "nnef/src/ops/core/submodel.rs", "rank": 32, "score": 367676.40412540245 }, { "content": "fn ser_broadcast(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op().downcast_ref::<ops::array::MultiBroadcastTo>().unwrap();\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\"tract_core_broadcast\", &[wire], &[(\"shape\", tdims(&op.shape))])))\n\n}\n", "file_path": "nnef/src/ops/core/broadcast.rs", "rank": 33, "score": 367676.40412540245 }, { "content": "fn ser_downsample(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op().downcast_ref::<ops::Downsample>().unwrap();\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\n\n \"tract_core_downsample\",\n\n &[wire],\n\n &[\n\n (\"axis\", numeric(op.axis)),\n\n (\"stride\", numeric(op.stride)),\n\n (\"modulo\", numeric(op.modulo)),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/downsample.rs", "rank": 34, "score": 367676.40412540245 }, { "content": "fn ser_stft(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n let op = node.op_as::<Stft>().context(\"wrong op\")?;\n\n let mut named: TVec<(_, RValue)> = tvec![\n\n (\"axis\", numeric(op.axis)),\n\n (\"frame\", numeric(op.frame)),\n\n (\"stride\", numeric(op.stride)),\n\n ];\n\n if let Some(w) = &op.window {\n\n let w = ast.konst(format!(\"{}_window\", node.name), w)?;\n\n named.push((\"window\", (*w).clone()));\n\n }\n\n Ok(Some(invocation(\"tract_core_stft\", &[input], 
&named)))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/fft.rs", "rank": 35, "score": 367676.40412540245 }, { "content": "fn ser_scan(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op().downcast_ref::<Scan>().unwrap();\n\n let (mut body, body_tensors) = crate::ser::to_fragment_def(ast, &op.body)?;\n\n body.decl.id = Identifier(format!(\"scan_body_{}\", ast.fragments.len()));\n\n let mut scan = vec![];\n\n let mut state = vec![];\n\n let mut full = vec![];\n\n let mut outputs = vec![];\n\n for (ix, input) in op.input_mapping.iter().enumerate() {\n\n let name = string(&body.decl.parameters[ix].id.0);\n\n match input {\n\n InputMapping::Scan(info) => {\n\n scan.push(tuple_4(\n\n name,\n\n ast.mapping[&node.inputs[info.slot]].as_ref().clone(),\n\n numeric(info.axis),\n\n numeric(info.chunk),\n\n ));\n\n }\n\n InputMapping::State { init_slot } => {\n", "file_path": "nnef/src/ops/core/scan.rs", "rank": 36, "score": 367676.40412540245 }, { "content": "fn ser_topk(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op().downcast_ref::<ops::array::Topk>().unwrap();\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\n\n \"tract_core_topk\",\n\n &[input],\n\n &[(\"k\", numeric(op.k)), (\"largest\", logical(op.largest)), (\"axis\", numeric(op.axis))],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/topk.rs", "rank": 37, "score": 367676.40412540245 }, { "content": "fn ser_idtc(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\"tract_core_inner_dim_to_complex\", &[wire], &[])))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/complex.rs", "rank": 38, "score": 367676.40412540245 }, { "content": "pub fn count_op(model: &dyn Model, name: &str) -> TractResult<usize> {\n\n Ok(model\n\n .eval_order()\n\n .context(\"Cannot assert op count without an eval order\")?\n\n 
.into_iter()\n\n .map(|i| {\n\n if model.node_op_name(i) == name {\n\n 1\n\n } else {\n\n model.nested_models(i).into_iter().flat_map(|(_, m)| count_op(m, name)).sum()\n\n }\n\n })\n\n .sum())\n\n}\n", "file_path": "cli/src/utils.rs", "rank": 39, "score": 364630.20245407324 }, { "content": "fn ser_scatter_elements(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op().downcast_ref::<ScatterElements>().unwrap();\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n let indices = ast.mapping[&node.inputs[1]].clone();\n\n let updates = ast.mapping[&node.inputs[2]].clone();\n\n Ok(Some(invocation(\n\n \"tract_core_scatter_elements\",\n\n &[wire, indices, updates],\n\n &[(\"axis\", numeric(op.axis))],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/scatter.rs", "rank": 40, "score": 363307.0052796423 }, { "content": "fn ser_scatter_nd(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n let indices = ast.mapping[&node.inputs[1]].clone();\n\n let updates = ast.mapping[&node.inputs[2]].clone();\n\n Ok(Some(invocation(\"tract_core_scatter_nd\", &[wire, indices, updates], &[])))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/scatter.rs", "rank": 41, "score": 363307.0052796423 }, { "content": "fn strides(node: &NodeProto) -> TractResult<Option<TVec<usize>>> {\n\n node.get_attr_opt_tvec(\"strides\")\n\n}\n\n\n", "file_path": "onnx/src/ops/nn/mod.rs", "rank": 42, "score": 361578.2624951182 }, { "content": "fn dilations(node: &NodeProto) -> TractResult<Option<TVec<usize>>> {\n\n node.get_attr_opt_tvec(\"dilations\")\n\n}\n\n\n", "file_path": "onnx/src/ops/nn/mod.rs", "rank": 43, "score": 361578.2624951182 }, { "content": "fn declutter_recip(model: &TypedModel, node: &TypedNode) -> TractResult<Option<TypedModelPatch>> {\n\n use super::element_wise::*;\n\n if let Some(prec) = model.single_prec(node.id)? 
{\n\n if let Some(ew) = prec.op_as::<ElementWiseOp>() {\n\n let repl = if ew.0.is::<Sqrt>() {\n\n Some(rsqrt())\n\n } else if ew.0.is::<Rsqrt>() {\n\n Some(sqrt())\n\n } else {\n\n None\n\n };\n\n if let Some(repl) = repl {\n\n let mut patch = TypedModelPatch::default();\n\n let mut wire = patch.tap_model(model, prec.inputs[0])?;\n\n wire = patch.wire_node(&node.name, repl, &[wire])?[0];\n\n patch.shunt_outside(model, node.id.into(), wire)?;\n\n return Ok(Some(patch));\n\n }\n\n }\n\n }\n", "file_path": "core/src/ops/math/mod.rs", "rank": 44, "score": 361556.63396543846 }, { "content": "pub fn multicast(builder: &mut ModelBuilder, inputs: &[OutletId]) -> TractResult<TVec<OutletId>> {\n\n let ranks = inputs\n\n .iter()\n\n .map(|&i| Ok(builder.model.outlet_fact(i)?.rank()))\n\n .collect::<TractResult<Vec<usize>>>()?;\n\n let max_rank = ranks.iter().copied().max().unwrap();\n\n (inputs.iter())\n\n .zip(ranks.iter())\n\n .map(|(&i, &r)| {\n\n (r..max_rank).try_fold(i, |w, n| Ok(builder.wire_as_outlets(AxisOp::Add(n), &[w])?[0]))\n\n })\n\n .collect()\n\n}\n", "file_path": "nnef/src/registry.rs", "rank": 45, "score": 355417.7352464896 }, { "content": "pub fn write_tensor<W: std::io::Write>(w: &mut W, tensor: &Tensor) -> TractResult<()> {\n\n unsafe {\n\n let tensor = if tensor.datum_type() == TDim::datum_type() {\n\n tensor.cast_to::<i64>()?\n\n } else {\n\n Cow::Borrowed(tensor)\n\n };\n\n let mut header: Header = std::mem::zeroed();\n\n header.magic = [0x4e, 0xef];\n\n header.version_maj = 1;\n\n header.version_min = 0;\n\n if tensor.rank() > 8 {\n\n bail!(\"Only rank up to 8 are supported\");\n\n }\n\n header.rank = tensor.rank() as u32;\n\n for d in 0..tensor.rank() {\n\n header.dims[d] = tensor.shape()[d] as u32;\n\n }\n\n header.data_size_bytes = (tensor.len() * tensor.datum_type().size_of()) as u32;\n\n header.bits_per_item = (tensor.datum_type().size_of() * 8) as u32;\n", "file_path": "nnef/src/tensors.rs", "rank": 46, "score": 353351.9069314372 }, { "content": 
"pub fn optional_outputs(pb: &pb::NodeProto) -> impl Iterator<Item = Option<usize>> + '_ {\n\n let mut real_input = 0;\n\n (0..).map(move |i| {\n\n if pb.output.get(i).filter(|s| !s.is_empty()).is_some() {\n\n real_input += 1;\n\n Some(real_input - 1)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct TensorPlusPath<'a> {\n\n pub tensor: &'a pb::TensorProto,\n\n pub model_path: &'a str,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ParsingContext<'a> {\n", "file_path": "onnx/src/model.rs", "rank": 48, "score": 350529.2210420476 }, { "content": "pub fn optional_inputs(pb: &pb::NodeProto) -> impl Iterator<Item = Option<usize>> + '_ {\n\n let mut real_input = 0;\n\n (0..).map(move |i| {\n\n if pb.input.get(i).filter(|s| !s.is_empty()).is_some() {\n\n real_input += 1;\n\n Some(real_input - 1)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "onnx/src/model.rs", "rank": 49, "score": 350523.4400402404 }, { "content": "pub fn reduce(pb: &NodeDef, op: nn::Reducer) -> TractResult<Box<dyn InferenceOp>> {\n\n let t = pb.get_attr_datum_type(\"T\")?;\n\n let t_idx = pb.get_attr_datum_type(\"Tidx\")?;\n\n let keep_dims = pb.get_attr_bool(\"keep_dims\")?;\n\n Ok(Box::new(Reduce::new(t, t_idx, keep_dims, op)))\n\n}\n\n\n\nimpl Op for Reduce {\n\n fn name(&self) -> Cow<str> {\n\n format!(\"{:?}\", self.reducer).into()\n\n }\n\n\n\n not_a_typed_op!();\n\n}\n\n\n\nimpl EvalOp for Reduce {\n\n fn is_stateless(&self) -> bool {\n\n true\n\n }\n\n\n", "file_path": "tensorflow/src/ops/math/reduce.rs", "rank": 50, "score": 346816.63231787324 }, { "content": "pub fn handle(params: &Parameters, options: &DisplayParams) -> TractResult<()> {\n\n let decl = params\n\n .reference_model\n\n .as_deref()\n\n .context(\"Decluttered model not generated. (using --pass ?)\")?\n\n .downcast_ref::<TypedModel>()\n\n .unwrap();\n\n let pulsed =\n\n params.pulsed_model.as_ref().context(\"Pulsed model not generated. 
(using --pass ?)\")?;\n\n let model = params\n\n .tract_model\n\n .downcast_ref::<TypedModel>()\n\n .context(\"Final model is not Typed. (using --pass ?)\")?;\n\n\n\n let decl_input_fact = decl.input_fact(0)?;\n\n let pulsed_input_fact = pulsed.input_fact(0)?;\n\n let input_pulse = pulsed_input_fact.pulse().unwrap().to_usize().unwrap();\n\n\n\n let mut annotations = Annotations::from_model(&*params.tract_model)?;\n\n annotate_with_graph_def(&mut annotations, &*params.tract_model, &params.graph)?;\n", "file_path": "cli/src/stream_check.rs", "rank": 51, "score": 340894.93445533165 }, { "content": "pub fn full_axis_tracking(model: &TypedModel) -> TractResult<Vec<AxisTracking>> {\n\n let mut axes: Vec<AxisTracking> = vec![];\n\n for node in model.eval_order()? {\n\n for slot in 0..model.node(node).outputs.len() {\n\n let outlet = OutletId::new(node, slot);\n\n let input_fact = model.outlet_fact(outlet)?;\n\n 'axis: for axis in 0..input_fact.rank() {\n\n if axes.iter().any(|tracking| tracking.outlets.get(&outlet) == Some(&axis)) {\n\n continue 'axis;\n\n }\n\n if let Some(tracker) = AxisTracking::for_outlet_and_axis(model, outlet, axis)? 
{\n\n axes.push(tracker);\n\n }\n\n }\n\n }\n\n }\n\n Ok(axes)\n\n}\n\n\n", "file_path": "core/src/axes/model.rs", "rank": 52, "score": 340691.9608788233 }, { "content": "pub fn inference_wrap<O, R>(op: O, outputs: usize, rules: R) -> Box<dyn InferenceOp>\n\nwhere\n\n O: TypedOp,\n\n R: for<'r, 'p, 's> Fn(\n\n &'s dyn Op,\n\n &mut Solver<'r>,\n\n &'p [TensorProxy],\n\n &'p [TensorProxy],\n\n ) -> InferenceResult\n\n + Send\n\n + Sync\n\n + 'static,\n\n{\n\n expand(InferenceWrapper { typed_op: Box::new(op), rules: Arc::new(rules), outputs })\n\n}\n\n\n", "file_path": "hir/src/ops/expandable.rs", "rank": 53, "score": 327323.7450138716 }, { "content": "pub fn for_npz(npz: &mut ndarray_npy::NpzReader<fs::File>, name: &str) -> TractResult<Tensor> {\n\n if let Ok(t) = npz.by_name::<tract_ndarray::OwnedRepr<f32>, tract_ndarray::IxDyn>(name) {\n\n return Ok(t.into_tensor());\n\n }\n\n if let Ok(t) = npz.by_name::<tract_ndarray::OwnedRepr<f64>, tract_ndarray::IxDyn>(name) {\n\n return Ok(t.into_tensor());\n\n }\n\n if let Ok(t) = npz.by_name::<tract_ndarray::OwnedRepr<i8>, tract_ndarray::IxDyn>(name) {\n\n return Ok(t.into_tensor());\n\n }\n\n if let Ok(t) = npz.by_name::<tract_ndarray::OwnedRepr<i16>, tract_ndarray::IxDyn>(name) {\n\n return Ok(t.into_tensor());\n\n }\n\n if let Ok(t) = npz.by_name::<tract_ndarray::OwnedRepr<i32>, tract_ndarray::IxDyn>(name) {\n\n return Ok(t.into_tensor());\n\n }\n\n if let Ok(t) = npz.by_name::<tract_ndarray::OwnedRepr<i64>, tract_ndarray::IxDyn>(name) {\n\n return Ok(t.into_tensor());\n\n }\n\n if let Ok(t) = npz.by_name::<tract_ndarray::OwnedRepr<u8>, tract_ndarray::IxDyn>(name) {\n", "file_path": "libcli/src/tensor.rs", "rank": 54, "score": 321183.1004844237 }, { "content": "pub fn block_lstm(_ctx: &ParsingContext, node: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n let forget_bias = node.get_attr_opt_float(\"forget_bias\")?.unwrap_or(1.0);\n\n let cell_clip = node.get_attr_opt_float(\"cell_clip\")?.unwrap_or(3.0);\n\n let t = 
node.get_attr_datum_type(\"T\")?;\n\n let use_peephole = node.get_attr_opt_bool(\"use_peephole\")?.unwrap_or(false);\n\n if use_peephole {\n\n unimplemented!(\"Block LSTM peeplholes\");\n\n }\n\n Ok(expand(BlockLSTM::new(forget_bias, cell_clip, t, use_peephole)))\n\n}\n\n\n\n#[derive(Clone, Debug, new)]\n\n#[allow(dead_code)]\n\npub struct BlockLSTM {\n\n forget_bias: f32,\n\n cell_clip: f32,\n\n t: DatumType,\n\n use_peephole: bool,\n\n}\n\n\n", "file_path": "tensorflow/src/ops/rec/block_lstm.rs", "rank": 55, "score": 319874.583463372 }, { "content": "pub fn random_uniform(_ctx: &ParsingContext, node: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n let dtype = node.get_attr_datum_type(\"dtype\")?;\n\n let seed: u64 = node.get_attr_int(\"seed\")?;\n\n let seed2: u64 = node.get_attr_int(\"seed2\")?;\n\n Ok(Box::new(RandomUniform::new(dtype, seed, seed2)))\n\n}\n\n\n", "file_path": "tensorflow/src/ops/random/random_uniform.rs", "rank": 56, "score": 319874.58346337196 }, { "content": "pub fn avgpool(_ctx: &ParsingContext, pb: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n let ksize: Vec<usize> = pb.get_attr_list_int(\"ksize\")?;\n\n let data_format = super::data_format(pb)?;\n\n let kshape = data_format.shape(ksize)?;\n\n let strides = super::strides(pb)?;\n\n let padding = super::padding(pb)?;\n\n Ok(Box::new(SumPool::new(\n\n PoolSpec::new(\n\n data_format,\n\n kshape.hw_dims().into(),\n\n padding,\n\n None,\n\n Some(strides[kshape.hw_axes()].into()),\n\n None,\n\n ),\n\n false,\n\n true,\n\n )))\n\n}\n\n\n", "file_path": "tensorflow/src/ops/nn/pools.rs", "rank": 57, "score": 319822.4256952179 }, { "content": "pub fn maxpool(_ctx: &ParsingContext, pb: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n let ksize: Vec<usize> = pb.get_attr_list_int(\"ksize\")?;\n\n let data_format = super::data_format(pb)?;\n\n let kshape = data_format.shape(ksize)?;\n\n let strides = super::strides(pb)?;\n\n let padding = super::padding(pb)?;\n\n 
Ok(Box::new(MaxPool::new(\n\n PoolSpec::new(\n\n data_format,\n\n kshape.hw_dims().into(),\n\n padding,\n\n None,\n\n Some(strides[kshape.hw_axes()].into()),\n\n None,\n\n ),\n\n None,\n\n )))\n\n}\n", "file_path": "tensorflow/src/ops/nn/pools.rs", "rank": 58, "score": 319822.4256952179 }, { "content": "pub fn conv2d(_ctx: &ParsingContext, pb: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n let strides = super::strides(pb)?;\n\n let mut op =\n\n cnn::Conv::default().hwio().padding(super::padding(pb)?).strides(strides[1..3].into());\n\n if super::data_format(pb)? == DataFormat::NHWC {\n\n op = op.nhwc()\n\n }\n\n Ok(expand(op))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #![allow(non_snake_case)]\n\n use super::*;\n\n use tract_hir::ops::cnn::{Conv, PaddingSpec};\n\n use tract_ndarray::*;\n\n\n\n fn mk(sizes: &[usize]) -> Tensor {\n\n Array::range(1f32, sizes.iter().product::<usize>() as f32 + 1.0, 1.0)\n\n .into_shape(sizes)\n", "file_path": "tensorflow/src/ops/nn/conv2d.rs", "rank": 59, "score": 319822.4256952179 }, { "content": "/// Compares the outputs of a node in tract and tensorflow.\n\npub fn check_outputs(got: &[Vec<TValue>], params: &Parameters) -> TractResult<()> {\n\n let mut error = None;\n\n // iter over all possible tract model outputs\n\n for (ix, output) in params.tract_model.output_outlets().iter().enumerate() {\n\n // get either name from outlet_label or from node_name\n\n let name = if let Some(label) = params.tract_model.outlet_label(*output) {\n\n label\n\n } else {\n\n params.tract_model.node_name(output.node)\n\n };\n\n // pick expected tensor values for this output\n\n let exp = params\n\n .tensors_values\n\n .by_name(name)\n\n .with_context(|| format!(\"Do not have reference value for output {name:?}\"))?;\n\n debug!(\"Output {}, expects {:?}\", ix, exp);\n\n let mut exp: TValue = exp.values.as_ref().with_context(|| {\n\n format!(\"Output {name:?}: found reference info without value: {exp:?}\")\n\n })?[0]\n\n .clone();\n", "file_path": 
"cli/src/utils.rs", "rank": 60, "score": 318465.8648557358 }, { "content": "fn lower_const_f32(graph: &mut Graph, name: &str, tensor: &Tensor) -> Node {\n\n let mut t = onnx_pb::TensorProto::default();\n\n t.float_data = tensor.as_slice::<f32>().unwrap().to_vec();\n\n t.data_type = onnx_pb::tensor_proto::DataType::Float as _;\n\n t.dims = tensor.shape().iter().map(|d| *d as i64).collect();\n\n t.name = name.to_string();\n\n graph.constant(name, t)\n\n}\n\n\n", "file_path": "harness/onnx-proptest/src/lib.rs", "rank": 61, "score": 316690.09356054163 }, { "content": "fn lower_const_i32(graph: &mut Graph, name: &str, tensor: &Tensor) -> Node {\n\n let mut t = onnx_pb::TensorProto::default();\n\n t.int32_data = tensor.as_slice::<i32>().unwrap().to_vec();\n\n t.data_type = onnx_pb::tensor_proto::DataType::Int32 as _;\n\n t.dims = tensor.shape().iter().map(|d| *d as i64).collect();\n\n t.name = name.to_string();\n\n graph.constant(name, t)\n\n}\n\n\n\nproptest! {\n\n #[test]\n\n fn gru_prop(pb in any::<GruProblem>()) {\n\n pb.lower().unwrap().check().unwrap()\n\n }\n\n}\n\n\n", "file_path": "harness/onnx-proptest/src/lib.rs", "rank": 62, "score": 316690.09356054163 }, { "content": "pub fn build(_ctx: &ParsingContext, _pb: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n Ok(expand(ExpandDims))\n\n}\n\n\n\n#[derive(Debug, Clone, Hash)]\n\npub struct ExpandDims;\n\n\n\n\n\n\n\nimpl Expansion for ExpandDims {\n\n fn name(&self) -> Cow<str> {\n\n \"ExpandDims\".into()\n\n }\n\n\n\n\n\n fn rules<'r, 'p: 'r, 's: 'r>(\n\n &'s self,\n\n s: &mut Solver<'r>,\n\n inputs: &'p [TensorProxy],\n\n outputs: &'p [TensorProxy],\n", "file_path": "tensorflow/src/ops/array/expand_dims.rs", "rank": 63, "score": 315619.34817575174 }, { "content": "pub fn perm_to_ops(input: &[usize]) -> TVec<AxisOp> {\n\n perm_to_atoms(input).into_iter().map(|pair| AxisOp::Move(pair.0, pair.1)).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_perm_to_cycles() 
{\n\n assert_eq!(perm_to_cycles(&[1, 2, 0]), tvec!(tvec!(0, 1, 2)));\n\n assert_eq!(perm_to_cycles(&[2, 0, 1]), tvec!(tvec!(0, 2, 1)));\n\n assert_eq!(perm_to_cycles(&[1, 2, 3, 0]), tvec!(tvec!(0, 1, 2, 3)));\n\n assert_eq!(perm_to_cycles(&[3, 0, 1, 2]), tvec!(tvec!(0, 3, 2, 1)));\n\n assert_eq!(perm_to_cycles(&[3, 1, 2, 0, 4]), tvec!(tvec!(0, 3)));\n\n }\n\n\n\n #[test]\n\n fn is_rotation() {\n\n assert_eq!(is_rotation_cycle(&[0, 1, 2]), Some((0, 2)));\n", "file_path": "core/src/ops/change_axes.rs", "rank": 64, "score": 313583.177727081 }, { "content": "#[allow(dead_code)]\n\npub fn dyn_hash<H: Hash>(h: H, s: &mut dyn Hasher) {\n\n h.hash(&mut WrappedHasher(s))\n\n}\n", "file_path": "data/src/hash.rs", "rank": 65, "score": 311379.43558127887 }, { "content": "pub fn depthwise_conv2d(_ctx: &ParsingContext, pb: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n let data_format = super::data_format(pb)?;\n\n let padding = super::padding(pb)?;\n\n let strides = super::strides(pb)?.into();\n\n let dilations: TVec<usize> = pb.get_attr_list_int(\"dilations\")?.into();\n\n if dilations.len() != 4 || dilations[0] != 1 && dilations[3] != 1 {\n\n bail!(\"dilations must be of the form [1, h, v, 1], found {:?}\", dilations)\n\n };\n\n Ok(expand(DepthwiseConv2d::new(data_format, padding, strides, dilations)))\n\n}\n\n\n\n#[derive(Debug, Clone, new, Hash)]\n\npub struct DepthwiseConv2d {\n\n data_format: DataFormat,\n\n padding: PaddingSpec,\n\n strides: TVec<usize>,\n\n dilations: TVec<usize>,\n\n}\n\n\n\n\n", "file_path": "tensorflow/src/ops/nn/dw_conv2d.rs", "rank": 66, "score": 311191.64234932064 }, { "content": "pub fn parse_tdim(symbol_table: &SymbolTable, input: &str) -> TractResult<TDim> {\n\n match all_consuming(|i| expr(symbol_table, i))(input) {\n\n Ok(pair) => Ok(pair.1),\n\n Err(e) => anyhow::bail!(\"Failed to parse {:?}, {:?}\", input, e),\n\n }\n\n}\n\n\n", "file_path": "data/src/dim/parse.rs", "rank": 67, "score": 311025.204938625 }, { "content": "pub fn 
tdim(dim: &TDim) -> RValue {\n\n match dim {\n\n TDim::Val(x) => numeric(x),\n\n TDim::Sym(s) => ident(s.to_string()),\n\n TDim::Add(terms) => terms\n\n .iter()\n\n .map(tdim)\n\n .reduce(|x, y| RValue::Binary(x.boxed(), \"+\".to_string(), y.boxed()))\n\n .unwrap(),\n\n TDim::Mul(terms) => terms\n\n .iter()\n\n .map(tdim)\n\n .reduce(|x, y| RValue::Binary(x.boxed(), \"*\".to_string(), y.boxed()))\n\n .unwrap(),\n\n TDim::MulInt(x, y) => RValue::Binary(numeric(x).boxed(), \"*\".to_string(), tdim(y).boxed()),\n\n TDim::Div(x, y) => RValue::Binary(tdim(x).boxed(), \"/\".to_string(), numeric(y).boxed()),\n\n }\n\n}\n\n\n", "file_path": "nnef/src/ser.rs", "rank": 68, "score": 310740.03487373714 }, { "content": "fn rename_outputs(typed: &mut TypedModel, sub_matches: &clap::ArgMatches) -> TractResult<()> {\n\n if let Some(renamed) = sub_matches.values_of(\"nnef-override-output-name\") {\n\n for (ix, name) in renamed.into_iter().enumerate() {\n\n let output = typed.wire_node(\n\n name,\n\n tract_core::ops::identity::Identity,\n\n &[typed.output_outlets()?[ix]],\n\n )?;\n\n typed.outputs[ix] = output[0];\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "cli/src/dump.rs", "rank": 69, "score": 309332.58357716864 }, { "content": "type OpBuilder = fn(&ParsingContext, node: &NodeDef) -> TractResult<Box<dyn InferenceOp>>;\n\n\n\n#[derive(Clone, Default)]\n\npub struct TfOpRegister(pub HashMap<String, OpBuilder>);\n\n\n\nimpl TfOpRegister {\n\n pub fn insert(&mut self, s: &'static str, builder: OpBuilder) {\n\n self.0.insert(s.into(), builder);\n\n }\n\n}\n\n\n\npub struct Tensorflow {\n\n pub op_register: TfOpRegister,\n\n}\n\n\n\npub struct TfModelExtensions {\n\n pub control_inputs: Vec<(usize, usize)>,\n\n pub initializing_nodes: Vec<usize>,\n\n}\n\n\n", "file_path": "tensorflow/src/model.rs", "rank": 70, "score": 308831.0774914848 }, { "content": "pub fn space_to_batch_nd(_ctx: &ParsingContext, pb: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n let datum_type = 
pb.get_attr_datum_type(\"T\")?;\n\n Ok(Box::new(raw::SpaceToBatch::new(datum_type)))\n\n}\n\n\n", "file_path": "tensorflow/src/ops/nn/s2b/mod.rs", "rank": 71, "score": 307112.53912087274 }, { "content": "pub fn batch_to_space_nd(_ctx: &ParsingContext, pb: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n let datum_type = pb.get_attr_datum_type(\"T\")?;\n\n Ok(Box::new(raw::BatchToSpace::new(datum_type)))\n\n}\n\n\n", "file_path": "tensorflow/src/ops/nn/s2b/mod.rs", "rank": 72, "score": 307112.53912087274 }, { "content": "/// Generates a random tensor of a given size and type.\n\npub fn random(sizes: &[usize], datum_type: DatumType, tv: Option<&TensorValues>) -> Tensor {\n\n use rand::{Rng, SeedableRng};\n\n let mut rng = rand::rngs::StdRng::seed_from_u64(21242);\n\n let mut tensor = Tensor::zero::<f32>(sizes).unwrap();\n\n let slice = tensor.as_slice_mut::<f32>().unwrap();\n\n if let Some(range) = tv.and_then(|tv| tv.random_range.as_ref()) {\n\n slice.iter_mut().for_each(|x| *x = rng.gen_range(range.clone()))\n\n } else {\n\n slice.iter_mut().for_each(|x| *x = rng.gen())\n\n };\n\n tensor.cast_to_dt(datum_type).unwrap().into_owned()\n\n}\n", "file_path": "libcli/src/tensor.rs", "rank": 73, "score": 304753.24621171755 }, { "content": "pub fn for_model(model: &TypedModel) -> TractResult<AxesMapping> {\n\n let input_ranks = model\n\n .input_outlets()?\n\n .iter()\n\n .map(|io| model.outlet_fact(*io).map(|f| f.rank()))\n\n .collect::<TractResult<TVec<usize>>>()?;\n\n let output_ranks = model\n\n .output_outlets()?\n\n .iter()\n\n .map(|io| model.outlet_fact(*io).map(|f| f.rank()))\n\n .collect::<TractResult<TVec<usize>>>()?;\n\n let mut result = AxesMapping::disconnected_for_ranks(&input_ranks, &output_ranks)?;\n\n for tracking in full_axis_tracking(model)? 
{\n\n let mut reprs:Vec<char> = vec![];\n\n for (ix, outlet) in model.input_outlets()?.iter().enumerate() {\n\n if let Some(appearance) = tracking.outlets.get(outlet) {\n\n reprs.push(result.input_axis(ix, *appearance).unwrap().repr);\n\n }\n\n }\n\n for (ix, outlet) in model.output_outlets()?.iter().enumerate() {\n", "file_path": "core/src/axes/model.rs", "rank": 74, "score": 303727.3869688952 }, { "content": "pub fn fused_batch_norm(_ctx: &ParsingContext, pb: &NodeDef) -> TractResult<Box<dyn InferenceOp>> {\n\n let epsilon = pb.get_attr_float::<f32>(\"epsilon\")?;\n\n Ok(expand(FusedBatchNorm::new(epsilon)))\n\n}\n\n\n", "file_path": "tensorflow/src/ops/nn/fused_batch_norm.rs", "rank": 75, "score": 303179.6672792196 }, { "content": "#[allow(unused_variables)]\n\npub fn tensor_for_fact(\n\n fact: &TypedFact,\n\n streaming_dim: Option<usize>,\n\n tv: Option<&TensorValues>,\n\n) -> TractResult<Tensor> {\n\n if let Some(value) = &fact.konst {\n\n return Ok(value.clone().into_tensor());\n\n }\n\n #[cfg(pulse)]\n\n {\n\n if fact.shape.stream_info().is_some() {\n\n use tract_pulse::fact::StreamFact;\n\n use tract_pulse::internal::stream_symbol;\n\n let s = stream_symbol();\n\n if let Some(dim) = streaming_dim {\n\n let shape = fact\n\n .shape\n\n .iter()\n\n .map(|d| {\n\n d.eval(&SymbolValues::default().with(s, dim as i64)).to_usize().unwrap()\n", "file_path": "libcli/src/tensor.rs", "rank": 76, "score": 301830.0439901253 }, { "content": "pub fn eval_q(expr: &AxesMapping, qp: DatumType, inputs: TVec<TValue>) -> TractResult<Tensor> {\n\n let [a, b, bias, a0, a_scale, b0, b_scale, c0, c_scale] = &*inputs else {\n\n bail!(\"Expect exactly 9 inputs\")\n\n };\n\n\n\n let mut a = a.cast_to::<i32>()?.into_owned();\n\n let a0 = a0.cast_to_scalar::<i32>()?;\n\n a.as_slice_mut::<i32>()?.iter_mut().for_each(|x| *x -= a0);\n\n let mut b = b.cast_to::<i32>()?.into_owned();\n\n let b0 = b0.cast_to_scalar::<i32>()?;\n\n b.as_slice_mut::<i32>()?.iter_mut().for_each(|x| *x -= 
b0);\n\n\n\n let mut output =\n\n eval_t::<i32>(expr, tvec!(a.into_tvalue(), b.into_tvalue()))?.into_array::<i32>()?;\n\n let scale = a_scale.cast_to_scalar::<f32>()? * b_scale.cast_to_scalar::<f32>()?\n\n / c_scale.cast_to_scalar::<f32>()?;\n\n let scale = Scaler::new(scale, tract_linalg::mmm::RoundingPolicy::Even);\n\n let c0 = c0.cast_to_scalar::<i32>()?;\n\n\n\n if bias.rank() == 0 {\n", "file_path": "core/src/ops/einsum/eval.rs", "rank": 77, "score": 300650.47084789374 }, { "content": "fn eval_slice(input: &Tensor, axis: usize, start: usize, end: usize) -> TractResult<TVec<TValue>> {\n\n if end > input.shape()[axis] || start > end {\n\n bail!(\"Invalid range {}..{} for slicing {:?} on axis {}\", start, end, input, axis);\n\n }\n\n unsafe {\n\n let mut shape: TVec<_> = input.shape().into();\n\n shape[axis] = end - start;\n\n let mut tensor = Tensor::uninitialized_dt(input.datum_type(), &shape)?;\n\n tensor.assign_slice_unchecked(.., input, start..end, axis);\n\n Ok(tvec!(tensor.into_tvalue()))\n\n }\n\n}\n\n\n\nimpl TypedOp for Slice {\n\n fn output_facts(&self, inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> {\n\n anyhow::ensure!(inputs.len() == 1, \"Slice has one single input\");\n\n if let (Ok(start), Ok(end), Ok(len)) =\n\n (self.start.to_usize(), self.end.to_usize(), inputs[0].shape[self.axis].to_usize())\n\n {\n\n ensure!(start <= end);\n", "file_path": "core/src/ops/array/slice.rs", "rank": 78, "score": 298554.1744501 }, { "content": "type OpBuilder = fn(&ParsingContext, node: &str) -> TractResult<Box<dyn InferenceOp>>;\n\n\n\n#[derive(Clone, Default)]\n\npub struct KaldiOpRegister(pub HashMap<String, OpBuilder>);\n\n\n\nimpl KaldiOpRegister {\n\n pub fn insert(&mut self, s: &'static str, builder: OpBuilder) {\n\n self.0.insert(s.into(), builder);\n\n }\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct Kaldi {\n\n pub op_register: KaldiOpRegister,\n\n}\n\n\n\nimpl Framework<KaldiProtoModel, InferenceModel> for Kaldi {\n\n fn 
proto_model_for_read(&self, r: &mut dyn std::io::Read) -> TractResult<KaldiProtoModel> {\n\n use crate::parser;\n\n let mut v = vec![];\n", "file_path": "kaldi/src/model.rs", "rank": 79, "score": 298537.9208452542 }, { "content": "fn common_tryfrom(t: &TensorProto, path: Option<&str>) -> TractResult<Tensor> {\n\n let dt = DataType::from_i32(t.data_type).unwrap().try_into()?;\n\n let shape: Vec<usize> = t.dims.iter().map(|&i| i as usize).collect();\n\n // detect if the tensor is rather in an external file than inside the onnx file directly\n\n let is_external = t.data_location.is_some() && t.data_location == Some(1);\n\n if t.raw_data.len() > 0 {\n\n create_tensor(shape, dt, &t.raw_data)\n\n } else if is_external {\n\n if let Some(model_path) = path {\n\n // external files will be loaded and fed to the tensor if necessary\n\n let external_data = get_external_resources(t, model_path)?;\n\n create_tensor(shape, dt, &external_data)\n\n } else {\n\n bail!(\"no model path was specified in the parsing context, yet external data was detected. 
aborting\");\n\n }\n\n } else {\n\n use tract_ndarray::Array;\n\n let it = match dt {\n\n DatumType::Bool => {\n\n Array::from_shape_vec(&*shape, t.int32_data.iter().map(|&x| x != 0).collect())?\n", "file_path": "onnx/src/tensor.rs", "rank": 80, "score": 292963.2163057382 }, { "content": "pub fn q_params() -> BoxedStrategy<[Tensor; 6]> {\n\n (-10i32..10, -10i32..10, -10i32..10, -3..3i32, -3..3i32, -3..3i32)\n\n .prop_map(|(a0, b0, c0, a_scale, b_scale, c_scale)| {\n\n [\n\n tensor0(a0),\n\n tensor0(2f32.powi(a_scale)),\n\n tensor0(b0),\n\n tensor0(2f32.powi(b_scale)),\n\n tensor0(c0),\n\n tensor0(2f32.powi(c_scale)),\n\n ]\n\n })\n\n .boxed()\n\n}\n\n\n", "file_path": "core/src/ops/cnn/conv/proptest_q.rs", "rank": 81, "score": 292553.58485295624 }, { "content": "// fragment reshape<?>( input: tensor<?>, shape: integer[], axis_start: integer = 0, axis_count: integer = -1 )\n\n// -> ( output: tensor<?> );\n\npub fn reshape(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let input = invocation.named_arg_as(builder, \"input\")?;\n\n let input_shape = builder.model.outlet_fact(input)?.shape.to_tvec();\n\n let start: usize = invocation.named_arg_as(builder, \"axis_start\")?;\n\n let count: i64 = invocation.named_arg_as(builder, \"axis_count\")?;\n\n let count = if count == -1 { input_shape.len() - start } else { count as usize };\n\n let shape: TVec<TDim> =\n\n builder.allowing_new_symbols(|builder| invocation.named_arg_as(builder, \"shape\"))?;\n\n\n\n let mut replacement = shape;\n\n for i in 0..replacement.len() {\n\n if replacement[i] == 0.to_dim() {\n\n replacement[i] = input_shape[i + start].clone();\n\n }\n\n }\n\n if let Some(pos) = replacement.iter().position(|d| *d == (-1).to_dim()) {\n\n let product: TDim = replacement.iter().filter(|d| **d != (-1).to_dim()).product();\n\n let product_input: TDim = input_shape[start..][..count].iter().product();\n\n replacement[pos] = product_input.maybe_div(&product)?.0;\n\n }\n\n\n\n 
let op = AxisOp::Reshape(start, input_shape[start..][..count].into(), replacement);\n\n builder.wire(op, &[input])\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 82, "score": 291644.29288927926 }, { "content": "// fragment tile<?>( input: tensor<?>, repeats: integer[] ) -> ( output: tensor<?> );\n\npub fn tile(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let multipliers: TVec<TDim> = invocation.named_arg_as(builder, \"repeats\")?;\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n builder.wire(ops::array::Tile { multipliers }, &wire)\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 83, "score": 291638.41149677703 }, { "content": "// fragment squeeze<?>( input: tensor<?>, axes: integer[] ) -> ( output: tensor<?> );\n\npub fn squeeze(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let axes: TVec<usize> = invocation.named_arg_as(builder, \"axes\")?;\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n axes.iter()\n\n .sorted()\n\n .rev()\n\n .try_fold(wire, |wire, &axis| {\n\n builder.wire_as_outlets(ops::change_axes::AxisOp::Rm(axis), &wire)\n\n })\n\n .map(Value::from)\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 84, "score": 291638.41149677703 }, { "content": "// fragment slice<?>( input: tensor<?>, axes: integer[], begin: integer[], end: integer[] ) -> ( output: tensor<?> );\n\npub fn slice(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n let input_fact = builder.model.outlet_fact(wire[0])?.clone();\n\n let axes: TVec<usize> = invocation.named_arg_as(builder, \"axes\")?;\n\n let (mut begins, mut ends): (TVec<TDim>, TVec<TDim>) =\n\n builder.allowing_new_symbols(|builder| -> TractResult<_> {\n\n Ok((\n\n invocation.named_arg_as(builder, \"begin\")?,\n\n invocation.named_arg_as(builder, 
\"end\")?,\n\n ))\n\n })?;\n\n for (ix, d) in begins.iter_mut().enumerate() {\n\n if let Ok(i) = d.to_i64() {\n\n if i < 0 {\n\n *d += input_fact.shape[axes[ix]].to_dim();\n\n }\n\n }\n\n }\n\n\n\n // use \"<=\", no \"<\" end[axis] = 0 means \"up to the end\"\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 85, "score": 291637.90283612575 }, { "content": "// fragment pad( input: tensor<scalar>, padding: (integer, integer)[], border: string = 'constant', value: scalar = 0.0 ) -> ( output: tensor<scalar> );\n\npub fn pad(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n use tract_core::ops::array::Pad;\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n let padding: TVec<TVec<usize>> = invocation.named_arg_as(builder, \"padding\")?;\n\n let padding: Vec<(usize, usize)> = padding.iter().map(|a| (a[0], a[1])).collect();\n\n let value: Tensor = tensor0(invocation.named_arg_as::<f32>(builder, \"value\")?);\n\n let border: String = invocation.named_arg_as(builder, \"border\")?;\n\n let mode = pad_mode(&border, value)?;\n\n builder.wire(Pad { pads: padding, mode }, &wire)\n\n}\n\n\n\n/*\n\nfragment conv( input: tensor<scalar>, filter: tensor<scalar>,\n\nbias: tensor<scalar> = 0.0, border: string = 'constant',\n\npadding: (integer,integer)[] = [], stride: integer[] = [],\n\ndilation: integer[] = [], groups: integer = 1 )\n\n-> ( output: tensor<scalar> );\n\n*/\n\n\n\n/* fragment deconv(\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 86, "score": 291637.42330227385 }, { "content": "// fragment concat<?>( values: tensor<?>[], axis: integer ) -> ( value: tensor<?> );\n\npub fn concat(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let axis: usize = invocation.named_arg_as(builder, \"axis\")?;\n\n let mut values: TVec<OutletId> = invocation.named_arg_as(builder, \"values\")?;\n\n if let Some(Some(dt)) = invocation.dt_from_quant_file.get(0) {\n\n for value in &mut 
values {\n\n if builder.model.node(value.node).outputs[value.slot].fact.datum_type != *dt {\n\n *value = builder.wire_as_outlets(ops::cast::cast(*dt), &[*value])?[0];\n\n }\n\n }\n\n }\n\n\n\n builder.wire(ops::array::TypedConcat::new(axis), &values)\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 87, "score": 291632.8765578359 }, { "content": "// fragment external<? = scalar>( shape: integer[] ) -> ( output: tensor<?> );\n\npub fn external(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let type_name = invocation.invocation.generic_type_name.unwrap_or(TypeName::Scalar);\n\n let dt = if let Some(Some(dt)) = invocation.dt_from_quant_file.get(0) {\n\n *dt\n\n } else if type_name == TypeName::Scalar {\n\n f32::datum_type()\n\n } else if type_name == TypeName::Logical {\n\n bool::datum_type()\n\n } else if type_name == TypeName::Integer {\n\n i64::datum_type()\n\n } else {\n\n todo!()\n\n };\n\n let shape: TVec<TDim> =\n\n builder.allowing_new_symbols(|builder| invocation.named_arg_as(builder, \"shape\"))?;\n\n Ok(Value::Wire(builder.model.add_source(\"\", dt.fact(&shape))?))\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 88, "score": 291631.87636784633 }, { "content": "// fragment variable<? 
= scalar>( shape: integer[], label: string ) -> ( output: tensor<?> );\n\npub fn variable(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let shape: TVec<usize> = invocation.named_arg_as(builder, \"shape\")?;\n\n let label = Identifier(invocation.named_arg_as(builder, \"label\")?);\n\n let mut tensor = Arc::clone(\n\n builder\n\n .proto_model\n\n .tensors\n\n .get(&label)\n\n .ok_or_else(|| format_err!(\"No data for tensor {:?}\", label))?,\n\n );\n\n if let Some(Some(dt)) = invocation.dt_from_quant_file.get(0) {\n\n if dt.size_of() != tensor.datum_type().size_of() {\n\n bail!(\n\n \"Mismatched tensor type for tensor {}: expected {:?}, got {:?}\",\n\n label.0,\n\n *dt,\n\n tensor.datum_type()\n\n );\n\n }\n\n if *dt != tensor.datum_type() {\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 89, "score": 291631.67923815054 }, { "content": "pub fn deconv(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n conv_or_deconv(builder, invocation, true)\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 90, "score": 291620.2333160793 }, { "content": "pub fn matmul(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let a: OutletId = invocation.named_arg_as(builder, \"A\")?;\n\n let b: OutletId = invocation.named_arg_as(builder, \"B\")?;\n\n let a_trans = invocation.named_arg_as(builder, \"transposeA\")?;\n\n let b_trans = invocation.named_arg_as(builder, \"transposeB\")?;\n\n let a_dt = builder.model.outlet_fact(a)?.datum_type;\n\n let b_dt = builder.model.outlet_fact(b)?.datum_type;\n\n let a_rank = builder.model.outlet_fact(a)?.rank();\n\n let b_rank = builder.model.outlet_fact(b)?.rank();\n\n let c_rank = a_rank.max(b_rank);\n\n let mut axes = AxesMapping::for_numpy_matmul(c_rank, a_trans, b_trans, false)?;\n\n let name = &*invocation.invocation.id.0;\n\n if a_dt.is_quantized() || b_dt.is_quantized() {\n\n for input in 0..7 {\n\n axes = 
axes.add_input(input)?;\n\n }\n\n let accum_dt = DatumType::QI32(QParams::ZpScale {\n\n scale: a_dt.zp_scale().1 * b_dt.zp_scale().1,\n\n zero_point: 0,\n\n });\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 91, "score": 291620.2333160793 }, { "content": "pub fn unstack(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let wire = tvec!(invocation.named_arg_as(builder, \"value\")?);\n\n let axis: usize = invocation.named_arg_as(builder, \"axis\")?;\n\n\n\n let input_fact = builder.model.outlet_fact(wire[0])?.clone();\n\n\n\n (0..input_fact.shape[axis].clone().to_i32()?)\n\n .map(|start_int| {\n\n let start = start_int.to_dim();\n\n let end = (start_int + 1).to_dim();\n\n let sliced_wire = builder\n\n .wire_as_outlets(tract_core::ops::array::Slice { axis, start, end }, &wire)?;\n\n let squeezed_wire =\n\n builder.wire_as_outlets(ops::change_axes::AxisOp::Rm(axis), &sliced_wire)?;\n\n Ok(squeezed_wire[0])\n\n })\n\n .collect::<TractResult<TVec<_>>>()\n\n .map(Value::from)\n\n}\n\n\n\n/*\n\n * fragment softmax( x: tensor<scalar>, axes: integer[] = [1] ) -> ( y: tensor<scalar> )\n\n * {\n\n * m = max_reduce(x, axes = axes);\n\n * e = exp(x - m);\n\n * y = e / sum_reduce(e, axes = axes);\n\n * }\n\n */\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 92, "score": 291620.2333160793 }, { "content": "pub fn select(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let cond = invocation.named_arg_as(builder, \"condition\")?;\n\n let true_value = invocation.named_arg_as(builder, \"true_value\")?;\n\n let false_value = invocation.named_arg_as(builder, \"false_value\")?;\n\n let inputs = crate::registry::multicast(builder, &[cond, true_value, false_value])?;\n\n\n\n builder.wire(ops::logic::Iff {}, &inputs)\n\n}\n\n\n\n/*\n\n * fragment leaky_relu( x: tensor<scalar>, alpha: scalar )-> ( y: tensor<scalar> )\n\n */\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 93, "score": 
291620.2333160793 }, { "content": "pub fn conv(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n conv_or_deconv(builder, invocation, false)\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 94, "score": 291620.2333160793 }, { "content": "pub fn reduce(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let input = invocation.named_arg_as(builder, \"input\")?;\n\n let axes: TVec<usize> = invocation.named_arg_as(builder, \"axes\")?;\n\n let reducer_name = invocation.invocation.id.0.split('_').next().unwrap();\n\n let reducer = match reducer_name {\n\n \"sum\" => ops::nn::Reducer::Sum,\n\n \"min\" => ops::nn::Reducer::Min,\n\n \"max\" => ops::nn::Reducer::Max,\n\n \"argmin\" => ops::nn::Reducer::ArgMin(false),\n\n \"argmax\" => ops::nn::Reducer::ArgMax(false),\n\n _ => bail!(\"unsupported reducer: {}\", invocation.invocation.id.0),\n\n };\n\n let wire = builder.wire_as_outlets(ops::nn::Reduce::new(axes.clone(), reducer), &[input])?;\n\n if reducer_name != \"sum\" || !invocation.named_arg_as(builder, \"normalize\")? 
{\n\n return Ok(wire.into());\n\n }\n\n\n\n let fact = builder.model.outlet_fact(wire[0])?.clone();\n\n let input_shape = &builder.model.outlet_fact(input)?.shape;\n\n let cardinality: TDim = axes.iter().map(|ax| &input_shape[*ax]).product();\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 95, "score": 291620.2333160793 }, { "content": "pub fn softmax(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let x = invocation.named_arg_as(builder, \"x\")?;\n\n let axes: TVec<usize> = invocation.named_arg_as(builder, \"axes\")?;\n\n\n\n let input_fact = builder.model.outlet_fact(x)?.clone();\n\n let output_dt =\n\n invocation.dt_from_quant_file.get(0).cloned().flatten().unwrap_or(input_fact.datum_type);\n\n\n\n builder.wire(ops::nn::Softmax { axes, output_dt }, &[x])\n\n}\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 96, "score": 291620.2333160793 }, { "content": "pub fn stack(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {\n\n let axis: usize = invocation.named_arg_as(builder, \"axis\")?;\n\n let mut values: TVec<OutletId> = invocation.named_arg_as(builder, \"values\")?;\n\n if let Some(Some(dt)) = invocation.dt_from_quant_file.get(0) {\n\n for value in &mut values {\n\n if builder.model.node(value.node).outputs[value.slot].fact.datum_type != *dt {\n\n *value = builder.wire_as_outlets(ops::cast::cast(*dt), &[*value])?[0];\n\n }\n\n }\n\n }\n\n\n\n for value in &mut values {\n\n // add unsqueeze\n\n *value = builder.wire_as_outlets(ops::change_axes::AxisOp::Add(axis), &[*value])?[0];\n\n }\n\n\n\n builder.wire(ops::array::TypedConcat::new(axis), &values)\n\n}\n\n\n\n/*\n\n * fragment unstack<?>( value: tensor<?>, axis: integer ) -> ( values: tensor<?>[] )\n\n *\n\n * Inverse of stack operator\n\n */\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 97, "score": 291620.2333160793 }, { "content": "pub fn output_type(input: DatumType) -> DatumType {\n\n if input.is_float() {\n\n 
input\n\n } else {\n\n i32::datum_type()\n\n }\n\n}\n", "file_path": "core/src/ops/matmul.rs", "rank": 98, "score": 288877.2493851554 }, { "content": "pub fn to_proto_model(framework: &Nnef, model: &TypedModel) -> TractResult<ProtoModel> {\n\n let mut into_ast = IntoAst::new(framework, model);\n\n into_ast.translate().context(\"Translating model to AST\")?;\n\n into_ast.into_proto_model().context(\"Translating AST to proto model\")\n\n}\n\n\n", "file_path": "nnef/src/ser.rs", "rank": 99, "score": 287944.9155299785 } ]
Rust
dpu-cluster-core/src/pipeline/stages/loader.rs
upmem/dpu_cluster
92f143a9d7757a29e79a863d25afb4e6daeccc56
use std::sync::mpsc::Receiver; use std::sync::mpsc::Sender; use crate::pipeline::transfer::OutputMemoryTransfer; use crate::pipeline::PipelineError; use std::sync::Arc; use std::sync::Mutex; use crate::pipeline::transfer::InputMemoryTransfer; use crate::view::View; use crate::pipeline::stages::DpuGroup; use crate::pipeline::OutputResult; use crate::pipeline::stages::GroupJob; use crate::cluster::Cluster; use crate::memory::MemoryTransfer; use crate::error::ClusterError; use crate::pipeline::monitoring::EventMonitor; use crate::pipeline::monitoring::Process; use crate::pipeline::monitoring::Event; use crate::driver::Driver; use std::sync::mpsc::SyncSender; use crate::pipeline::stages::Stage; pub struct InputLoader<InputHandle> { cluster: Arc<Cluster>, transfer_receiver: Receiver<(DpuGroup, Vec<Vec<InputMemoryTransfer>>, Vec<(InputHandle, OutputMemoryTransfer)>)>, job_sender: Sender<GroupJob<InputHandle>>, output_sender: SyncSender<OutputResult<InputHandle>>, monitoring: EventMonitor, shutdown: Arc<Mutex<bool>> } impl <InputHandle> InputLoader<InputHandle> where InputHandle: Send + 'static { pub fn new(cluster: Arc<Cluster>, transfer_receiver: Receiver<(DpuGroup, Vec<Vec<InputMemoryTransfer>>, Vec<(InputHandle, OutputMemoryTransfer)>)>, job_sender: Sender<GroupJob<InputHandle>>, output_sender: SyncSender<OutputResult<InputHandle>>, mut monitoring: EventMonitor, shutdown: Arc<Mutex<bool>>) -> Self { monitoring.set_process(Process::Loader); InputLoader { cluster, transfer_receiver, job_sender, output_sender, monitoring, shutdown } } } impl <InputHandle> Stage for InputLoader<InputHandle> where InputHandle: Send + 'static { fn run(self) { let monitoring = self.monitoring; monitoring.record(Event::ProcessBegin); let driver = self.cluster.driver(); for (group, inputs, outputs) in self.transfer_receiver { let group_id = group.id; monitoring.record(Event::GroupLoadingBegin(group_id)); let is_ok = load_input_chunk(driver, &group, inputs, &self.output_sender); 
monitoring.record(Event::GroupLoadingEnd(group_id)); if is_ok { self.job_sender.send((group, outputs)).unwrap(); } } } } fn load_input_chunk<T>(driver: &Driver, group: &DpuGroup, chunk: Vec<Vec<InputMemoryTransfer>>, output_sender: &SyncSender<OutputResult<T>>) -> bool { match chunk.iter().max_by_key(|t| t.len()).map(|t| t.len()) { None => true, Some(max_len) => match do_memory_transfers(driver, group, chunk, max_len) { Err(err) => { output_sender.send(Err(PipelineError::InfrastructureError(err))).unwrap(); false }, Ok(_) => { let mut is_ok = true; for dpu in group.active_dpus() { match driver.boot(&View::one(dpu.clone())) { Ok(_) => (), Err(err) => { output_sender.send(Err(PipelineError::InfrastructureError(err))).unwrap(); is_ok = false; break; } } } is_ok }, }, } } fn do_memory_transfers(driver: &Driver, group: &DpuGroup, mut chunk: Vec<Vec<InputMemoryTransfer>>, max_len: usize) -> Result<(), ClusterError> { let mut memory_transfers = Vec::with_capacity(max_len); for _ in 0..max_len { memory_transfers.push(MemoryTransfer::default()); } let dpus = group.active_dpus().collect::<Vec<_>>(); for (idx, transfers) in chunk.iter_mut().enumerate() { for (i, transfer) in transfers.iter_mut().enumerate() { let memory_transfer = memory_transfers.get_mut(i).unwrap(); memory_transfer.add_in_place(*dpus.get(idx).unwrap().clone(), transfer.offset, transfer.content.as_mut_slice()); } } for memory_transfer in memory_transfers.iter_mut() { driver.copy_to_memory(memory_transfer)?; } Ok(()) }
use std::sync::mpsc::Receiver; use std::sync::mpsc::Sender; use crate::pipeline::transfer::OutputMemoryTransfer; use crate::pipeline::PipelineError; use std::sync::Arc; use std::sync::Mutex; use crate::pipeline::transfer::InputMemoryTransfer; use crate::view::View; use crate::pipeline::stages::DpuGroup; use crate::pipeline::OutputResult; use crate::pipeline::stages::GroupJob; use crate::cluster::Cluster; use crate::memory::MemoryTransfer; use crate::error::ClusterError; use crate::pipeline::monitoring::EventMonitor; use crate::pipeline::monitoring::Process; use crate::pipeline::monitoring::Event; use crate::driver::Driver; use std::sync::mpsc::SyncSender; use crate::pipeline::stages::Stage; pub struct InputLoader<InputHandle> { cluster: Arc<Cluster>, transfer_receiver: Receiver<(DpuGroup, Vec<Vec<InputMemoryTransfer>>, Vec<(InputHandle, OutputMemoryTransfer)>)>, job_sender: Sender<GroupJob<InputHandle>>, output_sender: SyncSender<OutputResult<InputHandle>>, monitoring: EventMonitor, shutdown: Arc<Mutex<bool>> } impl <InputHandle> InputLoader<InputHandle> where InputHandle: Send + 'static { pub fn new(cluster: Arc<Cluster>, transfer_receiver: Receiver<(DpuGroup, Vec<Vec<InputMemoryTransfer>>, Vec<(InputHandle, OutputMemoryTransfer)>)>, job_sender: Sender<GroupJob<InputHandle>>, output_sender: SyncSender<OutputResult<InputHandle>>, mut monitoring: EventMonitor, shutdown: Arc<Mutex<bool>>) -> Self { monitoring.set_process(Process::Loader); InputLoader { cluster, transfer_receiver, job_sender, output_sender, monitoring, shutdown } } } impl <InputHandle> Stage for InputLoader<InputHandle> where InputHandle: Send + 'static { fn run(self) { let monitoring = self.monitoring; monitoring.record(Event::ProcessBegin); let driver = self.cluster.driver(); for (group, inputs, outputs) in self.transfer_receiver { let group_id = group.id; monitoring.record(Event::GroupLoadingBegin(group_id)); let is_ok = load_input_chunk(driver, &group, inputs, &self.output_sender); 
monitoring.record(Event::GroupLoadingEnd(group_i
} fn load_input_chunk<T>(driver: &Driver, group: &DpuGroup, chunk: Vec<Vec<InputMemoryTransfer>>, output_sender: &SyncSender<OutputResult<T>>) -> bool { match chunk.iter().max_by_key(|t| t.len()).map(|t| t.len()) { None => true, Some(max_len) => match do_memory_transfers(driver, group, chunk, max_len) { Err(err) => { output_sender.send(Err(PipelineError::InfrastructureError(err))).unwrap(); false }, Ok(_) => { let mut is_ok = true; for dpu in group.active_dpus() { match driver.boot(&View::one(dpu.clone())) { Ok(_) => (), Err(err) => { output_sender.send(Err(PipelineError::InfrastructureError(err))).unwrap(); is_ok = false; break; } } } is_ok }, }, } } fn do_memory_transfers(driver: &Driver, group: &DpuGroup, mut chunk: Vec<Vec<InputMemoryTransfer>>, max_len: usize) -> Result<(), ClusterError> { let mut memory_transfers = Vec::with_capacity(max_len); for _ in 0..max_len { memory_transfers.push(MemoryTransfer::default()); } let dpus = group.active_dpus().collect::<Vec<_>>(); for (idx, transfers) in chunk.iter_mut().enumerate() { for (i, transfer) in transfers.iter_mut().enumerate() { let memory_transfer = memory_transfers.get_mut(i).unwrap(); memory_transfer.add_in_place(*dpus.get(idx).unwrap().clone(), transfer.offset, transfer.content.as_mut_slice()); } } for memory_transfer in memory_transfers.iter_mut() { driver.copy_to_memory(memory_transfer)?; } Ok(()) }
d)); if is_ok { self.job_sender.send((group, outputs)).unwrap(); } } }
function_block-function_prefixed
[ { "content": "pub trait Stage: Sized + Send + 'static {\n\n fn launch(mut self) -> Result<ThreadHandle, PipelineError> {\n\n self.init()?;\n\n\n\n Ok(Some(thread::spawn(|| self.run())))\n\n }\n\n\n\n fn init(&mut self) -> Result<(), PipelineError> { Ok(()) }\n\n fn run(self);\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct DpuGroup {\n\n pub id: GroupId,\n\n pub dpus: Vec<(DpuId, bool)>\n\n}\n\n\n\nimpl DpuGroup {\n\n pub fn active_dpus(&self) -> impl Iterator<Item=&DpuId> {\n\n self.dpus.iter().filter_map(|(dpu , active)| if *active { Some(dpu) } else { None })\n\n }\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mod.rs", "rank": 1, "score": 233902.15152110002 }, { "content": "fn fetch_group_status(driver: &Driver, group: &DpuGroup) -> Result<RunStatus, ClusterError> {\n\n // todo add this as a view optimization?\n\n\n\n let mut global_status = RunStatus::default();\n\n\n\n for dpu in group.active_dpus() {\n\n let status = driver.fetch_status(&View::one(dpu.clone()))?;\n\n global_status = global_status.merge_with(&status)\n\n }\n\n\n\n Ok(global_status)\n\n}", "file_path": "dpu-cluster-core/src/pipeline/stages/tracker.rs", "rank": 3, "score": 208445.1175763363 }, { "content": "fn build_and_launch_group<K>(mut group: DpuGroup, mut dpus: HashMap<DpuId, MemoryTransfers<K>>,\n\n transfer_sender: &Sender<(DpuGroup, Vec<Vec<InputMemoryTransfer>>, Vec<(K, OutputMemoryTransfer)>)>) {\n\n let group_size = group.dpus.len();\n\n let mut inputs = Vec::with_capacity(group_size);\n\n let mut outputs = Vec::with_capacity(group_size);\n\n\n\n for (dpu, is_active) in group.dpus.iter_mut() {\n\n match dpus.remove(dpu) {\n\n None => {\n\n *is_active = false;\n\n },\n\n Some(transfers) => {\n\n *is_active = true;\n\n inputs.push(transfers.inputs);\n\n outputs.push((transfers.key, transfers.output));\n\n },\n\n }\n\n }\n\n\n\n transfer_sender.send((group, inputs, outputs)).unwrap();\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 4, 
"score": 187959.77028648547 }, { "content": "fn add_waiting_input<K>(waiting_inputs: &mut HashMap<GroupId, HashMap<DpuId, Vec<MemoryTransfers<K>>>>,\n\n group_id: GroupId, dpu_id: DpuId, transfers: MemoryTransfers<K>) {\n\n let group_entry = match waiting_inputs.entry(group_id) {\n\n Entry::Occupied(entry) => entry.into_mut(),\n\n Entry::Vacant(entry) => entry.insert(HashMap::default()),\n\n };\n\n\n\n let dpu_entry = match group_entry.entry(dpu_id) {\n\n Entry::Occupied(entry) => entry.into_mut(),\n\n Entry::Vacant(entry) => entry.insert(Vec::default()),\n\n };\n\n\n\n dpu_entry.push(transfers);\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 5, "score": 187686.2312132992 }, { "content": "fn do_sort(driver: &Driver, input_file: &str, output_file: &str) -> Result<(), AppError> {\n\n let mram_size = driver.rank_description.memories.mram_size;\n\n let dpu = DpuId::new(0, 0, 0);\n\n let view = View::one(dpu);\n\n let program = fetch_dpu_program()?;\n\n let (mut strings, mut addresses, string_map) = extract_inputs(input_file, mram_size)?;\n\n let mut nb_of_words = vec![addresses.len() as u32, 0];\n\n\n\n driver.load(&view, &program)?;\n\n\n\n {\n\n let mut input_tranfers = prepare_input_memory_transfers(dpu, &mut strings, &mut addresses, &mut nb_of_words);\n\n for transfer in input_tranfers.iter_mut() {\n\n driver.copy_to_memory(transfer)?;\n\n }\n\n }\n\n\n\n driver.run(&view)?;\n\n\n\n {\n\n let mut output_tranfer = prepare_output_memory_transfer(dpu, &mut addresses);\n\n driver.copy_from_memory(&mut output_tranfer)?;\n\n }\n\n\n\n process_outputs(addresses, output_file, string_map)\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort/main.rs", "rank": 6, "score": 172942.62072320847 }, { "content": "fn fetch_next_group(groups: &mut Vec<DpuGroup>, group_receiver: &Receiver<DpuGroup>) -> DpuGroup {\n\n match groups.pop() {\n\n Some(grp) => grp,\n\n None => {\n\n // todo: fix the issue where no group may be sent because 
all have failed\n\n let mut grp = group_receiver.recv().unwrap();\n\n\n\n loop {\n\n match group_receiver.try_recv() {\n\n Ok(other_group) => groups.push(other_group),\n\n Err(_) => break,\n\n }\n\n }\n\n\n\n for (_, activity) in grp.dpus.iter_mut() {\n\n *activity = true;\n\n }\n\n\n\n grp\n\n },\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 7, "score": 169569.79240020938 }, { "content": "fn prepare_output_memory_transfer(dpu: DpuId, output: &mut [u32]) -> MemoryTransfer<'_> {\n\n MemoryTransfer::default().add(dpu, ADDRESSES_OFFSET, output)\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort/main.rs", "rank": 8, "score": 157117.3293221534 }, { "content": "fn still_some_waiting_inputs<K>(waiting_inputs: &HashMap<GroupId, HashMap<DpuId, Vec<MemoryTransfers<K>>>>) -> bool {\n\n waiting_inputs.iter().any(|(_, e)| e.values().any(|v| !v.is_empty()))\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 9, "score": 149150.44259128498 }, { "content": "struct Input {\n\n id: usize,\n\n fragment_id: usize,\n\n prefix: String\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/basic_search/main.rs", "rank": 10, "score": 146803.24843151943 }, { "content": "struct BaseMapper<InputItem, InputHandle> {\n\n groups: Vec<DpuGroup>,\n\n input_receiver: Receiver<InputItem>,\n\n group_receiver: Receiver<DpuGroup>,\n\n transfer_sender: Sender<(DpuGroup, Vec<Vec<InputMemoryTransfer>>, Vec<(InputHandle, OutputMemoryTransfer)>)>,\n\n monitoring: EventMonitor,\n\n // todo: use or remove\n\n shutdown: Arc<Mutex<bool>>\n\n}\n\n\n\npub struct SimpleMapper<InputItem, InputHandle> {\n\n base: BaseMapper<InputItem, InputHandle>,\n\n get_transfers: Box<dyn Fn(InputItem) -> MemoryTransfers<InputHandle> + Send>\n\n}\n\n\n\npub struct PersistentMapper<InputItem, InputHandle, FragmentId, FragmentIterator> {\n\n base: BaseMapper<InputItem, InputHandle>,\n\n cluster: Arc<Cluster>,\n\n get_transfers: Box<dyn Fn(InputItem) -> 
(FragmentId, MemoryTransfers<InputHandle>) + Send>,\n\n output_sender: SyncSender<OutputResult<InputHandle>>,\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 11, "score": 142773.96359714185 }, { "content": "fn is_group_complete<T>(group: &DpuGroup, entries: &HashMap<DpuId, T>) -> bool {\n\n group.dpus.len() == entries.len()\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 12, "score": 140585.9488290268 }, { "content": "fn extract_first_waiting_input<K>(waiting_inputs: &mut HashMap<DpuId, Vec<MemoryTransfers<K>>>) -> HashMap<DpuId, MemoryTransfers<K>> {\n\n let mut firsts = HashMap::default();\n\n\n\n for (dpu_id, waiting_transfers) in waiting_inputs.iter_mut() {\n\n match waiting_transfers.pop() {\n\n None => {},\n\n Some(first) => { firsts.insert(*dpu_id, first); },\n\n }\n\n }\n\n\n\n firsts\n\n}", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 13, "score": 140543.5615229386 }, { "content": "fn fetch_available_groups(group_receiver: &Receiver<DpuGroup>) -> Vec<DpuGroup> {\n\n group_receiver.try_iter().collect()\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 14, "score": 133202.18287816035 }, { "content": "fn map_input_query(input: Input) -> (usize, MemoryTransfers<usize>) {\n\n let mut query_content = input.prefix.into_bytes();\n\n query_content.resize(QUERY_BUFFER_SIZE as usize, 0);\n\n\n\n let transfers = MemoryTransfers {\n\n inputs: vec![InputMemoryTransfer {\n\n offset: QUERY_BUFFER_ADDRESS,\n\n content: query_content\n\n }],\n\n output: OutputMemoryTransfer {\n\n offset: OUTPUT_BUFFER_ADDRESS,\n\n length: OUTPUT_BUFFER_SIZE\n\n },\n\n key: input.id\n\n };\n\n\n\n (input.fragment_id, transfers)\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/basic_search/main.rs", "rank": 15, "score": 133085.98665994816 }, { "content": "fn prepare_input_memory_transfers<'a>(dpu: DpuId, strings: &'a mut [u8], addresses: &'a mut [u32], nb_of_words: 
&'a mut [u32]) -> Vec<MemoryTransfer<'a>> {\n\n let strings_tranfer = MemoryTransfer::default().add(dpu, STRINGS_OFFSET, strings);\n\n let addresses_tranfer = MemoryTransfer::default().add(dpu, ADDRESSES_OFFSET, addresses);\n\n let nb_of_words_tranfer = MemoryTransfer::default().add(dpu, NB_OF_WORDS_OFFSET, nb_of_words);\n\n\n\n vec![strings_tranfer, addresses_tranfer, nb_of_words_tranfer]\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort/main.rs", "rank": 16, "score": 132519.93543769253 }, { "content": "#[derive(Debug)]\n\nstruct RankHandler {\n\n ranks: Vec<DpuRank>\n\n}\n\n\n\n#[derive(Clone)]\n\npub enum FaultCause {\n\n Breakpoint,\n\n Memory,\n\n Dma\n\n}\n\n\n\npub struct FaultInformation {\n\n dpu: DpuId,\n\n context: DpuDebugContext\n\n}\n\n\n\npub enum RunStatus {\n\n Idle,\n\n Running,\n\n Fault(Vec<DpuId>)\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 17, "score": 119736.7784167677 }, { "content": "pub trait Mergeable {\n\n fn merge_with(&self, other: &Self) -> Self;\n\n}\n\n\n\nimpl Mergeable for () {\n\n fn merge_with(&self, _: &Self) -> Self {\n\n ()\n\n }\n\n}\n\n\n\nimpl Mergeable for RunStatus {\n\n fn merge_with(&self, other: &Self) -> Self {\n\n match (self, other) {\n\n (RunStatus::Idle, RunStatus::Idle) => RunStatus::Idle,\n\n (RunStatus::Idle, RunStatus::Running) => RunStatus::Running,\n\n (RunStatus::Idle, RunStatus::Fault(other_faults)) => RunStatus::Fault(other_faults.to_vec()),\n\n (RunStatus::Running, RunStatus::Idle) => RunStatus::Running,\n\n (RunStatus::Running, RunStatus::Running) => RunStatus::Running,\n\n (RunStatus::Running, RunStatus::Fault(other_faults)) => RunStatus::Fault(other_faults.to_vec()),\n\n (RunStatus::Fault(faults), RunStatus::Idle) => RunStatus::Fault(faults.to_vec()),\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 18, "score": 117474.09658384892 }, { "content": "fn process_outputs(name: &str, output: Vec<u8>) -> Result<(), AppError> {\n\n let mut file = 
OpenOptions::new().append(true).create(true).open(format!(\"{}.out.txt\", name))?;\n\n let output_bytes = (((output[0] as u32) & 0xFF) |\n\n (((output[1] as u32) & 0xFF) << 8) |\n\n (((output[2] as u32) & 0xFF) << 16) |\n\n (((output[3] as u32) & 0xFF) << 24)) * 32;\n\n\n\n let output_results = &output.as_slice()[8..(8 + output_bytes as usize)];\n\n\n\n file.write(output_results)?;\n\n\n\n Ok(())\n\n}\n\n\n\nimpl From<ClusterError> for AppError {\n\n fn from(err: ClusterError) -> Self {\n\n AppError::DpuInitError(err)\n\n }\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/basic_search/main.rs", "rank": 19, "score": 115633.18358277516 }, { "content": "fn process_outputs(output: Vec<u8>, filename: &str) -> Result<(), AppError> {\n\n let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(filename)?;\n\n\n\n for entry in output.chunks(4) {\n\n let index = {\n\n let mut val = 0u32;\n\n for (idx, byte) in entry.iter().enumerate() {\n\n val = val | (((*byte as u32) & 0xff) << (idx * 8));\n\n }\n\n val\n\n };\n\n\n\n let index_as_string = format!(\"{}\", index);\n\n file.write(index_as_string.as_bytes())?;\n\n file.write(b\"\\n\")?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort_pipeline/main.rs", "rank": 20, "score": 114459.85527703514 }, { "content": "type GroupJob<K> = (DpuGroup, Vec<(K, OutputMemoryTransfer)>);", "file_path": "dpu-cluster-core/src/pipeline/stages/mod.rs", "rank": 21, "score": 112098.88471654562 }, { "content": "fn process_outputs(output: Vec<u32>, filename: &str, string_map: HashMap<u32, String>) -> Result<(), AppError> {\n\n let mut file = OpenOptions::new().write(true).create(true).truncate(true).open(filename)?;\n\n\n\n for entry in output {\n\n let string = string_map.get(&entry).ok_or_else(|| AppError::InvalidStringEntry(entry))?;\n\n file.write(string.as_bytes())?;\n\n file.write(b\"\\n\")?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl From<ClusterError> for AppError {\n\n fn from(err: 
ClusterError) -> Self {\n\n AppError::DpuError(err)\n\n }\n\n}\n\n\n\nimpl From<io::Error> for AppError {\n\n fn from(err: io::Error) -> Self {\n\n AppError::FileManagementError(err)\n\n }\n\n}", "file_path": "dpu-cluster-core/examples/string_sort/main.rs", "rank": 22, "score": 103031.15929217821 }, { "content": "fn map_transfers(input: (usize, Vec<u8>, Vec<u32>)) -> MemoryTransfers<usize> {\n\n let (idx, strings, addresses) = input;\n\n let nr_of_words = addresses.len() as u32;\n\n\n\n MemoryTransfers {\n\n inputs: vec![\n\n InputMemoryTransfer::from_u8_vec(STRINGS_OFFSET, strings),\n\n InputMemoryTransfer::from_u32_vec(ADDRESSES_OFFSET, addresses),\n\n InputMemoryTransfer::from_u32_vec(NB_OF_WORDS_OFFSET, vec![nr_of_words, 0]),\n\n ],\n\n output: OutputMemoryTransfer {\n\n offset: ADDRESSES_OFFSET,\n\n length: nr_of_words * 4\n\n },\n\n key: idx\n\n }\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort_pipeline/main.rs", "rank": 23, "score": 102639.78574626999 }, { "content": "fn create_groups_from(policy: GroupPolicy, nr_ranks: u8, nr_slices: u8, nr_dpus: u8) -> Vec<DpuGroup> {\n\n match policy {\n\n GroupPolicy::Dpu => {\n\n let mut vec = Vec::with_capacity((nr_ranks as usize) * (nr_slices as usize) * (nr_dpus as usize));\n\n\n\n for rank_idx in 0..nr_ranks {\n\n for slice_idx in 0..nr_slices {\n\n for dpu_idx in 0..nr_dpus {\n\n let id = ((rank_idx as u32) * (nr_slices as u32) * (nr_dpus as u32)) + ((slice_idx as u32) * (nr_dpus as u32)) + (dpu_idx as u32);\n\n let dpus = vec![(DpuId::new(rank_idx, slice_idx, dpu_idx), true)];\n\n vec.push(DpuGroup { id, dpus });\n\n }\n\n }\n\n }\n\n\n\n vec\n\n },\n\n GroupPolicy::Slice => {\n\n let mut vec = Vec::with_capacity((nr_ranks as usize) * (nr_dpus as usize));\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/pipeline.rs", "rank": 24, "score": 94693.5596085956 }, { "content": "struct Query {\n\n prefix: String\n\n}\n\n\n\nimpl Query {\n\n pub fn new<S: ToString >(prefix: S) -> Self {\n\n Query { 
prefix: prefix.to_string() }\n\n }\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/basic_search/main.rs", "rank": 25, "score": 85431.91682900049 }, { "content": "struct Context {\n\n next_fragment_id: Arc<Mutex<usize>>,\n\n input_file: File\n\n}\n\n\n\nimpl Context {\n\n pub fn new<S: ToString>(next_fragment_id: Arc<Mutex<usize>>, file: S) -> Result<Self, AppError> {\n\n let input_file = File::open(file.to_string())?;\n\n\n\n Ok(Context { next_fragment_id, input_file })\n\n }\n\n}\n\n\n\nimpl Iterator for Context {\n\n type Item = (usize, InputMemoryTransfer);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let mut buffer = vec![0u8; INPUT_BUFFER_SIZE as usize];\n\n\n\n match self.input_file.read(buffer.as_mut_slice()) {\n", "file_path": "dpu-cluster-core/examples/basic_search/main.rs", "rank": 26, "score": 85431.91682900049 }, { "content": "type GroupId = u32;", "file_path": "dpu-cluster-core/src/pipeline/mod.rs", "rank": 27, "score": 81718.6497616001 }, { "content": "fn extract_inputs(filename: &str, mram_size: u32, idx: usize) -> Result<(usize, Vec<u8>, Vec<u32>), AppError> {\n\n let file = File::open(filename)?;\n\n let file = BufReader::new(file);\n\n\n\n let mut string_addresses = Vec::default();\n\n let mut strings = Vec::default();\n\n\n\n for line in file.lines() {\n\n let offset = strings.len();\n\n let line = line?;\n\n\n\n strings.extend(line.as_bytes());\n\n strings.push(b'\\0');\n\n\n\n let end = strings.len();\n\n strings.resize((end + 7) & !7, b'\\0');\n\n\n\n string_addresses.push(offset as u32);\n\n }\n\n\n\n if strings.len() > ((mram_size - STRINGS_OFFSET) as usize) {\n\n return Err(AppError::InputFileTooBig(strings.len()))\n\n }\n\n\n\n Ok((idx, strings, string_addresses))\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort_pipeline/main.rs", "rank": 28, "score": 77353.76872817297 }, { "content": "fn extract_inputs(filename: &str, mram_size: u32) -> Result<(Vec<u8>, Vec<u32>, HashMap<u32, String>), AppError> {\n\n let 
file = File::open(filename)?;\n\n let file = BufReader::new(file);\n\n\n\n let mut string_map = HashMap::default();\n\n let mut string_addresses = Vec::default();\n\n let mut strings = Vec::default();\n\n\n\n for line in file.lines() {\n\n let offset = strings.len();\n\n let line = line?;\n\n\n\n strings.extend(line.as_bytes());\n\n strings.push(b'\\0');\n\n\n\n let end = strings.len();\n\n strings.resize((end + 7) & !7, b'\\0');\n\n\n\n string_addresses.push(offset as u32);\n\n string_map.insert(offset as u32, line);\n\n }\n\n\n\n if strings.len() > ((mram_size - STRINGS_OFFSET) as usize) {\n\n return Err(AppError::InputFileTooBig(strings.len()))\n\n }\n\n\n\n Ok((strings, string_addresses, string_map))\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort/main.rs", "rank": 29, "score": 77353.76872817297 }, { "content": "fn find_nr_of_available_dpus_for(target: &DpuTarget) -> Result<u32, ClusterError> {\n\n let (dpu_type, ref profile) = target.to_cni_args();\n\n let nr_of_dpus = DpuRank::find_nr_of_available_dpus_for(dpu_type, profile)?;\n\n\n\n Ok(nr_of_dpus)\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/cluster.rs", "rank": 30, "score": 74321.45222354025 }, { "content": "fn find_description_for(target: &DpuTarget) -> Result<DpuRankDescription, ClusterError> {\n\n let (dpu_type, ref profile) = target.to_cni_args();\n\n let description = DpuRank::get_description_for(dpu_type, profile)?;\n\n\n\n Ok(description)\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/cluster.rs", "rank": 31, "score": 74321.45222354025 }, { "content": "fn main() -> Result<(), AppError> {\n\n let queries = vec![\n\n Query::new(\"Artemis\"),\n\n Query::new(\"turtle\"),\n\n Query::new(\"Schroedinger\")\n\n ];\n\n\n\n let config = ClusterConfiguration::for_functional_simulator(1);\n\n let cluster = Cluster::create(config)?;\n\n\n\n let program = fetch_dpu_program()?;\n\n\n\n let (input_tx, input_rx) = channel();\n\n\n\n let nr_of_fragments = Arc::new(Mutex::new(0));\n\n let context 
= Context::new(nr_of_fragments.clone(), DATA_FILE)?;\n\n\n\n let outputs = Plan::from(input_rx)\n\n .for_persistent_model(map_input_query, context)\n\n .driving(cluster)\n", "file_path": "dpu-cluster-core/examples/basic_search/main.rs", "rank": 32, "score": 73716.10603494539 }, { "content": "fn main() -> Result<(), AppError> {\n\n let config = ClusterConfiguration::for_functional_simulator(1);\n\n let cluster = Cluster::create(config)?;\n\n let driver = cluster.driver();\n\n\n\n do_sort(driver, INPUT_FILE, OUTPUT_FILE)\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort/main.rs", "rank": 33, "score": 73716.10603494539 }, { "content": "fn main() -> Result<(), AppError> {\n\n let config = ClusterConfiguration::for_functional_simulator(1);\n\n let cluster = Cluster::create(config)?;\n\n let mram_size = cluster.driver().rank_description.memories.mram_size;\n\n\n\n let program = fetch_dpu_program()?;\n\n\n\n let inputs = INPUTS.iter()\n\n .enumerate()\n\n .map(move |(idx, filename)| extract_inputs(filename, mram_size, idx).unwrap());\n\n\n\n let outputs = Plan::from(inputs)\n\n .for_simple_model(map_transfers)\n\n .driving(cluster)\n\n .running(&program)\n\n .build()?;\n\n\n\n for output in outputs {\n\n let (idx, output) = output?;\n\n let filename = format!(\"output{}.txt\", idx);\n\n process_outputs(output, &filename)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort_pipeline/main.rs", "rank": 34, "score": 72759.0885099559 }, { "content": "fn fetch_dpu_program() -> Result<Program, AppError> {\n\n Ok(Program::new_raw(std::fs::read(DPU_PROGRAM_IRAM)?, std::fs::read(DPU_PROGRAM_WRAM)?))\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/basic_search/main.rs", "rank": 35, "score": 69311.76449425098 }, { "content": "fn fetch_dpu_program() -> Result<Program, AppError> {\n\n Ok(Program::new_raw(std::fs::read(DPU_PROGRAM_IRAM)?, std::fs::read(DPU_PROGRAM_WRAM)?))\n\n}\n\n\n", "file_path": 
"dpu-cluster-core/examples/string_sort/main.rs", "rank": 36, "score": 69311.76449425098 }, { "content": "fn fetch_dpu_program() -> Result<Program, AppError> {\n\n Ok(Program::new_raw(std::fs::read(DPU_PROGRAM_IRAM)?, std::fs::read(DPU_PROGRAM_WRAM)?))\n\n}\n\n\n", "file_path": "dpu-cluster-core/examples/string_sort_pipeline/main.rs", "rank": 37, "score": 68452.00174569817 }, { "content": "fn allocate_at_least(nr_of_dpus: u32, description: &DpuRankDescription, target: &DpuTarget) -> Result<Vec<DpuRank>, ClusterError> {\n\n let nr_of_dpus_per_rank = (description.topology.nr_of_control_interfaces as u32) * (description.topology.nr_of_dpus_per_control_interface as u32);\n\n let nr_of_ranks = (nr_of_dpus / nr_of_dpus_per_rank) + if (nr_of_dpus % nr_of_dpus_per_rank) == 0 { 0 } else { 1 };\n\n let mut ranks = Vec::with_capacity(nr_of_ranks as usize);\n\n\n\n let (dpu_type, ref profile) = target.to_cni_args();\n\n\n\n for _ in 0..nr_of_ranks {\n\n let rank = DpuRank::allocate_for(dpu_type, profile)?;\n\n rank.reset_all()?;\n\n ranks.push(rank);\n\n }\n\n\n\n Ok(ranks)\n\n}", "file_path": "dpu-cluster-core/src/cluster.rs", "rank": 38, "score": 65550.19029800362 }, { "content": "#[repr(C)]\n\nstruct RawDpuDebugContext {\n\n registers: *mut u32,\n\n pcs: *mut u16,\n\n atomic_register: *mut bool,\n\n zero_flags: *mut bool,\n\n carry_flags: *mut bool,\n\n nr_of_running_threads: u8,\n\n scheduling: *mut u8,\n\n bkp_fault: bool,\n\n dma_fault: bool,\n\n mem_fault: bool,\n\n bkp_fault_thread_index: u8,\n\n dma_fault_thread_index: u8,\n\n mem_fault_thread_index: u8\n\n}\n\n\n\nunsafe impl Send for DpuRankDescription {}\n\nunsafe impl Sync for DpuRankDescription {}\n\n\n\nimpl DpuDebugContext {\n", "file_path": "dpu-sys/src/lib.rs", "rank": 39, "score": 61491.276392752596 }, { "content": "use crate::dpu::DpuId;\n\nuse dpu_sys::DpuRank;\n\nuse dpu_sys::DpuRankDescription;\n\nuse dpu_sys::DpuRankTransferMatrix;\n\nuse crate::error::ClusterError;\n\nuse crate::program::Program;\n\nuse 
crate::view::FastSelection;\n\nuse crate::view::Selection;\n\nuse crate::view::View;\n\nuse dpu_sys::DpuDebugContext;\n\nuse crate::memory::MemoryTransfer;\n\nuse crate::memory::MemoryTransferRankEntry;\n\nuse dpu_sys::DpuTarget;\n\n\n\n#[derive(Debug)]\n\npub struct Driver {\n\n rank_handler: RankHandler,\n\n pub nr_of_ranks: u8,\n\n pub target: DpuTarget,\n\n pub rank_description: DpuRankDescription\n\n}\n\n\n\nunsafe impl Sync for Driver {}\n\n\n\n#[derive(Debug)]\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 40, "score": 59996.06062872441 }, { "content": " (RunStatus::Fault(faults), RunStatus::Running) => RunStatus::Fault(faults.to_vec()),\n\n (RunStatus::Fault(faults), RunStatus::Fault(other_faults)) => {\n\n let mut all_faults = faults.to_vec();\n\n all_faults.append(&mut other_faults.to_vec());\n\n RunStatus::Fault(all_faults)\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Driver {\n\n pub fn new(ranks: Vec<DpuRank>, rank_description: DpuRankDescription, target: DpuTarget) -> Self {\n\n let nr_of_ranks = ranks.len() as u8;\n\n let rank_handler = RankHandler { ranks };\n\n\n\n Driver { rank_handler, nr_of_ranks, rank_description, target }\n\n }\n\n\n\n pub fn nr_of_dpus(&self) -> usize {\n\n self.rank_handler.ranks.len() *\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 41, "score": 59994.68368062019 }, { "content": " }\n\n }\n\n\n\n fn dispatch_for_some_ranks<'a, T, FnRankArg, FnDpu, FnRank>(&'a self, ranks: &[Selection<Selection<DpuId>>], for_dpu: FnDpu, for_rank: FnRank) -> Result<T, ClusterError>\n\n where T: Default + Mergeable,\n\n FnRankArg: FromRankId<'a>,\n\n FnDpu: Fn(&DpuId) -> Result<T, ClusterError>,\n\n FnRank: Fn(FnRankArg) -> Result<T, ClusterError>\n\n {\n\n let mut result = T::default();\n\n for (rank_id, rank_selection) in ranks.iter().enumerate() {\n\n match rank_selection {\n\n Selection::All => {\n\n let rank_result = for_rank(FnRankArg::from_rank_id(rank_id as u8, &self.rank_handler))?;\n\n result = 
result.merge_with(&rank_result);\n\n },\n\n Selection::None => (),\n\n Selection::Some(control_interfaces) => {\n\n // todo: we can be more efficient when the C Interface changes\n\n let mut dpus = Vec::default();\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 42, "score": 59992.228483102226 }, { "content": " self.rank_description.info.nr_of_work_registers_per_thread,\n\n self.rank_description.info.nr_of_atomic_bits);\n\n rank.initialize_fault_process_for_dpu(slice_id, member, &mut context)?;\n\n Ok(FaultInformation { dpu: dpu.clone(), context })\n\n }\n\n\n\n fn dispatch<'a, T, FnRankArg, FnDpu, FnRank, FnAll>(&'a self, view: &View, for_dpu: FnDpu, for_rank: FnRank, for_all: FnAll) -> Result<T, ClusterError>\n\n where T: Default + Mergeable,\n\n FnRankArg: FromRankId<'a>,\n\n FnDpu: Fn(&DpuId) -> Result<T, ClusterError>,\n\n FnRank: Fn(FnRankArg) -> Result<T, ClusterError>,\n\n FnAll: FnOnce() -> Result<T, ClusterError>\n\n {\n\n let View(selection) = view;\n\n\n\n match selection {\n\n FastSelection::Fast(dpu) => for_dpu(dpu),\n\n FastSelection::Normal(Selection::All) => for_all(),\n\n FastSelection::Normal(Selection::None) => Ok(T::default()),\n\n FastSelection::Normal(Selection::Some(ranks)) => self.dispatch_for_some_ranks(ranks, for_dpu, for_rank),\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 43, "score": 59991.47442407881 }, { "content": " rank.copy_to_mrams(&matrix)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn copy_from_memory(&self, data: &mut MemoryTransfer<'_>) -> Result<(), ClusterError> {\n\n for (rank_id, rank_transfers) in data.0.iter_mut() {\n\n let rank= self.rank_handler.get_rank(*rank_id);\n\n let matrix = self.create_transfer_matrix_for(rank, rank_transfers)?;\n\n rank.copy_from_mrams(&matrix)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn fetch_dpu_fault_context(&self, dpu: &DpuId) -> Result<FaultInformation, ClusterError> {\n\n let (rank, slice_id, member) = self.destructure(dpu);\n\n let mut context =\n\n 
DpuDebugContext::new(self.rank_description.info.nr_of_threads,\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 44, "score": 59991.17801452465 }, { "content": " |dpu| self.fetch_dpu_status(dpu),\n\n |rank| self.fetch_rank_status(rank),\n\n || self.fetch_all_status())\n\n }\n\n\n\n pub fn run(&self, view: &View) -> Result<RunStatus, ClusterError> {\n\n self.boot(view)?;\n\n\n\n loop {\n\n match self.fetch_status(view)? {\n\n RunStatus::Running => (),\n\n finished => return Ok(finished),\n\n }\n\n }\n\n }\n\n\n\n pub fn copy_to_memory(&self, data: &mut MemoryTransfer<'_>) -> Result<(), ClusterError> {\n\n for (rank_id, rank_transfers) in data.0.iter_mut() {\n\n let rank= self.rank_handler.get_rank(*rank_id);\n\n let matrix = self.create_transfer_matrix_for(rank, rank_transfers)?;\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 45, "score": 59990.97585256465 }, { "content": " fn boot_dpu(&self, dpu: &DpuId) -> Result<(), ClusterError> {\n\n let mut was_running = false;\n\n let (rank, slice, member) = self.destructure(dpu);\n\n\n\n rank.launch_thread_on_dpu(slice, member, BOOTSTRAP_THREAD, false, &mut was_running)?;\n\n\n\n if was_running {\n\n Err(ClusterError::DpuIsAlreadyRunning)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n fn fetch_all_status(&self) -> Result<RunStatus, ClusterError> {\n\n let nr_of_ranks = self.nr_of_ranks;\n\n let mut status = RunStatus::default();\n\n\n\n for rank_id in 0..nr_of_ranks {\n\n let rank_status = self.fetch_rank_status(rank_id)?;\n\n status = status.merge_with(&rank_status);\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 46, "score": 59989.01615453797 }, { "content": " (self.rank_description.topology.nr_of_control_interfaces as usize) *\n\n (self.rank_description.topology.nr_of_dpus_per_control_interface as usize)\n\n }\n\n\n\n pub fn load(&self, view: &View, program: &Program) -> Result<(), ClusterError> {\n\n self.dispatch(view,\n\n |dpu| self.load_dpu(dpu, program),\n\n |rank| self.load_rank(rank, 
program),\n\n || self.load_all(program))\n\n }\n\n\n\n pub fn boot(&self, view: &View) -> Result<(), ClusterError> {\n\n self.dispatch(view,\n\n |dpu| self.boot_dpu(dpu),\n\n |rank| self.boot_rank(rank),\n\n || self.boot_all())\n\n }\n\n\n\n pub fn fetch_status(&self, view: &View) -> Result<RunStatus, ClusterError> {\n\n self.dispatch(view,\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 47, "score": 59988.92047274078 }, { "content": "}\n\n\n\nimpl Default for RunStatus {\n\n fn default() -> Self {\n\n RunStatus::Idle\n\n }\n\n}\n\n\n\nconst BOOTSTRAP_THREAD: u8 = 0;\n\nconst PRIMARY_MRAM: u32 = 0;\n\n\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 48, "score": 59988.878054317625 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n\n\n fn boot_rank(&self, rank: &DpuRank) -> Result<(), ClusterError> {\n\n let nr_of_slices = self.rank_description.topology.nr_of_control_interfaces as usize;\n\n let mut was_running = vec!(0; nr_of_slices);\n\n\n\n rank.launch_thread_on_all(BOOTSTRAP_THREAD, false, was_running.as_mut_ptr())?;\n\n\n\n for slice_was_running in was_running {\n\n if slice_was_running != 0 {\n\n return Err(ClusterError::DpuIsAlreadyRunning);\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 49, "score": 59988.82146244012 }, { "content": " } else {\n\n Ok(RunStatus::Fault(faults))\n\n }\n\n }\n\n\n\n fn fetch_dpu_status(&self, dpu: &DpuId) -> Result<RunStatus, ClusterError> {\n\n let (rank, slice, member) = self.destructure(dpu);\n\n\n\n let mut running = false;\n\n let mut fault = false;\n\n\n\n rank.poll_dpu(slice, member, &mut running, &mut fault)?;\n\n\n\n if !running {\n\n Ok(RunStatus::Idle)\n\n } else if !fault {\n\n Ok(RunStatus::Running)\n\n } else {\n\n Ok(RunStatus::Fault(vec![dpu.clone()]))\n\n }\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 50, "score": 59988.40765699363 }, { "content": " }\n\n\n\n Ok(status)\n\n }\n\n\n\n fn fetch_rank_status(&self, rank_id: u8) -> 
Result<RunStatus, ClusterError> {\n\n let mut running = false;\n\n let mut fault = false;\n\n let mut faults = Vec::default();\n\n let nr_of_control_interfaces_per_rank = self.rank_description.topology.nr_of_control_interfaces as usize;\n\n let nr_of_dpus_per_control_interface = self.rank_description.topology.nr_of_dpus_per_control_interface;\n\n\n\n let rank = self.rank_handler.get_rank(rank_id);\n\n let mut run_bitfields = vec![0; nr_of_control_interfaces_per_rank];\n\n let mut fault_bitfields = vec![0; nr_of_control_interfaces_per_rank];\n\n\n\n rank.poll_all(run_bitfields.as_mut_ptr(), fault_bitfields.as_mut_ptr())?;\n\n\n\n for slice_id in 0..nr_of_control_interfaces_per_rank {\n\n if run_bitfields[slice_id] == 0 {\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 51, "score": 59987.99277213647 }, { "content": " }\n\n\n\n fn create_transfer_matrix_for<'a>(&self, rank: &'a DpuRank, data: &mut MemoryTransferRankEntry<'_>) -> Result<DpuRankTransferMatrix<'a>, ClusterError> {\n\n let matrix = DpuRankTransferMatrix::allocate_for(rank)?;\n\n\n\n for (dpu, image) in data.0.iter_mut() {\n\n let (_, slice, member) = dpu.members();\n\n let offset = image.offset;\n\n let length = image.reference.len() as u32;\n\n\n\n matrix.add_dpu(slice, member, image.ptr(), length, offset, PRIMARY_MRAM);\n\n }\n\n\n\n Ok(matrix)\n\n }\n\n\n\n fn destructure(&self, dpu: &DpuId) -> (&DpuRank, u8, u8) {\n\n let (rank_id, slice_id, member_id) = dpu.members();\n\n let rank = self.rank_handler.get_rank(rank_id);\n\n\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 52, "score": 59987.092725419 }, { "content": " }\n\n\n\n Ok(result)\n\n }\n\n\n\n fn load_all(&self, program: &Program) -> Result<(), ClusterError> {\n\n for rank in &self.rank_handler.ranks {\n\n self.load_rank(rank, program)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn load_rank(&self, rank: &DpuRank, program: &Program) -> Result<(), ClusterError> {\n\n for (offset, instructions) in &program.iram_sections {\n\n 
rank.copy_to_irams(instructions.as_ptr(), instructions.len() as u16, *offset)?;\n\n }\n\n for (offset, data) in &program.wram_sections {\n\n rank.copy_to_wrams(data.as_ptr(), data.len() as u32, *offset)?;\n\n }\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 53, "score": 59985.56015798191 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n fn load_dpu(&self, dpu: &DpuId, program: &Program) -> Result<(), ClusterError> {\n\n let (rank, slice, member) = self.destructure(dpu);\n\n\n\n for (offset, instructions) in &program.iram_sections {\n\n rank.copy_to_iram(slice, member, instructions.as_ptr(), instructions.len() as u16, *offset)?;\n\n }\n\n for (offset, data) in &program.wram_sections {\n\n rank.copy_to_wram(slice, member, data.as_ptr(), data.len() as u32, *offset)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn boot_all(&self) -> Result<(), ClusterError> {\n\n for rank in &self.rank_handler.ranks {\n\n self.boot_rank(rank)?;\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 54, "score": 59985.490859655576 }, { "content": " (rank, slice_id, member_id)\n\n }\n\n}\n\n\n\nimpl RankHandler {\n\n fn get_rank(&self, rank_id: u8) -> &DpuRank {\n\n // unwrap: DpuId are checked during their creation\n\n self.ranks.get(rank_id as usize).unwrap()\n\n }\n\n}", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 55, "score": 59985.409265335344 }, { "content": " let nr_of_dpus_per_control_interface = self.rank_description.topology.nr_of_dpus_per_control_interface;\n\n\n\n for (slice_id, slice_selection) in control_interfaces.iter().enumerate() {\n\n match slice_selection {\n\n Selection::All => {\n\n for member_id in 0..nr_of_dpus_per_control_interface {\n\n dpus.push(DpuId::new(rank_id as u8, slice_id as u8, member_id));\n\n }\n\n },\n\n Selection::None => (),\n\n Selection::Some(dpu_ids) => dpus.append(&mut dpu_ids.to_vec()),\n\n }\n\n }\n\n\n\n for dpu in dpus {\n\n let dpu_result = for_dpu(&dpu)?;\n\n result = result.merge_with(&dpu_result);\n\n }\n\n },\n\n }\n", 
"file_path": "dpu-cluster-core/src/driver.rs", "rank": 56, "score": 59984.59540757148 }, { "content": " continue;\n\n }\n\n\n\n running = true;\n\n let fault_bitfield = fault_bitfields[slice_id];\n\n if fault_bitfield != 0 {\n\n fault = true;\n\n\n\n for member in 0..nr_of_dpus_per_control_interface {\n\n if (fault_bitfield & (1 << (member as u32))) != 0 {\n\n faults.push(DpuId::new(rank_id, slice_id as u8, member));\n\n }\n\n }\n\n }\n\n }\n\n\n\n if !running {\n\n Ok(RunStatus::Idle)\n\n } else if !fault {\n\n Ok(RunStatus::Running)\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 57, "score": 59982.60816698419 }, { "content": " }\n\n\n\n pub fn set_process(&mut self, process: Process) {\n\n self.process = process;\n\n }\n\n\n\n pub fn set_policy(&mut self, policy: RecordPolicy) {\n\n self.policy = policy;\n\n }\n\n\n\n pub fn record(&self, event: Event) {\n\n match self.policy {\n\n RecordPolicy::Disabled => {},\n\n RecordPolicy::Stdout => {\n\n println!(\"[{}][{:?}] {:?}\", Local::now().format(\"%F %T%.f\").to_string(), self.process, event);\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Default for EventMonitor {\n\n fn default() -> Self {\n\n EventMonitor::new()\n\n }\n\n}", "file_path": "dpu-cluster-core/src/pipeline/monitoring.rs", "rank": 58, "score": 59443.76820161995 }, { "content": " fn default() -> Self {\n\n Process::Pipeline\n\n }\n\n}\n\n\n\nimpl Default for RecordPolicy {\n\n fn default() -> Self {\n\n RecordPolicy::Disabled\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct EventMonitor {\n\n process: Process,\n\n policy: RecordPolicy\n\n}\n\n\n\nimpl EventMonitor {\n\n pub fn new() -> Self {\n\n EventMonitor { process: Default::default(), policy: Default::default() }\n", "file_path": "dpu-cluster-core/src/pipeline/monitoring.rs", "rank": 59, "score": 59443.339300341766 }, { "content": "use crate::pipeline::GroupId;\n\nuse chrono::Local;\n\nuse crate::dpu::DpuId;\n\n\n\n#[derive(Debug)]\n\npub enum Event {\n\n Initialization { nr_ranks: u8, 
nr_slices: u8, nr_dpus: u8 },\n\n LoadingProgramBegin { nr_instructions: u32, nr_data_bytes: u32 },\n\n LoadingProgramEnd,\n\n ProcessBegin,\n\n ProcessEnd,\n\n NewInput,\n\n GroupSearchBegin,\n\n GroupSearchEnd(GroupId),\n\n GroupLoadingBegin(GroupId),\n\n GroupLoadingEnd(GroupId),\n\n JobExecutionTrackingBegin(GroupId),\n\n JobExecutionTrackingEnd(GroupId),\n\n OutputFetchingBegin(GroupId),\n\n OutputFetchingInfo { dpu: DpuId, offset: u32, length: u32 },\n", "file_path": "dpu-cluster-core/src/pipeline/monitoring.rs", "rank": 60, "score": 59442.66832115891 }, { "content": " OutputFetchingEnd(GroupId),\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Process {\n\n Pipeline,\n\n Initializer,\n\n Mapper,\n\n Loader,\n\n Tracker,\n\n Fetcher\n\n}\n\n\n\n#[derive(Clone)]\n\npub enum RecordPolicy {\n\n Disabled,\n\n Stdout\n\n}\n\n\n\nimpl Default for Process {\n", "file_path": "dpu-cluster-core/src/pipeline/monitoring.rs", "rank": 61, "score": 59441.18444590307 }, { "content": "use crate::pipeline::OutputResult;\n\nuse crate::pipeline::pipeline::Pipeline;\n\n\n\npub struct Output<K> {\n\n pipeline: Pipeline<K>\n\n}\n\n\n\nimpl <K> Iterator for Output<K> {\n\n type Item = OutputResult<K>;\n\n\n\n fn next(&mut self) -> Option<<Self as Iterator>::Item> {\n\n self.pipeline.output_receiver.iter().next()\n\n }\n\n}\n\n\n\nimpl <K> Output<K> {\n\n pub fn new(pipeline: Pipeline<K>) -> Self {\n\n Output { pipeline }\n\n }\n\n}", "file_path": "dpu-cluster-core/src/pipeline/output.rs", "rank": 62, "score": 59231.26146169479 }, { "content": "#define OUTPUT_OFFSET (INPUT_OFFSET + INPUT_SIZE)\n", "file_path": "dpu-cluster-core/examples/basic_search/main.c", "rank": 63, "score": 58771.7278075784 }, { "content": "#define OUTPUT_SIZE (8 + (1 << 24))\n\n\n", "file_path": "dpu-cluster-core/examples/basic_search/main.c", "rank": 64, "score": 58771.7278075784 }, { "content": "#define INPUT_OFFSET (QUERY_OFFSET + QUERY_SIZE)\n", "file_path": "dpu-cluster-core/examples/basic_search/main.c", 
"rank": 65, "score": 58630.47855354475 }, { "content": "#define INPUT_SIZE (1 << 24)\n", "file_path": "dpu-cluster-core/examples/basic_search/main.c", "rank": 66, "score": 58630.47855354475 }, { "content": "uint32_t __attribute__ ((aligned (8))) current_output_index = 0;\n", "file_path": "dpu-cluster-core/examples/basic_search/main.c", "rank": 67, "score": 57903.875421460296 }, { "content": "#define INPUT_ENTRY_SIZE 32 \n", "file_path": "dpu-cluster-core/examples/basic_search/main.c", "rank": 68, "score": 57765.148197532835 }, { "content": "trait FromRankId<'a> {\n\n fn from_rank_id(rank_id: u8, handler: &'a RankHandler) -> Self;\n\n}\n\n\n\nimpl <'a> FromRankId<'a> for u8 {\n\n fn from_rank_id(rank_id: u8, _: &'a RankHandler) -> Self {\n\n rank_id\n\n }\n\n}\n\n\n\nimpl <'a> FromRankId<'a> for &'a DpuRank {\n\n fn from_rank_id(rank_id: u8, handler: &'a RankHandler) -> Self {\n\n handler.get_rank(rank_id)\n\n }\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/driver.rs", "rank": 69, "score": 57583.600014150856 }, { "content": "#[test]\n\nfn can_allocate_functional_simulator() {\n\n let target = DpuTarget::for_functional_simulator();\n\n let (dpu_type, ref profile) = target.to_cni_args();\n\n if let Err(err) = DpuRank::allocate_for(dpu_type, profile) {\n\n panic!(\"{:?}\", err)\n\n }\n\n}\n\n\n", "file_path": "dpu-sys/tests/allocation.rs", "rank": 70, "score": 57130.298724808665 }, { "content": " group_sender: Sender<DpuGroup>,\n\n monitoring: EventMonitor,\n\n // todo: use or remove\n\n shutdown: Arc<Mutex<bool>>\n\n}\n\n\n\nimpl <InputHandle> OutputFetcher<InputHandle>\n\n where InputHandle: Send + 'static\n\n{\n\n pub fn new(cluster: Arc<Cluster>,\n\n finish_receiver: Receiver<GroupJob<InputHandle>>,\n\n output_sender: SyncSender<OutputResult<InputHandle>>,\n\n group_sender: Sender<DpuGroup>,\n\n mut monitoring: EventMonitor,\n\n shutdown: Arc<Mutex<bool>>) -> Self {\n\n monitoring.set_process(Process::Fetcher);\n\n\n\n OutputFetcher { cluster, finish_receiver, 
output_sender, group_sender, monitoring, shutdown }\n\n }\n\n}\n", "file_path": "dpu-cluster-core/src/pipeline/stages/fetcher.rs", "rank": 71, "score": 56924.51315036368 }, { "content": "use std::time::Duration;\n\nuse crate::pipeline::stages::Stage;\n\n\n\npub struct ExecutionTracker<InputHandle> {\n\n cluster: Arc<Cluster>,\n\n job_receiver: Receiver<GroupJob<InputHandle>>,\n\n finish_sender: Sender<GroupJob<InputHandle>>,\n\n output_sender: SyncSender<OutputResult<InputHandle>>,\n\n sleep_duration: Option<Duration>,\n\n monitoring: EventMonitor,\n\n // todo: use or remove\n\n shutdown: Arc<Mutex<bool>>\n\n}\n\n\n\nimpl <InputHandle> ExecutionTracker<InputHandle>\n\n where InputHandle: Send + 'static\n\n{\n\n pub fn new(cluster: Arc<Cluster>,\n\n job_receiver: Receiver<GroupJob<InputHandle>>,\n\n finish_sender: Sender<GroupJob<InputHandle>>,\n", "file_path": "dpu-cluster-core/src/pipeline/stages/tracker.rs", "rank": 73, "score": 56921.732481937135 }, { "content": " shutdown: Arc<Mutex<bool>>) -> Self {\n\n monitoring.set_process(Process::Mapper);\n\n\n\n PersistentMapper {\n\n base: BaseMapper { groups, input_receiver, group_receiver, transfer_sender, monitoring, shutdown },\n\n cluster,\n\n get_transfers,\n\n output_sender,\n\n mapping,\n\n fragment_map: Default::default(),\n\n available_groups: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl <I, K> Stage for SimpleMapper<I, K>\n\n where I: Send + 'static,\n\n K: Send + 'static\n\n{\n\n fn run(mut self) {\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 74, "score": 56921.720703444465 }, { "content": " output_sender: SyncSender<OutputResult<InputHandle>>,\n\n sleep_duration: Option<Duration>,\n\n mut monitoring: EventMonitor,\n\n shutdown: Arc<Mutex<bool>>) -> Self {\n\n monitoring.set_process(Process::Tracker);\n\n\n\n ExecutionTracker { cluster, job_receiver, finish_sender, output_sender, sleep_duration, monitoring, shutdown }\n\n }\n\n}\n\n\n\nimpl <InputHandle> Stage for 
ExecutionTracker<InputHandle>\n\n where InputHandle: Send + 'static\n\n{\n\n fn run(self) {\n\n let monitoring = self.monitoring;\n\n\n\n monitoring.record(Event::ProcessBegin);\n\n\n\n let mut jobs = Vec::default();\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/tracker.rs", "rank": 76, "score": 56919.16739400421 }, { "content": " get_transfers\n\n }\n\n }\n\n}\n\n\n\nimpl <I, K, D, IT> PersistentMapper<I, K, D, IT>\n\n where I: Send + 'static,\n\n K: Send + 'static,\n\n D: Eq + Hash + Send + 'static,\n\n IT: Iterator<Item=(D, InputMemoryTransfer)>\n\n{\n\n pub fn new(get_transfers: Box<dyn Fn(I) -> (D, MemoryTransfers<K>) + Send>,\n\n groups: Vec<DpuGroup>,\n\n input_receiver: Receiver<I>,\n\n group_receiver: Receiver<DpuGroup>,\n\n cluster: Arc<Cluster>,\n\n transfer_sender: Sender<(DpuGroup, Vec<Vec<InputMemoryTransfer>>, Vec<(K, OutputMemoryTransfer)>)>,\n\n output_sender: SyncSender<OutputResult<K>>,\n\n mapping: Box<IT>,\n\n mut monitoring: EventMonitor,\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 77, "score": 56919.088483863445 }, { "content": " mapping: Box<FragmentIterator>,\n\n fragment_map: HashMap<FragmentId, (DpuId, GroupId)>,\n\n available_groups: HashMap<GroupId, (DpuGroup, HashMap<DpuId, MemoryTransfers<InputHandle>>)>\n\n}\n\n\n\nimpl <I, K> SimpleMapper<I, K>\n\n where I: Send + 'static,\n\n K: Send + 'static\n\n{\n\n pub fn new(get_transfers: Box<dyn Fn(I) -> MemoryTransfers<K> + Send>,\n\n groups: Vec<DpuGroup>,\n\n input_receiver: Receiver<I>,\n\n group_receiver: Receiver<DpuGroup>,\n\n transfer_sender: Sender<(DpuGroup, Vec<Vec<InputMemoryTransfer>>, Vec<(K, OutputMemoryTransfer)>)>,\n\n mut monitoring: EventMonitor,\n\n shutdown: Arc<Mutex<bool>>) -> Self {\n\n monitoring.set_process(Process::Mapper);\n\n\n\n SimpleMapper {\n\n base: BaseMapper { groups, input_receiver, group_receiver, transfer_sender, monitoring, shutdown },\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 
78, "score": 56917.78052302774 }, { "content": "use std::sync::mpsc::SyncSender;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse crate::pipeline::monitoring::EventMonitor;\n\nuse crate::pipeline::monitoring::Event;\n\nuse crate::pipeline::monitoring::Process;\n\nuse crate::pipeline::stages::Stage;\n\n\n\npub struct InputInitializer<InputItem, InputIterator> {\n\n iterator: Box<InputIterator>,\n\n sender: SyncSender<InputItem>,\n\n monitoring: EventMonitor,\n\n shutdown: Arc<Mutex<bool>>\n\n}\n\n\n\nimpl <InputItem, InputIterator> InputInitializer<InputItem, InputIterator>\n\n where InputItem: Send + 'static,\n\n InputIterator: Iterator<Item=InputItem> + Send + 'static\n\n{\n\n pub fn new(iterator: Box<InputIterator>,\n", "file_path": "dpu-cluster-core/src/pipeline/stages/initializer.rs", "rank": 79, "score": 56915.7360069001 }, { "content": " }\n\n}\n\n\n\nimpl <I, K, D, IT> Stage for PersistentMapper<I, K, D, IT>\n\n where I: Send + 'static,\n\n K: Send + 'static,\n\n D: Eq + Hash + Send + 'static,\n\n IT: Iterator<Item=(D, InputMemoryTransfer)> + Send + 'static\n\n{\n\n fn init(&mut self) -> Result<(), PipelineError> {\n\n let driver = self.cluster.driver();\n\n\n\n for mut group in self.base.groups.iter().cloned() {\n\n let group_id = group.id;\n\n let mut transfers = Vec::with_capacity(group.dpus.len());\n\n\n\n for (dpu, _) in &group.dpus {\n\n match self.mapping.next() {\n\n None => break,\n\n Some((fragment_id, fragment_transfer)) => {\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 80, "score": 56915.61948133668 }, { "content": " sender: SyncSender<InputItem>,\n\n mut monitoring: EventMonitor,\n\n shutdown: Arc<Mutex<bool>>) -> Self {\n\n monitoring.set_process(Process::Initializer);\n\n\n\n InputInitializer { iterator, sender, monitoring, shutdown }\n\n }\n\n}\n\n\n\nimpl <InputItem, InputIterator> Stage for InputInitializer<InputItem, InputIterator>\n\n where InputItem: Send + 'static,\n\n InputIterator: 
Iterator<Item=InputItem> + Send + 'static\n\n{\n\n fn run(self) {\n\n let monitoring = self.monitoring;\n\n\n\n monitoring.record(Event::ProcessBegin);\n\n\n\n for item in self.iterator {\n\n monitoring.record(Event::NewInput);\n", "file_path": "dpu-cluster-core/src/pipeline/stages/initializer.rs", "rank": 81, "score": 56915.255826343884 }, { "content": "use crate::pipeline::OutputResult;\n\nuse crate::pipeline::stages::DpuGroup;\n\nuse std::sync::mpsc::Sender;\n\nuse std::sync::mpsc::Receiver;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse crate::memory::MemoryTransfer;\n\nuse crate::pipeline::PipelineError;\n\nuse crate::pipeline::stages::GroupJob;\n\nuse crate::cluster::Cluster;\n\nuse crate::pipeline::monitoring::EventMonitor;\n\nuse crate::pipeline::monitoring::Process;\n\nuse crate::pipeline::monitoring::Event;\n\nuse std::sync::mpsc::SyncSender;\n\nuse crate::pipeline::stages::Stage;\n\n\n\npub struct OutputFetcher<InputHandle> {\n\n cluster: Arc<Cluster>,\n\n finish_receiver: Receiver<GroupJob<InputHandle>>,\n\n output_sender: SyncSender<OutputResult<InputHandle>>,\n", "file_path": "dpu-cluster-core/src/pipeline/stages/fetcher.rs", "rank": 82, "score": 56914.486346811966 }, { "content": "\n\nimpl <InputHandle> Stage for OutputFetcher<InputHandle>\n\n where InputHandle: Send + 'static\n\n{\n\n fn run(self) {\n\n let monitoring = self.monitoring;\n\n\n\n monitoring.record(Event::ProcessBegin);\n\n\n\n for (group, transfers) in self.finish_receiver {\n\n let group_id = group.id;\n\n monitoring.record(Event::OutputFetchingBegin(group_id));\n\n\n\n let mut vectors = Vec::with_capacity(transfers.len());\n\n\n\n for (handle, transfer) in transfers {\n\n vectors.push((vec![0u8; transfer.length as usize], transfer.offset, handle));\n\n }\n\n\n\n let copy_result = {\n", "file_path": "dpu-cluster-core/src/pipeline/stages/fetcher.rs", "rank": 83, "score": 56914.38197522767 }, { "content": "use std::sync::mpsc::Receiver;\n\nuse 
crate::pipeline::stages::DpuGroup;\n\nuse std::sync::mpsc::Sender;\n\nuse std::sync::mpsc::SyncSender;\n\nuse crate::pipeline::OutputResult;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse std::sync::mpsc::TryRecvError;\n\nuse crate::driver::RunStatus;\n\nuse crate::error::ClusterError;\n\nuse crate::view::View;\n\nuse crate::driver::Mergeable;\n\nuse crate::pipeline::PipelineError;\n\nuse std::thread;\n\nuse crate::pipeline::stages::GroupJob;\n\nuse crate::cluster::Cluster;\n\nuse crate::pipeline::monitoring::EventMonitor;\n\nuse crate::pipeline::monitoring::Process;\n\nuse crate::pipeline::monitoring::Event;\n\nuse crate::driver::Driver;\n", "file_path": "dpu-cluster-core/src/pipeline/stages/tracker.rs", "rank": 85, "score": 56910.38654429197 }, { "content": " let mut memory_transfer = MemoryTransfer::default();\n\n for ((vector, offset, _), dpu) in vectors.iter_mut().zip(group.active_dpus()) {\n\n monitoring.record(Event::OutputFetchingInfo { dpu: dpu.clone(), offset: *offset, length: vector.len() as u32});\n\n memory_transfer.add_in_place(dpu.clone(), *offset, vector.as_mut_slice());\n\n }\n\n self.cluster.driver().copy_from_memory(&mut memory_transfer)\n\n };\n\n\n\n monitoring.record(Event::OutputFetchingEnd(group_id));\n\n\n\n match copy_result {\n\n Ok(_) => {\n\n self.group_sender.send(group);\n\n for (result, _, handle) in vectors {\n\n self.output_sender.send(Ok((handle, result))).unwrap();\n\n }\n\n },\n\n Err(err) => {\n\n self.output_sender.send(Err(PipelineError::InfrastructureError(err))).unwrap();\n\n },\n\n };\n\n }\n\n\n\n monitoring.record(Event::ProcessEnd);\n\n }\n\n}", "file_path": "dpu-cluster-core/src/pipeline/stages/fetcher.rs", "rank": 87, "score": 56909.18656435483 }, { "content": "use crate::pipeline::stages::Stage;\n\nuse crate::pipeline::monitoring::Event;\n\nuse crate::pipeline::stages::DpuGroup;\n\nuse std::sync::mpsc::Receiver;\n\nuse crate::pipeline::transfer::MemoryTransfers;\n\nuse std::sync::Mutex;\n\nuse 
std::sync::Arc;\n\nuse crate::pipeline::monitoring::EventMonitor;\n\nuse crate::pipeline::monitoring::Process;\n\nuse std::sync::mpsc::Sender;\n\nuse crate::pipeline::transfer::OutputMemoryTransfer;\n\nuse crate::pipeline::transfer::InputMemoryTransfer;\n\nuse std::collections::HashMap;\n\nuse std::collections::hash_map::Entry;\n\nuse crate::dpu::DpuId;\n\nuse std::hash::Hash;\n\nuse crate::pipeline::OutputResult;\n\nuse std::sync::mpsc::SyncSender;\n\nuse crate::pipeline::PipelineError;\n\nuse crate::pipeline::GroupId;\n\nuse crate::cluster::Cluster;\n\nuse crate::memory::MemoryTransfer;\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 88, "score": 56908.55276829985 }, { "content": " let monitoring = self.base.monitoring;\n\n\n\n monitoring.record(Event::ProcessBegin);\n\n\n\n let mut iterator = self.base.input_receiver.iter();\n\n\n\n while let Some(item) = iterator.next() {\n\n monitoring.record(Event::GroupSearchBegin);\n\n let mut group = fetch_next_group(&mut self.base.groups, &mut self.base.group_receiver);\n\n let group_id = group.id;\n\n monitoring.record(Event::GroupSearchEnd(group_id));\n\n\n\n let group_size = group.dpus.len();\n\n let mut inputs = Vec::with_capacity(group_size);\n\n let mut outputs = Vec::with_capacity(group_size);\n\n\n\n let transfers = (self.get_transfers)(item);\n\n inputs.push(transfers.inputs);\n\n outputs.push((transfers.key, transfers.output));\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 89, "score": 56907.681046330574 }, { "content": " while inputs.len() != group_size {\n\n match iterator.next() {\n\n None => {\n\n for (_, is_active) in group.dpus.iter_mut().skip(inputs.len()) {\n\n *is_active = false;\n\n }\n\n },\n\n Some(item) => {\n\n let transfers = (self.get_transfers)(item);\n\n inputs.push(transfers.inputs);\n\n outputs.push((transfers.key, transfers.output));\n\n }\n\n }\n\n }\n\n\n\n self.base.transfer_sender.send((group, inputs, outputs)).unwrap();\n\n 
}\n\n\n\n monitoring.record(Event::ProcessEnd);\n\n }\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 90, "score": 56907.65001878662 }, { "content": " for job in jobs {\n\n let group_id = job.0.id;\n\n match fetch_group_status(self.cluster.driver(), &job.0) {\n\n Ok(RunStatus::Running) => new_jobs.push(job),\n\n Ok(RunStatus::Idle) => {\n\n monitoring.record(Event::JobExecutionTrackingEnd(group_id));\n\n self.finish_sender.send(job).unwrap();\n\n },\n\n Ok(RunStatus::Fault(faults)) => {\n\n monitoring.record(Event::JobExecutionTrackingEnd(group_id));\n\n for faulting_dpu in faults {\n\n self.output_sender.send(Err(PipelineError::ExecutionError(faulting_dpu))).unwrap();\n\n }\n\n },\n\n Err(err) => {\n\n monitoring.record(Event::JobExecutionTrackingEnd(group_id));\n\n self.output_sender.send(Err(PipelineError::InfrastructureError(err))).unwrap();\n\n }\n\n }\n\n }\n", "file_path": "dpu-cluster-core/src/pipeline/stages/tracker.rs", "rank": 91, "score": 56905.57687334353 }, { "content": "\n\n if let Some(_) = self.mapping.next() {\n\n Err(PipelineError::TooManyFragments)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n fn run(mut self) {\n\n let monitoring = self.base.monitoring;\n\n\n\n monitoring.record(Event::ProcessBegin);\n\n\n\n let mut waiting_inputs: HashMap<GroupId, HashMap<DpuId, Vec<MemoryTransfers<K>>>> = Default::default();\n\n\n\n for item in self.base.input_receiver {\n\n let (fragment_id, transfers) = (self.get_transfers)(item);\n\n\n\n match self.fragment_map.get(&fragment_id) {\n\n None => self.output_sender.send(Err(PipelineError::UnknownFragmentId)).unwrap(),\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 92, "score": 56905.0132503898 }, { "content": "use crate::dpu::DpuId;\n\nuse crate::pipeline::transfer::OutputMemoryTransfer;\n\nuse crate::pipeline::GroupId;\n\nuse crate::pipeline::ThreadHandle;\n\nuse std::thread;\n\nuse crate::pipeline::PipelineError;\n\n\n\npub mod initializer;\n\npub 
mod mapper;\n\npub mod loader;\n\npub mod tracker;\n\npub mod fetcher;\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mod.rs", "rank": 93, "score": 56902.211750063885 }, { "content": " while still_some_waiting_inputs(&waiting_inputs) {\n\n let new_groups = fetch_available_groups(&self.base.group_receiver);\n\n\n\n for group in new_groups {\n\n let group_id = group.id;\n\n\n\n if let Some(group_entry) = waiting_inputs.get_mut(&group_id) {\n\n let first_entry = extract_first_waiting_input(group_entry);\n\n build_and_launch_group(group, first_entry, &self.base.transfer_sender);\n\n }\n\n }\n\n }\n\n\n\n monitoring.record(Event::ProcessEnd);\n\n }\n\n}\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 94, "score": 56902.0862905742 }, { "content": " if should_launch {\n\n let (group, dpus) = group_entry.remove();\n\n build_and_launch_group(group, dpus, &self.base.transfer_sender);\n\n }\n\n },\n\n Entry::Vacant(_) => {\n\n add_waiting_input(&mut waiting_inputs, *group_id, *dpu_id, transfers);\n\n },\n\n }\n\n },\n\n }\n\n\n\n if still_some_waiting_inputs(&waiting_inputs) {\n\n let new_groups = fetch_available_groups(&self.base.group_receiver);\n\n\n\n for group in new_groups {\n\n let group_id = group.id;\n\n\n\n match waiting_inputs.get_mut(&group_id) {\n\n None => {\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 95, "score": 56900.592938716945 }, { "content": " Some((dpu_id, group_id)) => {\n\n match self.available_groups.entry(*group_id) {\n\n Entry::Occupied(mut group_entry) => {\n\n let should_launch = {\n\n let (group, dpus) = group_entry.get_mut();\n\n\n\n let force_launch = match dpus.entry(*dpu_id) {\n\n Entry::Occupied(_) => {\n\n add_waiting_input(&mut waiting_inputs, *group_id, *dpu_id, transfers);\n\n true\n\n },\n\n Entry::Vacant(dpu_entry) => {\n\n dpu_entry.insert(transfers);\n\n false\n\n },\n\n };\n\n\n\n force_launch || is_group_complete(group, dpus)\n\n };\n\n\n", "file_path": 
"dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 96, "score": 56899.922335153744 }, { "content": "\n\n if *self.shutdown.lock().unwrap() {\n\n break;\n\n } else {\n\n self.sender.send(item).unwrap();\n\n }\n\n }\n\n\n\n monitoring.record(Event::ProcessEnd);\n\n }\n\n}", "file_path": "dpu-cluster-core/src/pipeline/stages/initializer.rs", "rank": 97, "score": 56899.22551772499 }, { "content": " self.fragment_map.insert(fragment_id, (*dpu, group_id));\n\n transfers.push((*dpu, fragment_transfer));\n\n },\n\n }\n\n }\n\n\n\n if !transfers.is_empty() {\n\n group.dpus.truncate(transfers.len());\n\n\n\n let mut memory_transfer = MemoryTransfer::default();\n\n\n\n for (dpu, transfer) in transfers.iter_mut() {\n\n memory_transfer.add_in_place(*dpu, transfer.offset, transfer.content.as_mut_slice());\n\n }\n\n\n\n driver.copy_to_memory(&mut memory_transfer)?;\n\n\n\n self.available_groups.insert(group_id, (group, HashMap::default()));\n\n }\n\n }\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 98, "score": 56899.185481557564 }, { "content": " self.available_groups.insert(group_id, (group, HashMap::default()));\n\n },\n\n Some(group_entry) => {\n\n let first_entry = extract_first_waiting_input(group_entry);\n\n\n\n if is_group_complete(&group, &first_entry) {\n\n build_and_launch_group(group, first_entry, &self.base.transfer_sender);\n\n } else {\n\n self.available_groups.insert(group_id, (group, first_entry));\n\n }\n\n },\n\n }\n\n }\n\n }\n\n }\n\n\n\n for (_, (group, dpus)) in self.available_groups {\n\n build_and_launch_group(group, dpus, &self.base.transfer_sender);\n\n }\n\n\n", "file_path": "dpu-cluster-core/src/pipeline/stages/mapper.rs", "rank": 99, "score": 56896.14933334601 } ]
Rust
algebra/src/fields/models/fp6_2over3.rs
ZencashOfficial/zexe
deaabb1c47d2a01fbd7514dc4ed4e255ebaec890
use super::quadratic_extension::*; use std::marker::PhantomData; use std::ops::{MulAssign, Neg}; use crate::{ bits::{FromBits, FromCompressedBits, ToBits, ToCompressedBits}, fields::{Field, Fp3, Fp3Parameters, SquareRootField}, BitSerializationError, Error, }; pub trait Fp6Parameters: 'static + Send + Sync { type Fp3Params: Fp3Parameters; const NONRESIDUE: Fp3<Self::Fp3Params>; const FROBENIUS_COEFF_FP6_C1: &'static [<Self::Fp3Params as Fp3Parameters>::Fp]; #[inline(always)] fn mul_fp3_by_nonresidue(fe: &Fp3<Self::Fp3Params>) -> Fp3<Self::Fp3Params> { let mut res = *fe; res.c0 = fe.c2; res.c1 = fe.c0; res.c2 = fe.c1; res.c0 = <Self::Fp3Params as Fp3Parameters>::mul_fp_by_nonresidue(&res.c0); res } } pub struct Fp6ParamsWrapper<P: Fp6Parameters>(PhantomData<P>); impl<P: Fp6Parameters> QuadExtParameters for Fp6ParamsWrapper<P> { type BasePrimeField = <P::Fp3Params as Fp3Parameters>::Fp; type BaseField = Fp3<P::Fp3Params>; type FrobCoeff = Self::BasePrimeField; const DEGREE_OVER_BASE_PRIME_FIELD: usize = 6; const NONRESIDUE: Self::BaseField = P::NONRESIDUE; const FROBENIUS_COEFF_C1: &'static [Self::FrobCoeff] = P::FROBENIUS_COEFF_FP6_C1; #[inline(always)] fn mul_base_field_by_nonresidue(fe: &Self::BaseField) -> Self::BaseField { P::mul_fp3_by_nonresidue(fe) } fn mul_base_field_by_frob_coeff(fe: &mut Self::BaseField, power: usize) { fe.mul_assign_by_fp(&Self::FROBENIUS_COEFF_C1[power % Self::DEGREE_OVER_BASE_PRIME_FIELD]); } } pub type Fp6<P> = QuadExtField<Fp6ParamsWrapper<P>>; impl<P: Fp6Parameters> Fp6<P> { pub fn mul_by_034( &mut self, c0: &<P::Fp3Params as Fp3Parameters>::Fp, c3: &<P::Fp3Params as Fp3Parameters>::Fp, c4: &<P::Fp3Params as Fp3Parameters>::Fp, ) { let z0 = self.c0.c0; let z1 = self.c0.c1; let z2 = self.c0.c2; let z3 = self.c1.c0; let z4 = self.c1.c1; let z5 = self.c1.c2; let x0 = *c0; let x3 = *c3; let x4 = *c4; let mut tmp1 = x3; tmp1.mul_assign(&<P::Fp3Params as Fp3Parameters>::NONRESIDUE); let mut tmp2 = x4; tmp2.mul_assign(&<P::Fp3Params as 
Fp3Parameters>::NONRESIDUE); self.c0.c0 = x0 * &z0 + &(tmp1 * &z5) + &(tmp2 * &z4); self.c0.c1 = x0 * &z1 + &(x3 * &z3) + &(tmp2 * &z5); self.c0.c2 = x0 * &z2 + &(x3 * &z4) + &(x4 * &z3); self.c1.c0 = x0 * &z3 + &(x3 * &z0) + &(tmp2 * &z2); self.c1.c1 = x0 * &z4 + &(x3 * &z1) + &(x4 * &z0); self.c1.c2 = x0 * &z5 + &(x3 * &z2) + &(x4 * &z1); } pub fn mul_by_014( &mut self, c0: &<P::Fp3Params as Fp3Parameters>::Fp, c1: &<P::Fp3Params as Fp3Parameters>::Fp, c4: &<P::Fp3Params as Fp3Parameters>::Fp, ) { let z0 = self.c0.c0; let z1 = self.c0.c1; let z2 = self.c0.c2; let z3 = self.c1.c0; let z4 = self.c1.c1; let z5 = self.c1.c2; let x0 = *c0; let x1 = *c1; let x4 = *c4; let mut tmp1 = x1; tmp1.mul_assign(&<P::Fp3Params as Fp3Parameters>::NONRESIDUE); let mut tmp2 = x4; tmp2.mul_assign(&<P::Fp3Params as Fp3Parameters>::NONRESIDUE); self.c0.c0 = x0 * &z0 + &(tmp1 * &z2) + &(tmp2 * &z4); self.c0.c1 = x0 * &z1 + &(x1 * &z0) + &(tmp2 * &z5); self.c0.c2 = x0 * &z2 + &(x1 * &z1) + &(x4 * &z3); self.c1.c0 = x0 * &z3 + &(tmp1 * &z5) + &(tmp2 * &z2); self.c1.c1 = x0 * &z4 + &(x1 * &z3) + &(x4 * &z0); self.c1.c2 = x0 * &z5 + &(x1 * &z4) + &(x4 * &z1); } pub fn mul_by_2345(self, other: &Self) -> Self /* Devegili OhEig Scott Dahab --- Multiplication and Squaring on Pairing-Friendly Fields.pdf; Section 3 (Karatsuba) */ { let v0 = { let t = other.c0.c2 * &<P::Fp3Params as Fp3Parameters>::NONRESIDUE; Fp3::<P::Fp3Params>::new(self.c0.c1 * &t, self.c0.c2 * &t, self.c0.c0 * &other.c0.c2) }; let v1 = self.c1 * &other.c1; let beta_v1 = P::mul_fp3_by_nonresidue(&v1); let c0 = v0 + &beta_v1; let c1 = (self.c0 + &self.c1) * &(other.c0 + &other.c1) - &v0 - &v1; Self::new(c0, c1) } } impl<P: Fp6Parameters> ToCompressedBits for Fp6<P> { #[inline] fn compress(&self) -> Vec<bool> { let mut res = self.c1.write_bits(); let parity = self.c0.is_odd(); res.push(parity); res } } impl<P: Fp6Parameters> FromCompressedBits for Fp6<P> { #[inline] fn decompress(compressed: Vec<bool>) -> Result<Self, Error> { 
let len = compressed.len() - 1; let parity_flag_set = compressed[len]; let c1 = Fp3::read_bits(compressed[..len].to_vec())?; let c0 = { let t = Fp3::one() + &P::mul_fp3_by_nonresidue(&(c1.square())); t.sqrt() }; match c0 { Some(c0_u) => { let neg_c0u = c0_u.neg(); let c0_s = if c0_u.is_odd() ^ parity_flag_set { neg_c0u } else { c0_u }; Ok(Self::new(c0_s, c1)) } _ => Err(Box::new(BitSerializationError::UndefinedSqrt)), } } }
use super::quadratic_extension::*; use std::marker::PhantomData; use std::ops::{MulAssign, Neg}; use crate::{ bits::{FromBits, FromCompressedBits, ToBits, ToCompressedBits}, fields::{Field, Fp3, Fp3Parameters, SquareRootField}, BitSerializationError, Error, }; pub trait Fp6Parameters: 'static + Send + Sync { type Fp3Params: Fp3Parameters; const NONRESIDUE: Fp3<Self::Fp3Params>; const FROBENIUS_COEFF_FP6_C1: &'static [<Self::Fp3Params as Fp3Parameters>::Fp]; #[inline(always)] fn mul_fp3_by_nonresidue(fe: &Fp3<Self::Fp3Params>) -> Fp3<Self::Fp3Params> { let mut res = *fe; res.c0 = fe.c2; res.c1 = fe.c0; res.c2 = fe.c1; res.c0 = <Self::Fp3Params as Fp3Parameters>::mul_fp_by_nonresidue(&res.c0); res } } pub struct Fp6ParamsWrapper<P: Fp6Parameters>(PhantomData<P>); impl<P: Fp6Parameters> QuadExtParameters for Fp6ParamsWrapper<P> { type BasePrimeField = <P::Fp3Params as Fp3Parameters>::Fp; type BaseField = Fp3<P::Fp3Params>; type FrobCoeff = Self::BasePrimeField; const DEGREE_OVER_BASE_PRIME_FIELD: usize = 6; const NONRESIDUE: Self::BaseField = P::NONRESIDUE; const FROBENIUS_COEFF_C1: &'static [Self::FrobCoeff] = P::FROBENIUS_COEFF_FP6_C1; #[inline(always)] fn mul_base_field_by_nonresidue(fe: &Self::BaseField) -> Self::BaseField { P::mul_fp3_by_nonresidue(fe) } fn mul_base_field_by_frob_coeff(fe: &mut Self::BaseField, power: usize) { fe.mul_assign_by_fp(&Self::FROBENIUS_COEFF_C1[power % Self::DEGREE_OVER_BASE_PRIME_FIELD]); } } pub type Fp6<P> = QuadExtField<Fp6ParamsWrapper<P>>; impl<P: Fp6Parameters> Fp6<P> { pub fn mul_by_034( &mut self, c0: &<P::Fp3Params as Fp3Parameters>::Fp, c3: &<P::Fp3Params as Fp3Parameters>::Fp, c4: &<P::Fp3Params as Fp3Parameters>::Fp, ) { let z0 = self.c0.c0; let z1 = self.c0.c1; let z2 = self.c0.c2; let z3 = self.c1.c0; let z4 = self.c1.c1; let z5 = self.c1.c2; let x0 = *c0; let x3 = *c3; let x4 = *c4; let mut tmp1 = x3; tmp1.mul_assign(&<P::Fp3Params as Fp3Parameters>::NONRESIDUE); let mut tmp2 = x4; tmp2.mul_assign(&<P::Fp3Params as 
Fp3Parameters>::NONRESIDUE); self.c0.c0 = x0 * &z0 + &(tmp1 * &z5) + &(tmp2 * &z4); self.c0.c1 = x0 * &z1 + &(x3 * &z3) + &(tmp2 * &z5); self.c0.c2 = x0 * &z2 + &(x3 * &z4) + &(x4 * &z3); self.c1.c0 = x0 * &z3 + &(x3 * &z0) + &(tmp2 * &z2); self.c1.c1 = x0 * &z4 + &(x3 * &z1) + &(x4 * &z0); self.c1.c2 = x0 * &z5 + &(x3 * &z2) + &(x4 * &z1); } pub fn mul_by_014( &
pub fn mul_by_2345(self, other: &Self) -> Self /* Devegili OhEig Scott Dahab --- Multiplication and Squaring on Pairing-Friendly Fields.pdf; Section 3 (Karatsuba) */ { let v0 = { let t = other.c0.c2 * &<P::Fp3Params as Fp3Parameters>::NONRESIDUE; Fp3::<P::Fp3Params>::new(self.c0.c1 * &t, self.c0.c2 * &t, self.c0.c0 * &other.c0.c2) }; let v1 = self.c1 * &other.c1; let beta_v1 = P::mul_fp3_by_nonresidue(&v1); let c0 = v0 + &beta_v1; let c1 = (self.c0 + &self.c1) * &(other.c0 + &other.c1) - &v0 - &v1; Self::new(c0, c1) } } impl<P: Fp6Parameters> ToCompressedBits for Fp6<P> { #[inline] fn compress(&self) -> Vec<bool> { let mut res = self.c1.write_bits(); let parity = self.c0.is_odd(); res.push(parity); res } } impl<P: Fp6Parameters> FromCompressedBits for Fp6<P> { #[inline] fn decompress(compressed: Vec<bool>) -> Result<Self, Error> { let len = compressed.len() - 1; let parity_flag_set = compressed[len]; let c1 = Fp3::read_bits(compressed[..len].to_vec())?; let c0 = { let t = Fp3::one() + &P::mul_fp3_by_nonresidue(&(c1.square())); t.sqrt() }; match c0 { Some(c0_u) => { let neg_c0u = c0_u.neg(); let c0_s = if c0_u.is_odd() ^ parity_flag_set { neg_c0u } else { c0_u }; Ok(Self::new(c0_s, c1)) } _ => Err(Box::new(BitSerializationError::UndefinedSqrt)), } } }
mut self, c0: &<P::Fp3Params as Fp3Parameters>::Fp, c1: &<P::Fp3Params as Fp3Parameters>::Fp, c4: &<P::Fp3Params as Fp3Parameters>::Fp, ) { let z0 = self.c0.c0; let z1 = self.c0.c1; let z2 = self.c0.c2; let z3 = self.c1.c0; let z4 = self.c1.c1; let z5 = self.c1.c2; let x0 = *c0; let x1 = *c1; let x4 = *c4; let mut tmp1 = x1; tmp1.mul_assign(&<P::Fp3Params as Fp3Parameters>::NONRESIDUE); let mut tmp2 = x4; tmp2.mul_assign(&<P::Fp3Params as Fp3Parameters>::NONRESIDUE); self.c0.c0 = x0 * &z0 + &(tmp1 * &z2) + &(tmp2 * &z4); self.c0.c1 = x0 * &z1 + &(x1 * &z0) + &(tmp2 * &z5); self.c0.c2 = x0 * &z2 + &(x1 * &z1) + &(x4 * &z3); self.c1.c0 = x0 * &z3 + &(tmp1 * &z5) + &(tmp2 * &z2); self.c1.c1 = x0 * &z4 + &(x1 * &z3) + &(x4 * &z0); self.c1.c2 = x0 * &z5 + &(x1 * &z4) + &(x4 * &z1); }
function_block-function_prefix_line
[ { "content": "pub trait Fp3Parameters: 'static + Send + Sync {\n\n type Fp: PrimeField + SquareRootField;\n\n\n\n //alpha\n\n const NONRESIDUE: Self::Fp;\n\n // coefficients of the powers of the Frobenius automorphism as linear map over F\n\n // (pi^0(X), pi^1(X), pi^2(X)) = (C1_0*X, C1_1*X +C1_2*X),\n\n const FROBENIUS_COEFF_FP3_C1: &'static [Self::Fp];\n\n // (pi^0(X^2), pi^1(X^2), pi^2(X^2)) = (C2_0*X^2, C2_1*X^2 +C2_2*X^2),\n\n const FROBENIUS_COEFF_FP3_C2: &'static [Self::Fp];\n\n /// p^3 - 1 = 2^s * t, where t is odd.\n\n const TWO_ADICITY: u32;\n\n const T_MINUS_ONE_DIV_TWO: &'static [u64];\n\n /// t-th power of a quadratic nonresidue in Fp3.\n\n /// this is needed for the square root algorithm\n\n const QUADRATIC_NONRESIDUE_TO_T: (Self::Fp, Self::Fp, Self::Fp);\n\n\n\n #[inline(always)]\n\n fn mul_fp_by_nonresidue(fe: &Self::Fp) -> Self::Fp {\n\n Self::NONRESIDUE * fe\n", "file_path": "algebra/src/fields/models/fp3.rs", "rank": 0, "score": 491986.8631513133 }, { "content": "/// Model for quadratic extension field F4 as towered extension\n\n///\n\n// F4 = F2[Y]/(Y^2-X),\n\n// F2 = Fp[X]/(X^2-alpha),\n\n///\n\n/// using a \"non-residue\" alpha mod p such that (X^4-alpha) is irreducible over Fp.\n\n/// Its arithmetics includes pairing-relevant operations such as exponentiation and\n\n/// squaring on the r-th unit roots of F4 (cyclotomic exp. 
and squ.).\n\npub trait Fp4Parameters: 'static + Send + Sync {\n\n type Fp2Params: Fp2Parameters;\n\n\n\n /// This *must* equal (0, 1);\n\n /// see [[DESD06, Section 5.1]](https://eprint.iacr.org/2006/471.pdf).\n\n const NONRESIDUE: Fp2<Self::Fp2Params>;\n\n\n\n /// Coefficients for the Frobenius automorphism.\n\n /// non_residue^((modulus^i-1)/4) for i=0,1,2,3\n\n const FROBENIUS_COEFF_FP4_C1: &'static [<Self::Fp2Params as Fp2Parameters>::Fp];\n\n\n\n #[inline(always)]\n\n fn mul_fp2_by_nonresidue(fe: &Fp2<Self::Fp2Params>) -> Fp2<Self::Fp2Params> {\n\n // see [[DESD06, Section 5.1]](https://eprint.iacr.org/2006/471.pdf).\n\n Fp2::new(\n\n <Self::Fp2Params as Fp2Parameters>::NONRESIDUE * &fe.c1,\n\n fe.c0,\n\n )\n\n }\n\n}\n", "file_path": "algebra/src/fields/models/fp4.rs", "rank": 1, "score": 426811.5907810498 }, { "content": "pub trait ModelParameters: Send + Sync + 'static {\n\n type BaseField: Field + SquareRootField;\n\n type ScalarField: PrimeField + SquareRootField + Into<<Self::ScalarField as PrimeField>::BigInt>;\n\n}\n\n\n", "file_path": "algebra/src/curves/models/mod.rs", "rank": 2, "score": 426806.9589839061 }, { "content": "pub trait Fp2Parameters: 'static + Send + Sync {\n\n type Fp: PrimeField + SquareRootField;\n\n\n\n //alpha\n\n const NONRESIDUE: Self::Fp;\n\n //quadratic nonresidue for square root algorithm\n\n const QUADRATIC_NONRESIDUE: (Self::Fp, Self::Fp);\n\n //coefficients of the powers of the Frobenius automorphism as linear map over F\n\n // (pi^0(X), pi^1(X)) = (C1_0*X, C1_1*X),\n\n const FROBENIUS_COEFF_FP2_C1: &'static [Self::Fp];\n\n\n\n #[inline(always)]\n\n fn mul_fp_by_nonresidue(fe: &Self::Fp) -> Self::Fp {\n\n Self::NONRESIDUE * fe\n\n }\n\n}\n\n\n\npub struct Fp2ParamsWrapper<P: Fp2Parameters>(PhantomData<P>);\n\n\n\nimpl<P: Fp2Parameters> QuadExtParameters for Fp2ParamsWrapper<P> {\n", "file_path": "algebra/src/fields/models/fp2.rs", "rank": 3, "score": 426806.9589839061 }, { "content": "/// A trait that defines parameters 
for a prime field.\n\npub trait FpParameters: 'static + Send + Sync + Sized {\n\n type BigInt: BigInteger;\n\n\n\n /// The modulus of the field.\n\n const MODULUS: Self::BigInt;\n\n\n\n /// The number of bits needed to represent the `Self::MODULUS`.\n\n const MODULUS_BITS: u32;\n\n\n\n /// The number of bits that must be shaved from the beginning of\n\n /// the representation when randomly sampling.\n\n const REPR_SHAVE_BITS: u32;\n\n\n\n /// R = 2^256 % Self::MODULUS\n\n const R: Self::BigInt;\n\n\n\n /// R2 = R^2 % Self::MODULUS\n\n const R2: Self::BigInt;\n\n\n\n /// INV = -(MODULUS^{-1} mod MODULUS) mod MODULUS\n", "file_path": "algebra/src/fields/mod.rs", "rank": 5, "score": 417829.81343505287 }, { "content": "/// Model for cubic extension field of a prime field F=BasePrimeField\n\n/// F3 = F[X]/(X^3-alpha),\n\n/// with alpha being a (quadratic) \"non-residue\" (for which X^3-alpha is irreducible).\n\n///\n\n/// We implement inversion according to\n\n/// Beuchat, et al., High-Speed Software Implementation of the Optimal Ate Pairing over Barreto–Naehrig Curves\n\n/// https://eprint.iacr.org/2010/354.pdf,\n\n/// and square and Karatsuba multiplication according to\n\n/// Devegili, et al., Multiplication and Squaring on Abstract Pairing-Friendly Fields\n\n/// https://eprint.iacr.org/2006/471.pdf\n\npub trait CubicExtParameters: 'static + Send + Sync {\n\n /// The prime field that this cubic extension is eventually an extension of.\n\n type BasePrimeField: PrimeField;\n\n /// The base field that this field is a cubic extension of.\n\n type BaseField: Field;\n\n /// The type of the coefficients for an efficient implementation of the\n\n /// Frobenius endomorphism.\n\n type FrobCoeff: Field;\n\n\n\n /// The degree of the extension over the base prime field.\n\n const DEGREE_OVER_BASE_PRIME_FIELD: usize;\n\n\n\n /// The cubic non-residue used to construct the extension.\n\n const NONRESIDUE: Self::BaseField;\n\n\n\n /// Coefficients for the Frobenius 
automorphism.\n\n const FROBENIUS_COEFF_C1: &'static [Self::FrobCoeff];\n\n const FROBENIUS_COEFF_C2: &'static [Self::FrobCoeff];\n\n\n\n /// A specializable method for multiplying an element of the base field by\n", "file_path": "algebra/src/fields/models/cubic_extension.rs", "rank": 6, "score": 417749.1539424879 }, { "content": "pub trait Fp6Parameters: 'static + Send + Sync + Copy {\n\n type Fp2Params: Fp2Parameters;\n\n\n\n const NONRESIDUE: Fp2<Self::Fp2Params>;\n\n\n\n /// Coefficients for the Frobenius automorphism.\n\n const FROBENIUS_COEFF_FP6_C1: &'static [Fp2<Self::Fp2Params>];\n\n const FROBENIUS_COEFF_FP6_C2: &'static [Fp2<Self::Fp2Params>];\n\n\n\n #[inline(always)]\n\n fn mul_fp2_by_nonresidue(fe: &Fp2<Self::Fp2Params>) -> Fp2<Self::Fp2Params> {\n\n Self::NONRESIDUE * fe\n\n }\n\n}\n\n\n\npub struct Fp6ParamsWrapper<P: Fp6Parameters>(PhantomData<P>);\n\n\n\nimpl<P: Fp6Parameters> CubicExtParameters for Fp6ParamsWrapper<P> {\n\n type BasePrimeField = <P::Fp2Params as Fp2Parameters>::Fp;\n\n type BaseField = Fp2<P::Fp2Params>;\n", "file_path": "algebra/src/fields/models/fp6_3over2.rs", "rank": 7, "score": 408765.6200359243 }, { "content": "pub trait Fp12Parameters: 'static + Send + Sync + Copy {\n\n type Fp6Params: Fp6Parameters;\n\n\n\n /// This *must* equal (0, 1, 0);\n\n /// see [[DESD06, Section 6.1]](https://eprint.iacr.org/2006/471.pdf).\n\n const NONRESIDUE: Fp6<Self::Fp6Params>;\n\n\n\n /// Coefficients for the Frobenius automorphism.\n\n const FROBENIUS_COEFF_FP12_C1: &'static [Fp2<Fp2Params<Self>>];\n\n\n\n /// Multiply by quadratic nonresidue v.\n\n #[inline(always)]\n\n fn mul_fp6_by_nonresidue(fe: &Fp6<Self::Fp6Params>) -> Fp6<Self::Fp6Params> {\n\n // see [[DESD06, Section 6.1]](https://eprint.iacr.org/2006/471.pdf).\n\n let new_c0 = Self::Fp6Params::mul_fp2_by_nonresidue(&fe.c2);\n\n let new_c1 = fe.c0;\n\n let new_c2 = fe.c1;\n\n Fp6::new(new_c0, new_c1, new_c2)\n\n }\n\n}\n", "file_path": 
"algebra/src/fields/models/fp12_2over3over2.rs", "rank": 8, "score": 408765.6200359243 }, { "content": "/// Model for quadratic extension field of prime field F=Fp\n\n/// F2 = F[X]/(X^2-alpha),\n\n/// with alpha being a (quadratic) \"non-residue\".\n\n/// We implement the inversion and Karatsuba multiplication according to\n\n/// Mrabet, Joye, Guide to Pairing-based Cryptography\n\n/// https://dl.acm.org/doi/book/10.5555/3092800\n\n/// and the square root algorithm from\n\n/// Adj, et al., Square root computation over even extension fields,\n\n/// https://eprint.iacr.org/2012/685.pdf\n\npub trait QuadExtParameters: 'static + Send + Sync + Sized {\n\n /// The prime field that this quadratic extension is eventually an extension of.\n\n type BasePrimeField: PrimeField;\n\n /// The base field that this field is a quadratic extension of.\n\n type BaseField: Field;\n\n /// The type of the coefficients for an efficient implemntation of the\n\n /// Frobenius endomorphism.\n\n type FrobCoeff: Field;\n\n\n\n /// The degree of the extension over the base prime field.\n\n const DEGREE_OVER_BASE_PRIME_FIELD: usize;\n\n\n\n /// The quadratic non-residue used to construct the extension.\n\n const NONRESIDUE: Self::BaseField;\n\n\n\n /// Coefficients for the Frobenius automorphism.\n\n const FROBENIUS_COEFF_C1: &'static [Self::FrobCoeff];\n\n\n\n /// A specializable method for multiplying an element of the base field by\n\n /// the quadratic non-residue. 
This is used in Karatsuba multiplication\n", "file_path": "algebra/src/fields/models/quadratic_extension.rs", "rank": 9, "score": 404470.6974917419 }, { "content": "pub trait PairingEngine: Sized + 'static + Copy + Debug + Sync + Send + Eq + PartialEq {\n\n /// This is the scalar field of the G1/G2 groups.\n\n type Fr: PrimeField + SquareRootField + Into<<Self::Fr as PrimeField>::BigInt>;\n\n\n\n /// The projective representation of an element in G1.\n\n type G1Projective: ProjectiveCurve<BaseField = Self::Fq, ScalarField = Self::Fr, Affine = Self::G1Affine>\n\n + From<Self::G1Affine>\n\n + Into<Self::G1Affine>;\n\n\n\n /// The affine representation of an element in G1.\n\n type G1Affine: AffineCurve<BaseField = Self::Fq, ScalarField = Self::Fr, Projective = Self::G1Projective>\n\n + From<Self::G1Projective>\n\n + Into<Self::G1Projective>\n\n + Into<Self::G1Prepared>;\n\n\n\n /// A G1 element that has been preprocessed for use in a pairing.\n\n type G1Prepared: ToBytes\n\n + FromBytes\n\n + Serialize\n\n + for<'a> Deserialize<'a>\n", "file_path": "algebra/src/curves/mod.rs", "rank": 10, "score": 368276.7732833774 }, { "content": "/// This trait expresses the verifier for proof carrying data from accumulator SNARKs.\n\n/// The PCD is assumed to process a set of proof carrying data consisting of\n\n/// - a statement,\n\n/// - accumulator SNARK proof (i.e. a SNARK proof plus its accumulator)\n\npub trait PCD: Sized + Send + Sync {\n\n type PCDAccumulator: ItemAccumulator;\n\n type PCDVerifierKey: AsRef<<Self::PCDAccumulator as ItemAccumulator>::AccumulatorVerifierKey>;\n\n\n\n /// Perform only the efficient part (i.e. sublinear w.r.t. the circuit size) of proof verification.\n\n /// Typically includes few algebraic operations, e.g. 
the verification of Marlin's sumcheck\n\n /// equations, batching commitments and their claimed openings, dlog reduction,and so on.\n\n /// Return the accumulator for the proof if verification was successful,\n\n /// Error otherwise.\n\n fn succinct_verify(\n\n &self,\n\n vk: &Self::PCDVerifierKey,\n\n ) -> Result<<Self::PCDAccumulator as ItemAccumulator>::Item, PCDError>;\n\n\n\n /// Perform the non-efficient part of proof verification.\n\n /// Verify / decide the current accumulator, by checking the non-efficient predicate.\n\n /// Typically involves one or several MSMs.\n\n fn hard_verify<R: RngCore>(\n\n &self,\n\n acc: <Self::PCDAccumulator as ItemAccumulator>::Item,\n", "file_path": "proof-systems/src/darlin/pcd/mod.rs", "rank": 11, "score": 341061.0551284404 }, { "content": "/// Defines a domain over which finite field (I)FFTs can be performed.\n\npub trait EvaluationDomain<F: PrimeField>: Debug + Send + Sync {\n\n /// Returns the size of the domain\n\n fn size(&self) -> usize;\n\n\n\n fn size_as_field_element(&self) -> F {\n\n F::from_repr(F::BigInt::from(self.size() as u64))\n\n }\n\n\n\n /// Interprets size as a field element F and returns its inverse in the field\n\n fn size_inv(&self) -> F;\n\n\n\n /// Returns the generator of the multiplicative subgroup over which FFT is performed\n\n fn group_gen(&self) -> F;\n\n\n\n /// Compute a FFT.\n\n fn fft(&self, coeffs: &[F]) -> Vec<F> {\n\n let mut coeffs = coeffs.to_vec();\n\n self.fft_in_place(&mut coeffs);\n\n coeffs\n\n }\n", "file_path": "algebra/src/fft/domain/mod.rs", "rank": 12, "score": 316765.3797671779 }, { "content": "pub trait BnParameters: 'static {\n\n const X: &'static [u64];\n\n const X_IS_NEGATIVE: bool;\n\n const ATE_LOOP_COUNT: &'static [i8];\n\n const ATE_LOOP_COUNT_IS_NEGATIVE: bool;\n\n const TWIST_TYPE: TwistType;\n\n const TWIST_MUL_BY_Q_X: Fp2<Self::Fp2Params>;\n\n const TWIST_MUL_BY_Q_Y: Fp2<Self::Fp2Params>;\n\n type Fp: PrimeField + SquareRootField + Into<<Self::Fp as 
PrimeField>::BigInt>;\n\n type Fp2Params: Fp2Parameters<Fp = Self::Fp>;\n\n type Fp6Params: Fp6Parameters<Fp2Params = Self::Fp2Params>;\n\n type Fp12Params: Fp12Parameters<Fp6Params = Self::Fp6Params>;\n\n type G1Parameters: SWModelParameters<BaseField = Self::Fp>;\n\n type G2Parameters: SWModelParameters<\n\n BaseField = Fp2<Self::Fp2Params>,\n\n ScalarField = <Self::G1Parameters as ModelParameters>::ScalarField,\n\n >;\n\n}\n\n\n\npub mod g1;\n", "file_path": "algebra/src/curves/models/bn/mod.rs", "rank": 13, "score": 270853.3511962668 }, { "content": "pub trait Bls12Parameters: 'static {\n\n const X: &'static [u64];\n\n const X_IS_NEGATIVE: bool;\n\n const TWIST_TYPE: TwistType;\n\n type Fp: PrimeField + SquareRootField + Into<<Self::Fp as PrimeField>::BigInt>;\n\n type Fp2Params: Fp2Parameters<Fp = Self::Fp>;\n\n type Fp6Params: Fp6Parameters<Fp2Params = Self::Fp2Params>;\n\n type Fp12Params: Fp12Parameters<Fp6Params = Self::Fp6Params>;\n\n type G1Parameters: SWModelParameters<BaseField = Self::Fp>;\n\n type G2Parameters: SWModelParameters<\n\n BaseField = Fp2<Self::Fp2Params>,\n\n ScalarField = <Self::G1Parameters as ModelParameters>::ScalarField,\n\n >;\n\n}\n\n\n\npub mod g1;\n\npub mod g2;\n\n\n\npub use self::{\n\n g1::{G1Affine, G1Prepared, G1Projective},\n", "file_path": "algebra/src/curves/models/bls12/mod.rs", "rank": 14, "score": 270853.3511962668 }, { "content": "pub trait MNT4Parameters: 'static {\n\n // the loop count for the Miller loop, equals the |Frobenius trace of E - 1|\n\n const ATE_LOOP_COUNT: &'static [u64];\n\n // the non-adjacent normal form of ATE_LOOP_COUNT trimmed of leading zeroes and\n\n // without MSB, starting with the least significant bit\n\n const WNAF: &'static [i32];\n\n // true/false depending whether the Frobenius trace is negative/positive\n\n const ATE_IS_LOOP_COUNT_NEG: bool;\n\n // The twist factor twist=Y^2 for\n\n // E': y'^2 = x'^3 + a*twist^2*x + twist^3 * b\n\n // as needed for the point evaluation of the Miller 
loop lines\n\n const TWIST: Fp2<Self::Fp2Params>;\n\n // Weierstrass coefficient a'=a*omega^4= a*alpha of the quadratic twist E'\n\n // as needed for the point evaluation of the Miller loop lines\n\n const TWIST_COEFF_A: Fp2<Self::Fp2Params>;\n\n // the final pairing exponent is decomposed as\n\n // (p^4-1)/r = (p^2-1) (p^2 + 1)/r,\n\n // wheras\n\n // (p^2 +1)/r = m_1*p + m_0,\n\n // where m_1, 0<= m_1 < p, is\n", "file_path": "algebra/src/curves/models/mnt4/mod.rs", "rank": 15, "score": 270853.3511962668 }, { "content": "pub trait MNT6Parameters: 'static {\n\n // the loop count for the Miller loop, equals the |Frobenius trace of E - 1|\n\n const ATE_LOOP_COUNT: &'static [u64];\n\n // the non-adjacent normal form of ATE_LOOP_COUNT trimmed of leading zeroes and\n\n // without MSB, starting with the least significant bit\n\n const WNAF: &'static [i32];\n\n // true/false depending whether the Frobenius trace is negative/positive\n\n const ATE_IS_LOOP_COUNT_NEG: bool;\n\n // The twist factor twist=Y^2 for\n\n // E': y'^2 = x'^3 + a*twist^2*x + twist^3 * b\n\n // as needed for the point evaluation of the Miller loop lines\n\n const TWIST: Fp3<Self::Fp3Params>;\n\n // Weierstrass coefficient a'=a*omega^4= a*alpha of the quadratic twist E'\n\n // as needed for the point evaluation of the Miller loop lines\n\n // translated via the twist map\n\n const TWIST_COEFF_A: Fp3<Self::Fp3Params>;\n\n // the final pairing exponent is decomposed as\n\n // (p^6-1)/r = (p^3-1)(p+1) (p^2 - p + 1)/r,\n\n // wheras\n\n // (p^2 - p + 1)/r = m_1*p + m_0,\n", "file_path": "algebra/src/curves/models/mnt6/mod.rs", "rank": 16, "score": 270853.3511962668 }, { "content": "pub trait MulShort<Rhs = Self> {\n\n type Output;\n\n\n\n #[must_use]\n\n fn mul_short(self, rhs: Rhs) -> Self::Output;\n\n}\n\n\n", "file_path": "algebra/src/fields/mod.rs", "rank": 17, "score": 267192.4122180966 }, { "content": "pub trait MulShortAssign<Rhs = Self> {\n\n fn mul_short_assign(&mut self, rhs: Rhs);\n\n}\n\n\n", 
"file_path": "algebra/src/fields/mod.rs", "rank": 18, "score": 264217.76791290933 }, { "content": "/// Definition of parameters needed to implement and optimize a Merkle Tree whose nodes and leaves\n\n/// are Field elements. The trait is generic with respect to the arity of the Merkle Tree.\n\npub trait FieldBasedMerkleTreeParameters: 'static + Clone {\n\n type Data: Field;\n\n /// Actually unnecessary, but simplifies the overall design\n\n type H: FieldBasedHash<Data = Self::Data>;\n\n /// The arity of the Merkle Tree\n\n const MERKLE_ARITY: usize;\n\n /// The pre-computed hashes of the empty nodes for the different levels of the Merkle Tree\n\n const ZERO_NODE_CST: Option<FieldBasedMerkleTreePrecomputedZeroConstants<'static, Self::H>>;\n\n}\n\n\n\n/// Pre-computed hashes of the empty nodes for the different levels of the Merkle Tree\n\n#[derive(Derivative)]\n\n#[derivative(Debug(bound = \"\"), Eq(bound = \"\"), PartialEq(bound = \"\"))]\n\npub struct FieldBasedMerkleTreePrecomputedZeroConstants<'a, H: FieldBasedHash> {\n\n pub nodes: &'a [H::Data],\n\n pub merkle_arity: usize,\n\n}\n\n\n", "file_path": "primitives/src/merkle_tree/field_based_mht/mod.rs", "rank": 19, "score": 248341.5819789142 }, { "content": "pub trait PoseidonParameters: 'static + FieldBasedHashParameters + Clone {\n\n const T: usize; // Number of S-Boxes\n\n const R_F: i32; // Number of full rounds\n\n const R_P: i32; // Number of partial rounds\n\n const ZERO: Self::Fr; // The zero element in the field\n\n const AFTER_ZERO_PERM: &'static [Self::Fr]; // State vector after a zero permutation\n\n const ROUND_CST: &'static [Self::Fr]; // Array of round constants\n\n const MDS_CST: &'static [Self::Fr]; // The MDS matrix\n\n\n\n /// Add round constants to `state` starting from `start_idx_cst`, modifying `state` in place.\n\n #[inline]\n\n fn add_round_constants(\n\n state: &mut [<Self as FieldBasedHashParameters>::Fr],\n\n start_idx_cst: &mut usize,\n\n ) {\n\n for d in state.iter_mut() {\n\n let rc 
= Self::ROUND_CST[*start_idx_cst];\n\n *d += &rc;\n\n *start_idx_cst += 1;\n\n }\n", "file_path": "primitives/src/crh/poseidon/mod.rs", "rank": 20, "score": 247239.73315683834 }, { "content": "pub fn batch_inversion<F: Field>(v: &mut [F]) {\n\n // Montgomery’s Trick and Fast Implementation of Masked AES\n\n // Genelle, Prouff and Quisquater\n\n // Section 3.2\n\n\n\n // First pass: compute [a, ab, abc, ...]\n\n let mut prod = Vec::with_capacity(v.len());\n\n let mut tmp = F::one();\n\n for f in v.iter().filter(|f| !f.is_zero()) {\n\n tmp.mul_assign(f);\n\n prod.push(tmp);\n\n }\n\n\n\n // Invert `tmp`.\n\n tmp = tmp.inverse().unwrap(); // Guaranteed to be nonzero.\n\n\n\n // Second pass: iterate backwards to compute inverses\n\n for (f, s) in v\n\n .iter_mut()\n\n // Backwards\n", "file_path": "algebra/src/fields/mod.rs", "rank": 21, "score": 237760.17692741362 }, { "content": "pub fn group_test<G: Group>(a: G, mut b: G) {\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n let zero = G::zero();\n\n let fr_zero = G::ScalarField::zero();\n\n let fr_one = G::ScalarField::one();\n\n let fr_two = fr_one + &fr_one;\n\n assert_eq!(zero.is_zero(), true);\n\n assert_eq!(a.mul(&fr_one), a);\n\n assert_eq!(a.mul(&fr_two), a + &a);\n\n assert_eq!(a.mul(&fr_zero), zero);\n\n assert_eq!(a.mul(&fr_zero) - &a, -a);\n\n assert_eq!(a.mul(&fr_one) - &a, zero);\n\n assert_eq!(a.mul(&fr_two) - &a, a);\n\n\n\n // a + 0 = a\n\n assert_eq!(a + &zero, a);\n\n // a - 0 = a\n\n assert_eq!(a - &zero, a);\n\n // a - a = 0\n\n assert_eq!(a - &a, zero);\n", "file_path": "algebra/src/groups/tests.rs", "rank": 22, "score": 227340.1208935961 }, { "content": "#[inline]\n\npub fn log2(x: usize) -> u32 {\n\n if x == 0 {\n\n 0\n\n } else if x.is_power_of_two() {\n\n 1usize.leading_zeros() - x.leading_zeros()\n\n } else {\n\n 0usize.leading_zeros() - x.leading_zeros()\n\n }\n\n}\n\n\n\n/// Returns the floor of the base-2 logarithm of `x`.\n", "file_path": "algebra/src/lib.rs", "rank": 23, 
"score": 225586.51186229233 }, { "content": "#[inline]\n\npub fn buffer_bit_byte_size(modulus_bits: usize) -> (usize, usize) {\n\n let byte_size = buffer_byte_size(modulus_bits);\n\n ((byte_size * 8), byte_size)\n\n}\n\n\n\n/// Converts the number of bits required to represent a number\n\n/// into the number of bytes required to represent it.\n\n#[inline]\n\npub const fn buffer_byte_size(modulus_bits: usize) -> usize {\n\n (modulus_bits + 7) / 8\n\n}\n\n\n\n// Implement Serialization for tuples\n\nmacro_rules! impl_tuple {\n\n ($( $ty: ident : $no: tt, )*) => {\n\n impl<$($ty, )*> CanonicalSerialize for ($($ty,)*) where\n\n $($ty: CanonicalSerialize,)*\n\n {\n\n #[inline]\n\n fn serialize<W: Write>(&self, mut _writer: W) -> Result<(), SerializationError> {\n", "file_path": "algebra/src/serialize/mod.rs", "rank": 24, "score": 222898.35911802982 }, { "content": "#[inline]\n\npub fn log2_floor(x: usize) -> u32 {\n\n if x == 0 {\n\n 0\n\n } else {\n\n (x as f64).log2() as u32\n\n }\n\n}\n", "file_path": "algebra/src/lib.rs", "rank": 25, "score": 222387.47186259893 }, { "content": "/// The interface for a field that supports an efficient square-root operation.\n\npub trait SquareRootField: Field {\n\n /// Returns the Legendre symbol.\n\n fn legendre(&self) -> LegendreSymbol;\n\n\n\n /// Returns the square root of self, if it exists.\n\n #[must_use]\n\n fn sqrt(&self) -> Option<Self>;\n\n\n\n /// Sets `self` to be the square root of `self`, if it exists.\n\n fn sqrt_in_place(&mut self) -> Option<&mut Self>;\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum LegendreSymbol {\n\n Zero = 0,\n\n QuadraticResidue = 1,\n\n QuadraticNonResidue = -1,\n\n}\n\n\n\nimpl LegendreSymbol {\n", "file_path": "algebra/src/fields/mod.rs", "rank": 26, "score": 220243.75754386073 }, { "content": "/// Attempts to convert a boolean vec into a valid field element for field `ToF`.\n\n/// If `from` is not a valid element for field ToF, this function returns None.\n\npub fn convert<ToF: 
PrimeField>(from: Vec<bool>) -> Result<ToF, Error> {\n\n ToF::read_bits(from)\n\n}\n\n\n", "file_path": "algebra/src/fields/mod.rs", "rank": 27, "score": 219132.62774582644 }, { "content": "/// Uses three bits to perform a lookup into a table, where the last bit\n\n/// performs negation\n\npub trait ThreeBitCondNegLookupGadget<ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n{\n\n type TableConstant;\n\n fn three_bit_cond_neg_lookup<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n cs: CS,\n\n bits: &[Boolean],\n\n b0b1: &Boolean,\n\n constants: &[Self::TableConstant],\n\n ) -> Result<Self, SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n}\n", "file_path": "r1cs/gadgets/std/src/select.rs", "rank": 28, "score": 214709.59394992655 }, { "content": "#[allow(dead_code)]\n\npub fn compute_bytes_truncation_size<F: PrimeField>() -> usize {\n\n let bigint_bytes = (F::Params::MODULUS_BITS + F::Params::REPR_SHAVE_BITS) / 8;\n\n let safe_bytes = F::Params::CAPACITY / 8;\n\n (bigint_bytes - safe_bytes) as usize\n\n}\n\n\n", "file_path": "primitives/src/lib.rs", "rank": 29, "score": 210732.44412178328 }, { "content": "type Error = Box<dyn std::error::Error>;\n\n\n", "file_path": "algebra/src/to_field_vec.rs", "rank": 30, "score": 205992.56230275615 }, { "content": "/// Return the number of leading bits to skip in a field element belonging to a field\n\n/// 'from' having `modulus_from` bits in order to safely convert it into a field element\n\n/// belonging to a field 'to' having `modulus_to` bits.\n\npub fn compute_truncation_size(modulus_from: i32, modulus_to: i32) -> usize {\n\n (match modulus_from - modulus_to {\n\n moduli_diff if moduli_diff > 0 => moduli_diff + 1,\n\n moduli_diff if moduli_diff == 0 => 1,\n\n moduli_diff if moduli_diff < 0 => 0,\n\n _ => unreachable!(),\n\n }) as usize\n\n}\n\n\n\nuse algebra::{FpParameters, PrimeField};\n\n\n\n/// Return the number of bytes to skip in a little-endian byte order representation\n\n/// of a field element belonging to field 
`F`.\n", "file_path": "primitives/src/lib.rs", "rank": 31, "score": 204752.79292317733 }, { "content": "pub fn generate_macro_string(num_limbs: usize) -> std::string::String {\n\n if num_limbs > 3 * MAX_REGS {\n\n panic!(\n\n \"Number of limbs must be <= {} and MAX_REGS >= 6\",\n\n 3 * MAX_REGS\n\n );\n\n }\n\n let mut macro_string = String::from(\n\n \"\n\n macro_rules! llvm_asm_mul {\n\n ($limbs:expr, $a:expr, $b:expr, $modulus:expr, $mod_prime:expr) => {\n\n match $limbs {\",\n\n );\n\n macro_string += &generate_matches(num_limbs, true);\n\n\n\n macro_string += &\"\n\n macro_rules! llvm_asm_square {\n\n ($limbs:expr, $a:expr, $modulus:expr, $mod_prime:expr) => {\n\n match $limbs {\";\n\n macro_string += &generate_matches(num_limbs, false);\n\n macro_string\n\n}\n\n\n", "file_path": "algebra/field-assembly/src/lib.rs", "rank": 32, "score": 197047.4312447585 }, { "content": "pub trait ToBits {\n\n /// Serialize `self` into a bit vector using a BigEndian bit order representation.\n\n fn write_bits(&self) -> Vec<bool>;\n\n}\n\n\n", "file_path": "algebra/src/bits.rs", "rank": 33, "score": 195603.86844704885 }, { "content": "pub trait ToBytes {\n\n /// Serializes `self` into `writer`.\n\n fn write<W: Write>(&self, writer: W) -> IoResult<()>;\n\n}\n\n\n", "file_path": "algebra/src/bytes.rs", "rank": 34, "score": 195603.86844704885 }, { "content": "pub trait ToCompressedBits {\n\n fn compress(&self) -> Vec<bool>;\n\n}\n\n\n", "file_path": "algebra/src/bits.rs", "rank": 35, "score": 193289.6546254749 }, { "content": "/// The interface for a generic field.\n\npub trait Field:\n\n ToBytes\n\n + FromBytes\n\n + FromBytesChecked\n\n + ToBits\n\n + FromBits\n\n + Serialize\n\n + for<'a> Deserialize<'a>\n\n + CanonicalSerialize\n\n + CanonicalSerializeWithFlags\n\n + CanonicalDeserialize\n\n + CanonicalDeserializeWithFlags\n\n + SemanticallyValid\n\n + Copy\n\n + Clone\n\n + Debug\n\n + Display\n\n + Default\n\n + Send\n\n + Sync\n", "file_path": "algebra/src/fields/mod.rs", 
"rank": 36, "score": 193289.6546254749 }, { "content": "pub trait PRF {\n\n type Input: FromBytes + Serialize + for<'a> Deserialize<'a> + Default;\n\n type Output: ToBytes + Serialize + for<'a> Deserialize<'a> + Eq + Clone + Default + Hash;\n\n type Seed: FromBytes + ToBytes + Serialize + for<'a> Deserialize<'a> + Clone + Default + Debug;\n\n\n\n fn evaluate(seed: &Self::Seed, input: &Self::Input) -> Result<Self::Output, CryptoError>;\n\n}\n", "file_path": "primitives/src/prf/mod.rs", "rank": 37, "score": 193289.6546254749 }, { "content": "pub trait Group:\n\n ToBytes\n\n + FromBytes\n\n + FromBytesChecked\n\n + SemanticallyValid\n\n + Serialize\n\n + for<'a> Deserialize<'a>\n\n + CanonicalSerialize\n\n + CanonicalDeserialize\n\n + Copy\n\n + Clone\n\n + Debug\n\n + Display\n\n + Default\n\n + Send\n\n + Sync\n\n + 'static\n\n + Eq\n\n + Hash\n\n + Neg<Output = Self>\n", "file_path": "algebra/src/groups/mod.rs", "rank": 38, "score": 193289.6546254749 }, { "content": "pub trait SemanticallyValid {\n\n /// Does all the necessary checks on `Self` to estabilish its semantic validity.\n\n /// NOTE: The meaning of \"semantic validity\" for `Self` is actually defined by\n\n /// the implementation of this function.\n\n fn is_valid(&self) -> bool;\n\n}\n\n\n\nimpl<T: SemanticallyValid> SemanticallyValid for Vec<T> {\n\n fn is_valid(&self) -> bool {\n\n for item in self.iter() {\n\n if !item.is_valid() {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n}\n", "file_path": "algebra/src/validity.rs", "rank": 39, "score": 193289.6546254749 }, { "content": "/// Serializer in little endian format.\n\n/// The serialization format must be 'length-extension' safe.\n\n/// e.g. 
if T implements Canonical Serialize and Deserialize,\n\n/// then for all strings `x, y`, if `a = T::deserialize(Reader(x))` and `a` is not an error,\n\n/// then it must be the case that `a = T::deserialize(Reader(x || y))`,\n\n/// and that both readers read the same number of bytes.\n\n///\n\n/// This trait can be derived if all fields of a struct implement\n\n/// `CanonicalSerialize` and the `derive` feature is enabled.\n\npub trait CanonicalSerialize {\n\n /// Serializes `self` into `writer`.\n\n /// It is left up to a particular type for how it strikes the\n\n /// serialization efficiency vs compression tradeoff.\n\n /// For standard types (e.g. `bool`, lengths, etc.) typically an uncompressed\n\n /// form is used, whereas for algebraic types compressed forms are used.\n\n ///\n\n /// Particular examples of interest:\n\n /// `bool` - 1 byte encoding\n\n /// uints - Direct encoding\n\n /// Length prefixing (for any container implemented by default) - 8 byte encoding\n\n /// Elliptic curves - compressed point encoding\n\n fn serialize<W: Write>(&self, writer: W) -> Result<(), SerializationError>;\n\n\n\n fn serialized_size(&self) -> usize;\n\n\n\n /// Like `serialize_uncompressed()`, but doesn't write (if present) any additional information\n\n /// required to reconstruct `self` (e.g. 
the length of a container type).\n\n /// For this reason, there isn't any deserialization counterpart function in\n\n /// CanonicalDeserialize trait.\n", "file_path": "algebra/src/serialize/mod.rs", "rank": 40, "score": 191080.0746263577 }, { "content": "pub trait SignatureScheme {\n\n type Parameters: Clone + Send + Sync + Serialize + for<'a> Deserialize<'a>;\n\n type PublicKey: ToBytes\n\n + Serialize\n\n + for<'a> Deserialize<'a>\n\n + Hash\n\n + Eq\n\n + Clone\n\n + Default\n\n + Send\n\n + Sync;\n\n type SecretKey: ToBytes + Serialize + for<'a> Deserialize<'a> + Clone + Default;\n\n type Signature: Serialize + for<'a> Deserialize<'a> + Clone + Default + Send + Sync;\n\n\n\n fn setup<R: Rng>(rng: &mut R) -> Result<Self::Parameters, Error>;\n\n\n\n fn keygen<R: Rng>(\n\n pp: &Self::Parameters,\n\n rng: &mut R,\n\n ) -> Result<(Self::PublicKey, Self::SecretKey), Error>;\n", "file_path": "primitives/src/signature/mod.rs", "rank": 41, "score": 191066.07375998952 }, { "content": "/// This defines a `BigInteger`, a smart wrapper around a\n\n/// sequence of `u64` limbs, least-significant digit first.\n\npub trait BigInteger:\n\n ToBytes\n\n + FromBytes\n\n + Serialize\n\n + for<'a> Deserialize<'a>\n\n + CanonicalSerialize\n\n + CanonicalDeserialize\n\n + Copy\n\n + Clone\n\n + Debug\n\n + Default\n\n + Display\n\n + Eq\n\n + Ord\n\n + Send\n\n + Sized\n\n + Sync\n\n + 'static\n\n + UniformRand\n\n + AsMut<[u64]>\n", "file_path": "algebra/src/biginteger/mod.rs", "rank": 42, "score": 191066.07375998952 }, { "content": "/// Projective representation of an elliptic curve point guaranteed to be\n\n/// in the correct prime order subgroup.\n\npub trait ProjectiveCurve:\n\n Eq\n\n + Sized\n\n + ToBytes\n\n + FromBytes\n\n + Serialize\n\n + for<'a> Deserialize<'a>\n\n + CanonicalSerialize\n\n + CanonicalDeserialize\n\n + SemanticallyValid\n\n + FromBytesChecked\n\n + Copy\n\n + Clone\n\n + Default\n\n + Send\n\n + Sync\n\n + Hash\n\n + Debug\n\n + Display\n\n + UniformRand\n", 
"file_path": "algebra/src/curves/mod.rs", "rank": 43, "score": 191066.07375998952 }, { "content": "/// The interface for a prime field.\n\npub trait PrimeField:\n\n Field<BasePrimeField = Self> + FromStr + From<BigUint> + Into<BigUint>\n\n{\n\n type Params: FpParameters<BigInt = Self::BigInt>;\n\n type BigInt: BigInteger;\n\n\n\n /// Returns a prime field element from its underlying representation.\n\n fn from_repr(repr: <Self::Params as FpParameters>::BigInt) -> Self;\n\n\n\n /// Returns the underlying representation of the prime field element.\n\n fn into_repr(&self) -> Self::BigInt;\n\n\n\n /// Returns a prime field element from its underlying raw representation.\n\n fn from_repr_raw(repr: Self::BigInt) -> Self;\n\n\n\n /// Returns the underlying raw representation of the prime field element.\n\n fn into_repr_raw(&self) -> Self::BigInt;\n\n\n\n /// Reads bytes in big-endian, and converts them to a field element.\n\n /// If the bytes are larger than the modulus, it will reduce them.\n", "file_path": "algebra/src/fields/mod.rs", "rank": 44, "score": 191066.07375998952 }, { "content": "/// Affine representation of an elliptic curve point guaranteed to be\n\n/// in the correct prime order subgroup.\n\npub trait AffineCurve:\n\n Eq\n\n + Sized\n\n + ToBytes\n\n + FromBytes\n\n + Serialize\n\n + for<'a> Deserialize<'a>\n\n + CanonicalSerialize\n\n + CanonicalDeserialize\n\n + SemanticallyValid\n\n + FromBytesChecked\n\n + ToCompressedBits\n\n + FromCompressedBits\n\n + Copy\n\n + Clone\n\n + Default\n\n + Send\n\n + Sync\n\n + Hash\n\n + Debug\n", "file_path": "algebra/src/curves/mod.rs", "rank": 45, "score": 191066.07375998952 }, { "content": "pub trait CommitmentScheme {\n\n type Output: ToBytes + Serialize + for<'a> Deserialize<'a> + Clone + Default + Eq + Hash + Debug;\n\n type Parameters: Clone + Serialize + for<'a> Deserialize<'a>;\n\n type Randomness: Clone\n\n + ToBytes\n\n + Serialize\n\n + for<'a> Deserialize<'a>\n\n + Default\n\n + Eq\n\n + UniformRand\n\n 
+ Debug;\n\n\n\n fn setup<R: Rng>(r: &mut R) -> Result<Self::Parameters, Error>;\n\n\n\n fn commit(\n\n parameters: &Self::Parameters,\n\n input: &[u8],\n\n r: &Self::Randomness,\n\n ) -> Result<Self::Output, Error>;\n\n}\n", "file_path": "primitives/src/commitment/mod.rs", "rank": 46, "score": 191066.07375998952 }, { "content": "pub trait SBox {\n\n type Field: Field;\n\n type Parameters: FieldBasedHashParameters<Fr = Self::Field>;\n\n\n\n // Apply this SBox to the state, if performing a full round\n\n fn apply_full(state: &mut Vec<Self::Field>);\n\n\n\n // Apply this SBox to the state, if performing a partial round\n\n fn apply_partial(state: &mut Vec<Self::Field>);\n\n}\n\n\n", "file_path": "primitives/src/crh/sbox.rs", "rank": 47, "score": 191066.07375998952 }, { "content": "pub trait FieldBasedHash {\n\n type Data: Field;\n\n type Parameters: FieldBasedHashParameters<Fr = Self::Data>;\n\n\n\n /// Initialize a Field Hash accepting inputs of constant length `input_size`:\n\n /// any attempt to finalize the hash after having updated the Self instance\n\n /// with a number of inputs not equal to `input_size` should result in an error.\n\n /// Initialize the hash to a null state, or with `personalization` if specified.\n\n fn init_constant_length(input_size: usize, personalization: Option<&[Self::Data]>) -> Self;\n\n\n\n /// Initialize a Field Hash accepting inputs of variable length.\n\n /// It is able to serve two different modes, selected by the boolean `mod_rate`:\n\n /// - `mod_rate` = False is for the ususal variable length hash, whereas\n\n /// - `mod_rate` = True allows the input only to be a multiple of the rate (and hence\n\n /// should throw an error when trying to finalize with a non-multiple of rate input).\n\n /// This mode allows an optimized handling of padding, saving constraints in SNARK applications;\n\n fn init_variable_length(mod_rate: bool, personalization: Option<&[Self::Data]>) -> Self;\n\n\n\n /// Update the hash with `input`.\n\n fn 
update(&mut self, input: Self::Data) -> &mut Self;\n\n\n\n /// Return the hash. This method is idempotent, and calling it multiple times will\n\n /// give the same result.\n\n fn finalize(&self) -> Result<Self::Data, Error>;\n\n\n\n /// Reset self to its initial state, allowing to change `personalization` too if needed.\n\n fn reset(&mut self, personalization: Option<&[Self::Data]>) -> &mut Self;\n\n}\n\n\n", "file_path": "primitives/src/crh/mod.rs", "rank": 48, "score": 188927.903837086 }, { "content": "pub trait NIZK {\n\n type Circuit;\n\n type AssignedCircuit;\n\n type VerifierInput: ?Sized;\n\n type ProvingParameters: Clone;\n\n type VerificationParameters: Clone + Default;\n\n type PreparedVerificationParameters: Clone + Default + From<Self::VerificationParameters>;\n\n type Proof: ToBytes + Clone + Default;\n\n}\n\n\n", "file_path": "r1cs/gadgets/crypto/src/nizk/mod.rs", "rank": 49, "score": 188927.903837086 }, { "content": "pub trait FixedLengthCRH {\n\n const INPUT_SIZE_BITS: usize;\n\n type Output: ToBytes + Serialize + for<'a> Deserialize<'a> + Clone + Eq + Hash + Default;\n\n type Parameters: Clone + Serialize + for<'a> Deserialize<'a> + Default;\n\n\n\n fn setup<R: Rng>(r: &mut R) -> Result<Self::Parameters, Error>;\n\n fn evaluate(parameters: &Self::Parameters, input: &[u8]) -> Result<Self::Output, Error>;\n\n}\n\n\n", "file_path": "primitives/src/crh/mod.rs", "rank": 50, "score": 188927.903837086 }, { "content": "pub trait FieldBasedVrf {\n\n type Data: Field;\n\n type PublicKey: FromBytes\n\n + FromBytesChecked\n\n + ToBytes\n\n + Hash\n\n + Eq\n\n + Copy\n\n + Clone\n\n + Default\n\n + Debug\n\n + Send\n\n + Sync\n\n + UniformRand\n\n + Serialize\n\n + for<'a> Deserialize<'a>;\n\n type SecretKey: ToBytes + Clone + Default + Serialize + for<'a> Deserialize<'a>;\n\n type Proof: Copy\n\n + Clone\n\n + Default\n", "file_path": "primitives/src/vrf/mod.rs", "rank": 51, "score": 188927.903837086 }, { "content": "pub trait FromBytes: Sized {\n\n /// Reads 
`Self` from `reader`.\n\n fn read<R: Read>(reader: R) -> IoResult<Self>;\n\n}\n\n\n", "file_path": "algebra/src/bytes.rs", "rank": 52, "score": 188223.85425136343 }, { "content": "pub trait FromBits: Sized {\n\n /// Reads `self` from `bits`, where `bits` are expected to be\n\n /// in a BigEndian bit order representation.\n\n fn read_bits(bits: Vec<bool>) -> Result<Self, Error>;\n\n}\n\n\n", "file_path": "algebra/src/bits.rs", "rank": 53, "score": 188223.85425136343 }, { "content": "/// The `ItemAccumulator` trait comes with the essential functions for proving\n\n/// and verifying aggregation, as well as checking (\"deciding\") if an item\n\n/// satisfies the predicate.\n\n/// It applies to mixed type accumulators as described in our [Darlin Proof Tree doc](TODO: add link):\n\n/// There, a (full) accumulator is a composite structure of dlog and inner\n\n/// sumcheck (\"single\") accumulators, from both groups of the EC cycle (the\n\n/// \"current\", and the \"collected\" ones). Although within recursion we do\n\n/// not separate accumulation strategy from the SNARK on protocol level,\n\n/// we nevertheless serve this functionality for post processing outside the PCD.\n\npub trait ItemAccumulator {\n\n type AccumulatorProverKey;\n\n type AccumulatorVerifierKey;\n\n type AccumulationProof;\n\n type Item;\n\n\n\n /// Decide whether an/the public accumulator/s are correct,\n\n /// i.e. whether they satisfy the non-efficient predicate.\n\n /// Typically involves non-succinct MSMs.\n\n fn check_items<R: RngCore>(\n\n vk: &Self::AccumulatorVerifierKey,\n\n accumulators: &[Self::Item],\n\n rng: &mut R,\n\n ) -> Result<bool, Error>;\n\n\n\n /// Amortization strategy for items as a separate argument. 
\n\n /// Returns the new/\"updated\" item and a non-interactive\n\n /// proof of its correct aggregation.\n\n fn accumulate_items(\n\n ck: &Self::AccumulatorProverKey,\n", "file_path": "proof-systems/src/darlin/accumulators/mod.rs", "rank": 54, "score": 186878.58357671386 }, { "content": "pub trait MerkleTreeConfig {\n\n const HEIGHT: usize;\n\n type H: FixedLengthCRH;\n\n}\n\n\n\n/// Stores the hashes of a particular path (in order) from leaf to root.\n\n/// Our path `is_left_child()` if the boolean in `path` is false.\n\n#[derive(Derivative)]\n\n#[derivative(\n\n Clone(bound = \"P: MerkleTreeConfig\"),\n\n Debug(bound = \"P: MerkleTreeConfig, <P::H as FixedLengthCRH>::Output: fmt::Debug\")\n\n)]\n\n#[derive(Serialize, Deserialize)]\n\npub struct MerkleTreePath<P: MerkleTreeConfig> {\n\n pub path: Vec<(<P::H as FixedLengthCRH>::Output, bool)>,\n\n}\n\n\n\npub type MerkleTreeParams<P> = <<P as MerkleTreeConfig>::H as FixedLengthCRH>::Parameters;\n\npub type MerkleTreeDigest<P> = <<P as MerkleTreeConfig>::H as FixedLengthCRH>::Output;\n\n\n", "file_path": "primitives/src/merkle_tree/mod.rs", "rank": 55, "score": 186870.31645379885 }, { "content": "pub trait FieldBasedSignatureScheme {\n\n type Data: Field;\n\n type PublicKey: FromBytes\n\n + FromBytesChecked\n\n + ToBytes\n\n + Hash\n\n + Eq\n\n + Copy\n\n + Clone\n\n + Default\n\n + Debug\n\n + Send\n\n + Sync\n\n + UniformRand\n\n + Serialize\n\n + for<'a> Deserialize<'a>;\n\n type SecretKey: ToBytes + Clone + Default + Serialize + for<'a> Deserialize<'a>;\n\n type Signature: Copy\n\n + Clone\n\n + Default\n", "file_path": "primitives/src/signature/mod.rs", "rank": 56, "score": 186870.31645379885 }, { "content": "pub trait BatchFieldBasedHash {\n\n type Data: Field;\n\n\n\n /// Specification of this type allows to provide a default implementation and more flexibility\n\n /// when included in other traits/struct (i.e. 
a FieldBasedMerkleTree using both simple and\n\n /// batch hashes can only specify this trait, simplifying its design and usage).\n\n /// Still, it's a reasonable addition for a trait like this.\n\n type BaseHash: FieldBasedHash<Data = Self::Data>;\n\n\n\n /// Given an `input_array` of size n * hash_rate, batches the computation of the n hashes\n\n /// and outputs the n hash results.\n\n /// NOTE: The hashes are independent from each other, therefore the output is not some sort\n\n /// of aggregated hash but it's actually the hash result of each of the inputs, grouped in\n\n /// hash_rate chunks.\n\n fn batch_evaluate(input_array: &[Self::Data]) -> Result<Vec<Self::Data>, Error> {\n\n let rate = <<Self::BaseHash as FieldBasedHash>::Parameters as FieldBasedHashParameters>::R;\n\n if input_array.len() % rate != 0 {\n\n return Err(Box::new(CryptoError::Other(\n\n \"The length of the input data array is not a multiple of the rate\".to_owned(),\n\n )));\n", "file_path": "primitives/src/crh/mod.rs", "rank": 57, "score": 186870.31645379885 }, { "content": "pub trait UniformRand: Sized {\n\n fn rand<R: Rng + ?Sized>(rng: &mut R) -> Self;\n\n}\n\n\n\nimpl<T> UniformRand for T\n\nwhere\n\n Standard: Distribution<T>,\n\n{\n\n #[inline]\n\n fn rand<R: Rng + ?Sized>(rng: &mut R) -> Self {\n\n rng.sample(Standard)\n\n }\n\n}\n", "file_path": "algebra/src/rand.rs", "rank": 58, "score": 186000.27338587804 }, { "content": "pub trait FromCompressedBits: Sized {\n\n fn decompress(compressed: Vec<bool>) -> Result<Self, Error>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum BitSerializationError {\n\n InvalidFieldElement(String),\n\n UndefinedSqrt,\n\n NotPrimeOrder,\n\n NotOnCurve,\n\n NotInCorrectSubgroup,\n\n InvalidFlags,\n\n}\n\n\n\nimpl std::fmt::Display for BitSerializationError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let msg = match self {\n\n BitSerializationError::InvalidFieldElement(s) => s.to_owned(),\n\n BitSerializationError::UndefinedSqrt 
=> \"square root doesn't exist in field\".to_owned(),\n\n BitSerializationError::NotPrimeOrder => {\n", "file_path": "algebra/src/bits.rs", "rank": 59, "score": 186000.27338587804 }, { "content": "pub fn frobenius_test<F: Field, C: AsRef<[u64]>>(characteristic: C, maxpower: usize) {\n\n let mut rng = XorShiftRng::seed_from_u64(1231275789u64);\n\n\n\n for _ in 0..ITERATIONS {\n\n let a = F::rand(&mut rng);\n\n\n\n let mut a_0 = a;\n\n a_0.frobenius_map(0);\n\n assert_eq!(a, a_0);\n\n\n\n let mut a_q = a.pow(&characteristic);\n\n for power in 1..maxpower {\n\n let mut a_qi = a;\n\n a_qi.frobenius_map(power);\n\n assert_eq!(a_qi, a_q);\n\n\n\n a_q = a_q.pow(&characteristic);\n\n }\n\n }\n\n}\n", "file_path": "algebra/src/fields/tests.rs", "rank": 60, "score": 185713.2687813419 }, { "content": "pub fn sqrt_field_test<F: SquareRootField>(elem: F) {\n\n let square = elem.square();\n\n let sqrt = square.sqrt().unwrap();\n\n assert!(sqrt == elem || sqrt == -elem);\n\n if let Some(sqrt) = elem.sqrt() {\n\n assert!(sqrt.square() == elem || sqrt.square() == -elem);\n\n }\n\n random_sqrt_tests::<F>();\n\n}\n\n\n", "file_path": "algebra/src/fields/tests.rs", "rank": 61, "score": 185589.8075518677 }, { "content": "pub trait FieldHasherGadget<\n\n H: FieldBasedHash<Data = ConstraintF>,\n\n ConstraintF: Field,\n\n HG: FieldBasedHashGadget<H, ConstraintF>,\n\n>\n\n{\n\n fn enforce_hash<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n personalization: Option<&[HG::DataGadget]>,\n\n ) -> Result<HG::DataGadget, SynthesisError>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::FieldBasedHashGadget;\n\n use algebra::PrimeField;\n\n use primitives::FieldBasedHash;\n\n use r1cs_core::{\n\n ConstraintSystem, ConstraintSystemAbstract, ConstraintSystemDebugger, SynthesisMode,\n", "file_path": "r1cs/gadgets/crypto/src/crh/mod.rs", "rank": 62, "score": 184888.8404179999 }, { "content": "/// Deserializer in little endian format.\n\n/// This trait can be derived if all 
fields of a struct implement\n\n/// `CanonicalDeserialize` and the `derive` feature is enabled.\n\npub trait CanonicalDeserialize: Sized {\n\n /// Reads `Self` from `reader`.\n\n fn deserialize<R: Read>(reader: R) -> Result<Self, SerializationError>;\n\n\n\n /// Reads `Self` from `reader` without performing validity checks.\n\n /// Should be used *only* when the input is trusted.\n\n fn deserialize_unchecked<R: Read>(reader: R) -> Result<Self, SerializationError> {\n\n CanonicalDeserialize::deserialize(reader)\n\n }\n\n\n\n /// Reads `Self` from `reader` without compression.\n\n #[inline]\n\n fn deserialize_uncompressed<R: Read>(reader: R) -> Result<Self, SerializationError> {\n\n CanonicalDeserialize::deserialize(reader)\n\n }\n\n\n\n /// Reads `self` from `reader` without compression, and without performing\n\n /// validity checks. Should be used *only* when the input is trusted.\n\n #[inline]\n\n fn deserialize_uncompressed_unchecked<R: Read>(reader: R) -> Result<Self, SerializationError> {\n", "file_path": "algebra/src/serialize/mod.rs", "rank": 63, "score": 183873.6926689449 }, { "content": "pub trait Assignment<T> {\n\n fn get(self) -> Result<T, SynthesisError>;\n\n}\n\n\n\nimpl<T> Assignment<T> for Option<T> {\n\n fn get(self) -> Result<T, SynthesisError> {\n\n match self {\n\n Some(v) => Ok(v),\n\n None => Err(SynthesisError::AssignmentMissing),\n\n }\n\n }\n\n}\n\n\n", "file_path": "r1cs/gadgets/std/src/lib.rs", "rank": 64, "score": 183862.1034629745 }, { "content": "pub trait CurvePair {\n\n type PairingEngineTick: PairingEngine;\n\n type PairingEngineTock: PairingEngine;\n\n type PairingGadgetTick: PG<\n\n Self::PairingEngineTick,\n\n <Self::PairingEngineTock as PairingEngine>::Fr,\n\n >;\n\n type PairingGadgetTock: PG<\n\n Self::PairingEngineTock,\n\n <Self::PairingEngineTick as PairingEngine>::Fr,\n\n >;\n\n\n\n const TICK_CURVE: &'static str;\n\n const TOCK_CURVE: &'static str;\n\n}\n\n\n", "file_path": 
"proof-systems/src/gm17/examples/recursive-snark/constraints.rs", "rank": 65, "score": 182979.3293146803 }, { "content": "pub trait CurvePair {\n\n type PairingEngineTick: PairingEngine;\n\n type PairingEngineTock: PairingEngine;\n\n type PairingGadgetTick: PG<\n\n Self::PairingEngineTick,\n\n <Self::PairingEngineTock as PairingEngine>::Fr,\n\n >;\n\n type PairingGadgetTock: PG<\n\n Self::PairingEngineTock,\n\n <Self::PairingEngineTick as PairingEngine>::Fr,\n\n >;\n\n\n\n const TICK_CURVE: &'static str;\n\n const TOCK_CURVE: &'static str;\n\n}\n\n\n", "file_path": "proof-systems/src/groth16/examples/recursive-snark/constraints.rs", "rank": 66, "score": 182979.3293146803 }, { "content": "/// Deserializer in little endian format allowing flags to be encoded.\n\npub trait CanonicalDeserializeWithFlags: Sized {\n\n /// Reads `Self` and `Flags` from `reader`.\n\n /// Returns empty flags by default.\n\n fn deserialize_with_flags<R: Read, F: Flags>(\n\n reader: R,\n\n ) -> Result<(Self, F), SerializationError>;\n\n}\n\n\n", "file_path": "algebra/src/serialize/mod.rs", "rank": 67, "score": 181804.51607968737 }, { "content": "pub trait PedersenWindow: Clone {\n\n const WINDOW_SIZE: usize;\n\n const NUM_WINDOWS: usize;\n\n}\n\n\n\n#[derive(Clone, Default, Serialize, Deserialize)]\n\n#[serde(bound(deserialize = \"G: Group\"))]\n\npub struct PedersenParameters<G: Group> {\n\n pub generators: Vec<Vec<G>>,\n\n}\n\n\n\npub struct PedersenCRH<G: Group, W: PedersenWindow> {\n\n group: PhantomData<G>,\n\n window: PhantomData<W>,\n\n}\n\n\n\nimpl<G: Group, W: PedersenWindow> PedersenCRH<G, W> {\n\n pub fn create_generators<R: Rng>(rng: &mut R) -> Vec<Vec<G>> {\n\n let mut generators_powers = Vec::new();\n\n for _ in 0..W::NUM_WINDOWS {\n", "file_path": "primitives/src/crh/pedersen/mod.rs", "rank": 68, "score": 181804.51607968737 }, { "content": "pub trait InjectiveMapGadget<\n\n G: Group,\n\n I: InjectiveMap<G>,\n\n ConstraintF: Field,\n\n GG: GroupGadget<G, 
ConstraintF>,\n\n>\n\n{\n\n type OutputGadget: EqGadget<ConstraintF>\n\n + ToBytesGadget<ConstraintF>\n\n + CondSelectGadget<ConstraintF>\n\n + AllocGadget<I::Output, ConstraintF>\n\n + Debug\n\n + Clone\n\n + Sized;\n\n\n\n fn evaluate_map<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n cs: CS,\n\n ge: &GG,\n\n ) -> Result<Self::OutputGadget, SynthesisError>;\n\n}\n", "file_path": "r1cs/gadgets/crypto/src/crh/injective_map/mod.rs", "rank": 69, "score": 181137.93254307072 }, { "content": "/// The `EndoMulCurve` trait for curves that have a non-trivial endomorphism\n\n/// `Phi` of the form `Phi(x,y) = (zeta*x,y)`.\n\npub trait EndoMulCurve: AffineCurve {\n\n /// Apply `Phi`\n\n fn apply_endomorphism(&self) -> Self;\n\n\n\n /// Conversion of a bit sequence used in `endo_mul()` into its equivalent\n\n /// scalar\n\n fn endo_rep_to_scalar(bits: Vec<bool>) -> Result<Self::ScalarField, Error>;\n\n\n\n /// Endomorphism-based multiplication of `&self` with `bits`, a little-endian\n\n /// endomorphism representation.\n\n fn endo_mul(&self, bits: Vec<bool>) -> Result<Self::Projective, Error>;\n\n}\n\n\n\nimpl<C: ProjectiveCurve> Group for C {\n\n type ScalarField = C::ScalarField;\n\n #[must_use]\n\n fn zero() -> Self {\n\n <C as ProjectiveCurve>::zero()\n\n }\n\n\n", "file_path": "algebra/src/curves/mod.rs", "rank": 70, "score": 179828.999615232 }, { "content": "/// Define parameters required to implement a hash function working with field arithmetics.\n\n/// TODO: Depending on the hash construction some parameters may be present and others not\n\n/// we should think about particularizing or generalizing this trait definition.\n\npub trait FieldBasedHashParameters: Clone {\n\n type Fr: Field;\n\n\n\n /// The rate of the hash function\n\n const R: usize;\n\n}\n\n\n", "file_path": "primitives/src/crh/mod.rs", "rank": 71, "score": 179828.47817627064 }, { "content": "pub trait BatchSBox: SBox {\n\n fn apply_full_batch(vec_state: &mut [Vec<Self::Field>]) {\n\n 
vec_state.par_iter_mut().for_each(|s| Self::apply_full(s));\n\n }\n\n\n\n fn apply_partial_batch(vec_state: &mut [Vec<Self::Field>]) {\n\n vec_state\n\n .par_iter_mut()\n\n .for_each(|s| Self::apply_partial(s));\n\n }\n\n}\n", "file_path": "primitives/src/crh/sbox.rs", "rank": 72, "score": 179823.0400438884 }, { "content": "/// Serializer in little endian format allowing to encode flags.\n\npub trait CanonicalSerializeWithFlags: CanonicalSerialize {\n\n /// Serializes `self` and `flags` into `writer`.\n\n fn serialize_with_flags<W: Write, F: Flags>(\n\n &self,\n\n writer: W,\n\n flags: F,\n\n ) -> Result<(), SerializationError>;\n\n\n\n /// Get size of serialized self with flags.\n\n fn serialized_size_with_flags<F: Flags>(&self) -> usize;\n\n}\n\n\n", "file_path": "algebra/src/serialize/mod.rs", "rank": 73, "score": 179823.0400438884 }, { "content": "pub trait MontgomeryModelParameters: ModelParameters {\n\n const COEFF_A: Self::BaseField;\n\n const COEFF_B: Self::BaseField;\n\n\n\n type TEModelParameters: TEModelParameters<BaseField = Self::BaseField>;\n\n}\n\n\n", "file_path": "algebra/src/curves/models/mod.rs", "rank": 74, "score": 177913.52894056882 }, { "content": "pub trait TEModelParameters: ModelParameters {\n\n const COEFF_A: Self::BaseField;\n\n const COEFF_D: Self::BaseField;\n\n const COFACTOR: &'static [u64];\n\n const COFACTOR_INV: Self::ScalarField;\n\n const AFFINE_GENERATOR_COEFFS: (Self::BaseField, Self::BaseField);\n\n\n\n type MontgomeryModelParameters: MontgomeryModelParameters<BaseField = Self::BaseField>;\n\n\n\n #[inline(always)]\n\n fn mul_by_a(elem: &Self::BaseField) -> Self::BaseField {\n\n let mut copy = *elem;\n\n copy *= &Self::COEFF_A;\n\n copy\n\n }\n\n\n\n #[inline(always)]\n\n fn empirical_recommended_wnaf_for_scalar(\n\n scalar: <Self::ScalarField as PrimeField>::BigInt,\n\n ) -> usize {\n", "file_path": "algebra/src/curves/models/mod.rs", "rank": 75, "score": 177913.52894056882 }, { "content": "pub trait PoseidonShortParameters: 
PoseidonParameters {\n\n /// MDS matrix supporting short Montgomery multiplication with respect to the short\n\n /// Montgomery constant R_2=2^64\n\n const MDS_CST_SHORT: &'static [Self::Fr];\n\n}\n\n\n\n#[derive(Derivative)]\n\n#[derivative(Clone(bound = \"\"), Debug(bound = \"\"))]\n\npub struct PoseidonHash<\n\n F: PrimeField,\n\n P: PoseidonParameters<Fr = F>,\n\n SB: SBox<Field = F, Parameters = P>,\n\n> {\n\n state: Vec<F>,\n\n pending: Vec<F>,\n\n input_size: Option<usize>,\n\n updates_ctr: usize,\n\n mod_rate: bool,\n\n _parameters: PhantomData<P>,\n\n _sbox: PhantomData<SB>,\n", "file_path": "primitives/src/crh/poseidon/mod.rs", "rank": 76, "score": 177913.52894056882 }, { "content": "pub trait SWModelParameters: ModelParameters {\n\n const COEFF_A: Self::BaseField;\n\n const COEFF_B: Self::BaseField;\n\n const COFACTOR: &'static [u64];\n\n const COFACTOR_INV: Self::ScalarField;\n\n const AFFINE_GENERATOR_COEFFS: (Self::BaseField, Self::BaseField);\n\n\n\n #[inline(always)]\n\n fn mul_by_a(elem: &Self::BaseField) -> Self::BaseField {\n\n let mut copy = *elem;\n\n copy *= &Self::COEFF_A;\n\n copy\n\n }\n\n\n\n #[inline(always)]\n\n fn add_b(elem: &Self::BaseField) -> Self::BaseField {\n\n let mut copy = *elem;\n\n copy += &Self::COEFF_B;\n\n copy\n\n }\n", "file_path": "algebra/src/curves/models/mod.rs", "rank": 77, "score": 177913.52894056882 }, { "content": "/// Definition of a Merkle Path for a Merkle Tree whose leaves and nodes are field elements. 
The\n\n/// trait is generic with respect to the arity of the Merkle Tree and to the hash function used.\n\npub trait FieldBasedMerkleTreePath:\n\n ToBytes + FromBytes + Serialize + for<'a> Deserialize<'a> + Eq + PartialEq + Clone + Debug + Default\n\n{\n\n type H: FieldBasedHash;\n\n type Path: Clone + Debug + Serialize + for<'a> Deserialize<'a>;\n\n type Parameters: FieldBasedMerkleTreeParameters<\n\n Data = <Self::H as FieldBasedHash>::Data,\n\n H = Self::H,\n\n >;\n\n\n\n /// Return a new instance of the struct implementing this trait given the raw `path`\n\n fn new(path: Self::Path) -> Self;\n\n\n\n /// Compute the root of a Merkle Tree starting from a Merkle Path for a given `leaf`\n\n fn compute_root(\n\n &self,\n\n leaf: &<Self::H as FieldBasedHash>::Data,\n\n ) -> <Self::H as FieldBasedHash>::Data;\n\n\n\n /// Verify the Merkle Path for `leaf` given the `root` of a Merkle Tree with height `height`.\n", "file_path": "primitives/src/merkle_tree/field_based_mht/mod.rs", "rank": 78, "score": 177656.4963520511 }, { "content": "#[allow(dead_code)]\n\npub fn test_canonical_serialize_deserialize<\n\n T: PartialEq + std::fmt::Debug + CanonicalSerialize + CanonicalDeserialize,\n\n>(\n\n negative_test: bool,\n\n data: &T,\n\n) {\n\n // serialize/deserialize\n\n {\n\n let buf_size = data.serialized_size();\n\n\n\n let mut serialized = Vec::with_capacity(buf_size);\n\n CanonicalSerialize::serialize(data, &mut serialized).unwrap();\n\n assert_eq!(serialized.len(), buf_size);\n\n let de = T::deserialize(&serialized[..]).unwrap();\n\n assert_eq!(data, &de);\n\n\n\n if negative_test {\n\n let wrong_buf_size = buf_size - 1;\n\n T::deserialize(&serialized[..wrong_buf_size]).unwrap_err();\n\n CanonicalSerialize::serialize(data, &mut serialized[..wrong_buf_size]).unwrap_err();\n", "file_path": "algebra/src/serialize/mod.rs", "rank": 79, "score": 177645.61712508535 }, { "content": "pub trait FieldBasedMerkleTreePathGadget<\n\n P: FieldBasedMerkleTreePath<H = H>,\n\n H: 
FieldBasedHash<Data = ConstraintF>,\n\n HGadget: FieldBasedHashGadget<H, ConstraintF>,\n\n ConstraintF: PrimeField,\n\n>:\n\n AllocGadget<P, ConstraintF> + ConstantGadget<P, ConstraintF> + EqGadget<ConstraintF> + Clone\n\n{\n\n /// Return the length of the `self` path.\n\n fn length(&self) -> usize;\n\n\n\n /// Enforce that the root reconstructed from `self` and `leaf` is equal to\n\n /// `expected_root`.\n\n fn check_membership<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n expected_root: &HGadget::DataGadget,\n\n leaf: &HGadget::DataGadget,\n\n ) -> Result<(), SynthesisError> {\n\n self.conditionally_check_membership(cs, expected_root, leaf, &Boolean::Constant(true))\n", "file_path": "r1cs/gadgets/crypto/src/merkle_tree/mod.rs", "rank": 80, "score": 177645.40583444445 }, { "content": "/// Types that can be converted to a vector of `F` elements. Useful for specifying\n\n/// how public inputs to a constraint system should be represented inside\n\n/// that constraint system.\n\npub trait ToConstraintField<F: Field> {\n\n fn to_field_elements(&self) -> Result<Vec<F>, Error>;\n\n}\n\n\n\nimpl<F: PrimeField> ToConstraintField<F> for F {\n\n fn to_field_elements(&self) -> Result<Vec<F>, Error> {\n\n Ok(vec![*self])\n\n }\n\n}\n\n\n\n// Impl for base field\n\nimpl<F: Field> ToConstraintField<F> for [F] {\n\n #[inline]\n\n fn to_field_elements(&self) -> Result<Vec<F>, Error> {\n\n Ok(self.to_vec())\n\n }\n\n}\n\n\n\nimpl<ConstraintF: Field> ToConstraintField<ConstraintF> for () {\n\n #[inline]\n", "file_path": "algebra/src/to_field_vec.rs", "rank": 81, "score": 177355.33733546728 }, { "content": "pub trait EndoMulParameters: SWModelParameters {\n\n /// Parameters for endomorphism-based scalar multiplication [Halo](https://eprint.iacr.org/2019/1021).\n\n /// A non-trivial cubic root of unity `ENDO_COEFF` for a curve endomorphism of the form\n\n /// (x, y) -> (ENDO_COEFF * x, y).\n\n const ENDO_COEFF: Self::BaseField;\n\n\n\n /// The scalar 
representation `zeta_r` of `ENDO_COEFF`.\n\n /// NOTE : If one wants to use the endo mul circuit with `lambda` many bits,\n\n /// then `zeta_r` MUST satisfy the minimal distance property\n\n /// D = min { d(n*zeta_r, m*zeta_r) : n,m in [0, T] } >= R + 1,\n\n /// where `T = 2^{lambda/2 + 1} + 2^{lambda/2} - 1` is the output\n\n /// bound for the coefficients a, b of the equivalent scalar\n\n /// representation `a*zeta_r + b`.\n\n const ENDO_SCALAR: Self::ScalarField;\n\n\n\n /// Maximum number of bits for which security of endo mul is proven. MUST be an even number.\n\n const LAMBDA: usize;\n\n}\n", "file_path": "algebra/src/curves/models/mod.rs", "rank": 82, "score": 176072.13216895924 }, { "content": "/// Computations are expressed in terms of rank-1 constraint systems (R1CS).\n\n/// The `generate_constraints` method is called to generate constraints for\n\n/// both CRS generation and for proving.\n\npub trait ConstraintSynthesizer<F: Field> {\n\n /// Drives generation of new constraints inside `CS`.\n\n fn generate_constraints<CS: ConstraintSystemAbstract<F>>(\n\n self,\n\n cs: &mut CS,\n\n ) -> Result<(), SynthesisError>;\n\n}\n\n\n\nimpl<F: Field, CS: ConstraintSystemAbstract<F>> ConstraintSystemAbstract<F>\n\n for Namespace<'_, F, CS>\n\n{\n\n type Root = CS::Root;\n\n\n\n #[inline]\n\n fn one() -> Variable {\n\n CS::one()\n\n }\n\n\n\n #[inline]\n\n fn alloc<FN, A, AR>(&mut self, annotation: A, f: FN) -> Result<Variable, SynthesisError>\n", "file_path": "r1cs/core/src/constraint_system.rs", "rank": 83, "score": 175362.46893869428 }, { "content": "pub trait InjectiveMap<G: Group> {\n\n type Output: ToBytes + Serialize + for<'a> Deserialize<'a> + Clone + Eq + Hash + Default + Debug;\n\n fn injective_map(ge: &G) -> Result<Self::Output, CryptoError>;\n\n}\n\n\n\npub struct TECompressor;\n\n\n\nimpl<P: TEModelParameters> InjectiveMap<TEAffine<P>> for TECompressor {\n\n type Output = <P as ModelParameters>::BaseField;\n\n\n\n fn injective_map(ge: &TEAffine<P>) -> 
Result<Self::Output, CryptoError> {\n\n if !ge.is_in_correct_subgroup_assuming_on_curve() {\n\n return Err(CryptoError::InvalidElement(format!(\"{}\", ge)));\n\n }\n\n Ok(ge.x)\n\n }\n\n}\n\n\n\nimpl<P: TEModelParameters> InjectiveMap<TEProjective<P>> for TECompressor {\n\n type Output = <P as ModelParameters>::BaseField;\n", "file_path": "primitives/src/crh/injective_map/mod.rs", "rank": 84, "score": 173452.9578353747 }, { "content": "pub fn define_intrinsics() -> TokenStream {\n\n (quote! {\n\n {\n\n let mut begin = || {\n\n llvm_asm_string.borrow_mut().push_str(\"\\\"\");\n\n };\n\n\n\n let mut end = || {\n\n llvm_asm_string.borrow_mut().push_str(\"\n\n \\\"\");\n\n };\n\n\n\n let mut comment = | comment: &str | {\n\n llvm_asm_string.borrow_mut().push_str(&format!(\" // {}\", comment));\n\n };\n\n\n\n let mut mulxq = | a: &str, b: &str, c: &str | {\n\n llvm_asm_string.borrow_mut().push_str(&format!(\"\n\n mulxq {}, {}, {}\", a, b, c));\n\n };\n", "file_path": "algebra/mince/src/intrinsics.rs", "rank": 85, "score": 173132.84596317325 }, { "content": "pub fn define_arithmetic() -> TokenStream {\n\n (quote! {\n\n {\n\n macro_rules! mul_1 {\n\n ($a:expr, $b:ident, $zero:ident, $limbs:expr) => {\n\n movq($a, RDX);\n\n mulxq($b[0], R[0], R[1]);\n\n for j in 1..$limbs-1 {\n\n mulxq($b[j], RAX, R[((j + 1) % $limbs)]);\n\n adcxq(RAX, R[j]);\n\n }\n\n mulxq($b[$limbs-1], RAX, RCX);\n\n movq($zero, RBX);\n\n adcxq(RAX, R[$limbs-1]);\n\n adcxq(RBX, RCX);\n\n }\n\n }\n\n\n\n macro_rules! mul_add_1 {\n\n ($a:ident, $b:ident, $zero:ident, $i:ident, $limbs:expr) => {\n", "file_path": "algebra/mince/src/arithmetic.rs", "rank": 86, "score": 173132.84596317325 }, { "content": "/// Definition of a Merkle Tree whose leaves and nodes are Field Elements. 
The trait is\n\n/// designed to be efficient in memory by providing a digest-like interface that allows\n\n/// to update the tree one leaf at a time, without the necessity to keep all the leaves\n\n/// in memory in order to create the tree. The trait is generic with respect to the arity\n\n/// of the Merkle Tree and to the hash function used.\n\npub trait FieldBasedMerkleTree: Clone {\n\n type Position: Hash + Eq;\n\n type Parameters: FieldBasedMerkleTreeParameters;\n\n type MerklePath: FieldBasedMerkleTreePath<\n\n H = <Self::Parameters as FieldBasedMerkleTreeParameters>::H,\n\n Parameters = Self::Parameters,\n\n >;\n\n\n\n /// Append a new leaf to the Merkle Tree. The moment in which the root will be computed\n\n /// is transparent to the user and obeys to pre-defined internal policies.\n\n fn append(\n\n &mut self,\n\n leaf: <Self::Parameters as FieldBasedMerkleTreeParameters>::Data,\n\n ) -> Result<&mut Self, Error>;\n\n\n\n /// Force the computation of the root whatever its internal state and return an updated copy\n\n /// of the Merkle Tree. This function is idempotent, i.e. calling it multiple times will give\n\n /// the same result. 
It's also possible to `update` with more inputs in between.\n\n fn finalize(&self) -> Result<Self, Error>;\n\n\n", "file_path": "primitives/src/merkle_tree/field_based_mht/mod.rs", "rank": 87, "score": 172590.4176526605 }, { "content": "pub trait ToBitsGadget<ConstraintF: Field> {\n\n fn to_bits<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n ) -> Result<Vec<Boolean>, SynthesisError>;\n\n\n\n /// Additionally checks if the produced list of booleans is 'valid'.\n\n fn to_bits_strict<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n ) -> Result<Vec<Boolean>, SynthesisError>;\n\n}\n\n\n", "file_path": "r1cs/gadgets/std/src/bits/mod.rs", "rank": 88, "score": 171611.56106376508 }, { "content": "/// If condition is `true`, return `first`; else, select `second`.\n\npub trait CondSelectGadget<ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n{\n\n fn conditionally_select<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n cs: CS,\n\n cond: &Boolean,\n\n first: &Self,\n\n second: &Self,\n\n ) -> Result<Self, SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n}\n\n\n", "file_path": "r1cs/gadgets/std/src/select.rs", "rank": 89, "score": 171611.56106376508 }, { "content": "pub trait ToBytesGadget<ConstraintF: Field> {\n\n fn to_bytes<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n ) -> Result<Vec<UInt8>, SynthesisError>;\n\n\n\n /// Additionally checks if the produced list of booleans is 'valid'.\n\n fn to_bytes_strict<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n ) -> Result<Vec<UInt8>, SynthesisError>;\n\n}\n\n\n", "file_path": "r1cs/gadgets/std/src/bits/mod.rs", "rank": 90, "score": 171611.56106376508 }, { "content": "pub trait FromBitsGadget<ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n{\n\n /// Given a bit representation `bits` of bit len not bigger than CAPACITY\n\n /// (i.e. 
MODULUS - 1) of `Self` in *big endian* form, reconstructs a `Self`.\n\n fn from_bits<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n cs: CS,\n\n bits: &[Boolean],\n\n ) -> Result<Self, SynthesisError>;\n\n}\n\n\n\nimpl<ConstraintF: Field> ToBitsGadget<ConstraintF> for Boolean {\n\n fn to_bits<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n &self,\n\n _: CS,\n\n ) -> Result<Vec<Boolean>, SynthesisError> {\n\n Ok(vec![*self])\n\n }\n\n\n", "file_path": "r1cs/gadgets/std/src/bits/mod.rs", "rank": 91, "score": 171611.56106376508 }, { "content": "pub trait FromBytesChecked: Sized + FromBytes + SemanticallyValid {\n\n /// If `Self` implements `SemanticallyValid` trait, may be more efficient to\n\n /// perform semantic checks while deserializing, in order to return immediately\n\n /// in case of errors. The function passes if and only if `reader` represents\n\n /// a valid serialization of `Self`, and a semantically valid instance of `Self`.\n\n fn read_checked<R: Read>(reader: R) -> IoResult<Self> {\n\n let read = Self::read(reader)?;\n\n if read.is_valid() {\n\n Ok(read)\n\n } else {\n\n Err(IoError::new(\n\n ErrorKind::InvalidData,\n\n \"Semantic checks failed\",\n\n ))\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! 
array_bytes {\n\n ($N:expr) => {\n", "file_path": "algebra/src/bytes.rs", "rank": 92, "score": 171404.77776819977 }, { "content": "fn ecvrf_keygen(c: &mut Criterion) {\n\n c.bench_function(\"FieldSchnorrMNT4: KeyGen\", move |b| {\n\n b.iter(|| {\n\n let mut rng = &mut rand::thread_rng();\n\n EcVrfMNT4::keygen(&mut rng)\n\n })\n\n });\n\n}\n\n\n", "file_path": "primitives/benches/crypto_primitives/ecvrf.rs", "rank": 93, "score": 170173.6436359297 }, { "content": "fn ecvrf_prove(c: &mut Criterion) {\n\n let mut rng = &mut rand::thread_rng();\n\n let pp = <BHMNT6 as FixedLengthCRH>::setup(rng).unwrap();\n\n let (pk, sk) = EcVrfMNT4::keygen(&mut rng);\n\n let message = MNT4Fr::rand(rng);\n\n\n\n c.bench_function(\"FieldSchnorrMNT4: Sign\", move |b| {\n\n b.iter(|| {\n\n let mut rng = &mut rand::thread_rng();\n\n EcVrfMNT4::prove(&mut rng, &pp, &pk, &sk, message).unwrap()\n\n })\n\n });\n\n}\n\n\n", "file_path": "primitives/benches/crypto_primitives/ecvrf.rs", "rank": 94, "score": 170173.6436359297 }, { "content": "fn ecvrf_verify(c: &mut Criterion) {\n\n let mut rng = &mut rand::thread_rng();\n\n let pp = <BHMNT6 as FixedLengthCRH>::setup(rng).unwrap();\n\n let (pk, sk) = EcVrfMNT4::keygen(&mut rng);\n\n let message = MNT4Fr::rand(rng);\n\n let proof = EcVrfMNT4::prove(&mut rng, &pp, &pk, &sk, message).unwrap();\n\n\n\n c.bench_function(\"FieldSchnorrMNT4: Proof To Hash\", move |b| {\n\n b.iter(|| EcVrfMNT4::proof_to_hash(&pp, &pk, message, &proof).unwrap())\n\n });\n\n}\n\n\n\ncriterion_group! 
{\n\n name = ecvrf;\n\n config = Criterion::default().sample_size(20);\n\n targets = ecvrf_keygen, ecvrf_prove, ecvrf_verify\n\n}\n\n\n\ncriterion_main!(ecvrf);\n", "file_path": "primitives/benches/crypto_primitives/ecvrf.rs", "rank": 95, "score": 170173.6436359297 }, { "content": "fn read_affine_vec<G: AffineCurve, R: Read>(len: usize, mut reader: R) -> IoResult<Vec<G>> {\n\n let mut v = vec![];\n\n for i in 0..len {\n\n let g = G::read(&mut reader).map_err(|e| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"invalid point {}: {}\", i, e),\n\n )\n\n })?;\n\n v.push(g);\n\n }\n\n Ok(v)\n\n}\n\n\n\n/// A verification key in the Groth16 SNARK.\n\n#[derive(Clone, Debug, Serialize, Deserialize, CanonicalSerialize, CanonicalDeserialize)]\n\npub struct VerifyingKey<E: PairingEngine> {\n\n pub alpha_g1_beta_g2: E::Fqk,\n\n pub gamma_g2: E::G2Affine,\n\n pub delta_g2: E::G2Affine,\n", "file_path": "proof-systems/src/groth16/mod.rs", "rank": 96, "score": 170102.25652087445 }, { "content": "/// Uses two bits to perform a lookup into a table\n\npub trait TwoBitLookupGadget<ConstraintF: Field>\n\nwhere\n\n Self: Sized,\n\n{\n\n type TableConstant;\n\n fn two_bit_lookup<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n cs: CS,\n\n bits: &[Boolean],\n\n constants: &[Self::TableConstant],\n\n ) -> Result<Self, SynthesisError>;\n\n\n\n fn two_bit_lookup_lc<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n cs: CS,\n\n precomp: &Boolean,\n\n b: &[Boolean],\n\n c: &[Self::TableConstant],\n\n ) -> Result<Self, SynthesisError>;\n\n\n\n fn cost() -> usize;\n\n\n\n fn cost_of_lc() -> usize {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "r1cs/gadgets/std/src/select.rs", "rank": 97, "score": 169840.93191128995 }, { "content": "pub trait ToCompressedBitsGadget<ConstraintF: Field> {\n\n /// Enforce compression of an element through serialization of the x coordinate and storing\n\n /// a sign bit for the y coordinate. 
For GT elements we assume x <-> c1 and y <-> c0 to avoid\n\n /// confusion. When enforcing byte serialization of a field element, \"x_in_field\" and \"y_in_field\"\n\n /// flags could be set in order to enforce too that their bit representation is under the\n\n /// field modulus (default behaviour is both set to false).\n\n fn to_compressed<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n &self,\n\n cs: CS,\n\n ) -> Result<Vec<Boolean>, SynthesisError>;\n\n}\n\n\n\nimpl<ConstraintF: Field> ToBytesGadget<ConstraintF> for [UInt8] {\n\n fn to_bytes<CS: ConstraintSystemAbstract<ConstraintF>>(\n\n &self,\n\n _cs: CS,\n\n ) -> Result<Vec<UInt8>, SynthesisError> {\n\n Ok(self.to_vec())\n\n }\n\n\n", "file_path": "r1cs/gadgets/std/src/bits/mod.rs", "rank": 98, "score": 169834.69791955646 }, { "content": "fn to_internal_repr(mut x: Vec<u8>, mut y: Vec<u8>) -> Projective {\n\n // Hex is in big-endian but FromBytes accepts only in little-endian, so we need to reverse.\n\n // Plus, we represent the Field using a BigInteger320, e.g. with 40 bytes instead of 32, so we need to pad.\n\n x.reverse();\n\n x.append(&mut vec![0u8; 8]);\n\n y.reverse();\n\n y.append(&mut vec![0u8; 8]);\n\n\n\n // Collect both coordinates\n\n x.append(&mut y);\n\n\n\n // Push infinity flag being 0\n\n x.push(0u8);\n\n\n\n // Read point (let's use the FromBytes for simplicity)\n\n Affine::read(&x[..]).unwrap().into_projective()\n\n}\n\n\n\n#[test]\n", "file_path": "algebra/src/curves/secp256k1/tests.rs", "rank": 99, "score": 169523.40760534751 } ]
Rust
cleu-orm/src/crud/utils.rs
c410-f3r/cleu-orm
34d22f1f4bb01ff792262ebb63bdeec36dc0d603
use crate::{ buffer_try_push_str, buffer_write_fmt, crud::{TdEntity, TdError}, write_column_alias, write_select_field, FromRowsSuffix, SelectLimit, SelectOrderBy, SqlWriter, Suffix, Table, TableDefs, }; use sqlx_core::{ postgres::{PgPool, PgRow}, query::query, row::Row, }; #[inline] pub fn seek_related_entities<'entity, B, F, R, TD>( buffer: &mut B, rows: &[PgRow], suffix: Suffix, suffix_related: Suffix, mut cb: F, ) -> Result<usize, TD::Error> where B: cl_traits::String, F: FnMut(R) -> Result<(), TD::Error>, R: FromRowsSuffix<B, Error = TD::Error>, TD: TableDefs<'entity>, { if rows.is_empty() { return Ok(0); } let first_row = if let Some(elem) = rows.first() { elem } else { return Ok(0); }; let first_rslt = R::from_rows_suffix(rows, buffer, suffix_related, first_row); let (mut counter, mut previous) = if let Ok((skip, entity)) = first_rslt { write_column_alias(buffer, TD::TABLE_NAME, suffix, TD::PRIMARY_KEY_NAME)?; let previous = first_row.try_get(buffer.as_ref()).map_err(Into::into)?; buffer.clear(); cb(entity)?; (skip, previous) } else { buffer.clear(); return Ok(1); }; loop { if counter >= rows.len() { break; } let row = if let Some(elem) = rows.get(counter) { elem } else { break; }; let curr_rows = rows.get(counter..).unwrap_or_default(); let (skip, entity) = R::from_rows_suffix(curr_rows, buffer, suffix_related, row)?; write_column_alias(buffer, TD::TABLE_NAME, suffix, TD::PRIMARY_KEY_NAME)?; let curr: i64 = row.try_get(buffer.as_ref()).map_err(Into::into)?; buffer.clear(); if previous == curr { cb(entity)?; counter = counter.wrapping_add(skip); } else { break; } previous = curr; } Ok(counter) } #[inline] pub(crate) async fn read_all<'entity, R, B, TD>( buffer: &mut B, pool: &PgPool, table: &Table<'entity, TD>, ) -> Result<Vec<R>, TdError<'entity, TD>> where B: cl_traits::String, R: FromRowsSuffix<B, Error = TD::Error>, TD: TableDefs<'entity>, TD::Associations: SqlWriter<B, Error = TD::Error>, TD::Error: From<crate::Error>, { table.write_select(buffer, 
SelectOrderBy::Ascending, SelectLimit::All, &mut |_| Ok(()))?; let rows = query(buffer.as_ref()).fetch_all(pool).await.map_err(Into::into)?; buffer.clear(); collect_entities_tables(buffer, &rows, table) } #[inline] pub(crate) async fn read_by_id<'entity, B, TD>( buffer: &mut B, id: &TD::PrimaryKeyValue, pool: &PgPool, table: &Table<'entity, TD>, ) -> Result<TdEntity<'entity, TD>, TdError<'entity, TD>> where B: cl_traits::String, TD: TableDefs<'entity>, TD::Associations: SqlWriter<B, Error = TD::Error>, TD::Entity: FromRowsSuffix<B, Error = TD::Error>, TD::Error: From<crate::Error>, { table.write_select(buffer, SelectOrderBy::Ascending, SelectLimit::All, &mut |b| { write_select_field( b, TD::TABLE_NAME, TD::TABLE_NAME_ALIAS, table.suffix(), table.id_field().name(), )?; buffer_write_fmt(b, format_args!(" = {id}")) })?; let rows = query(buffer.as_ref()).fetch_all(pool).await.map_err(Into::into)?; buffer.clear(); let first_row = rows.first().ok_or(crate::Error::NoDatabaseRowResult)?; Ok(TD::Entity::from_rows_suffix(&rows, buffer, table.suffix(), first_row)?.1) } #[inline] pub(crate) async fn read_all_with_params<'entity, R, B, TD>( buffer: &mut B, pool: &PgPool, table: &Table<'entity, TD>, order_by: SelectOrderBy, select_limit: SelectLimit, where_str: &str, ) -> Result<Vec<R>, TdError<'entity, TD>> where B: cl_traits::String, R: FromRowsSuffix<B, Error = TD::Error>, TD: TableDefs<'entity>, TD::Associations: SqlWriter<B, Error = TD::Error>, TD::Error: From<crate::Error>, { table.write_select(buffer, order_by, select_limit, &mut |b| buffer_try_push_str(b, where_str))?; let rows = query(buffer.as_ref()).fetch_all(pool).await.map_err(Into::into)?; buffer.clear(); collect_entities_tables(buffer, &rows, table) } #[inline] fn collect_entities_tables<'entity, R, B, TD>( buffer: &mut B, rows: &[PgRow], table: &Table<'entity, TD>, ) -> Result<Vec<R>, TD::Error> where B: cl_traits::String, R: FromRowsSuffix<B, Error = TD::Error>, TD: TableDefs<'entity>, { let mut rslt = 
Vec::new(); let mut counter: usize = 0; loop { if counter >= rows.len() { break; } let actual_rows = rows.get(counter..).unwrap_or_default(); let skip = seek_related_entities::<_, _, _, TD>( buffer, actual_rows, table.suffix(), table.suffix(), |entity| { rslt.push(entity); Ok(()) }, )?; counter = counter.wrapping_add(skip); } Ok(rslt) }
use crate::{ buffer_try_push_str, buffer_write_fmt, crud::{TdEntity, TdError}, write_column_alias, write_select_field, FromRowsSuffix, SelectLimit, SelectOrderBy, SqlWriter, Suffix, Table, TableDefs, }; use sqlx_core::{ postgres::{PgPool, PgRow}, query::query, row::Row, }; #[inline] pub fn seek_related_entities<'entity, B, F, R, TD>( buffer: &mut B, rows: &[PgRow], suffix: Suffix, suffix_related: Suffix, mut cb: F, ) -> Result<usize, TD::Error> where B: cl_traits::String, F: FnMut(R) -> Result<(), TD::Error>, R: FromRowsSuffix<B, Error = TD::Error>, TD: TableDefs<'entity>, { if rows.is_empty() { return Ok(0); } let first_row = if let Some(elem) = rows.first() { elem } else { return Ok(0); }; let first_rslt = R::from_rows_suffix(rows, buffer, suffix_related, first_row); let (mut counter, mut previous) = if let Ok((skip, entity)) = first_rslt { write_column_alias(buffer, TD::TABLE_NAME, suffix, TD::PRIMARY_KEY_NAME)?; let previous = first_row.try_get(buffer.as_ref()).map_err(Into::into)?; buffer.clear(); cb(entity)?; (skip, previous) } else { buffer.clear(); return Ok(1); }; loop { if counter >= rows.len() { break; } let row = if let Some(elem) = rows.get(counter) { elem } else { break; }; let curr_rows = rows.get(counte
ated_entities::<_, _, _, TD>( buffer, actual_rows, table.suffix(), table.suffix(), |entity| { rslt.push(entity); Ok(()) }, )?; counter = counter.wrapping_add(skip); } Ok(rslt) }
r..).unwrap_or_default(); let (skip, entity) = R::from_rows_suffix(curr_rows, buffer, suffix_related, row)?; write_column_alias(buffer, TD::TABLE_NAME, suffix, TD::PRIMARY_KEY_NAME)?; let curr: i64 = row.try_get(buffer.as_ref()).map_err(Into::into)?; buffer.clear(); if previous == curr { cb(entity)?; counter = counter.wrapping_add(skip); } else { break; } previous = curr; } Ok(counter) } #[inline] pub(crate) async fn read_all<'entity, R, B, TD>( buffer: &mut B, pool: &PgPool, table: &Table<'entity, TD>, ) -> Result<Vec<R>, TdError<'entity, TD>> where B: cl_traits::String, R: FromRowsSuffix<B, Error = TD::Error>, TD: TableDefs<'entity>, TD::Associations: SqlWriter<B, Error = TD::Error>, TD::Error: From<crate::Error>, { table.write_select(buffer, SelectOrderBy::Ascending, SelectLimit::All, &mut |_| Ok(()))?; let rows = query(buffer.as_ref()).fetch_all(pool).await.map_err(Into::into)?; buffer.clear(); collect_entities_tables(buffer, &rows, table) } #[inline] pub(crate) async fn read_by_id<'entity, B, TD>( buffer: &mut B, id: &TD::PrimaryKeyValue, pool: &PgPool, table: &Table<'entity, TD>, ) -> Result<TdEntity<'entity, TD>, TdError<'entity, TD>> where B: cl_traits::String, TD: TableDefs<'entity>, TD::Associations: SqlWriter<B, Error = TD::Error>, TD::Entity: FromRowsSuffix<B, Error = TD::Error>, TD::Error: From<crate::Error>, { table.write_select(buffer, SelectOrderBy::Ascending, SelectLimit::All, &mut |b| { write_select_field( b, TD::TABLE_NAME, TD::TABLE_NAME_ALIAS, table.suffix(), table.id_field().name(), )?; buffer_write_fmt(b, format_args!(" = {id}")) })?; let rows = query(buffer.as_ref()).fetch_all(pool).await.map_err(Into::into)?; buffer.clear(); let first_row = rows.first().ok_or(crate::Error::NoDatabaseRowResult)?; Ok(TD::Entity::from_rows_suffix(&rows, buffer, table.suffix(), first_row)?.1) } #[inline] pub(crate) async fn read_all_with_params<'entity, R, B, TD>( buffer: &mut B, pool: &PgPool, table: &Table<'entity, TD>, order_by: SelectOrderBy, select_limit: 
SelectLimit, where_str: &str, ) -> Result<Vec<R>, TdError<'entity, TD>> where B: cl_traits::String, R: FromRowsSuffix<B, Error = TD::Error>, TD: TableDefs<'entity>, TD::Associations: SqlWriter<B, Error = TD::Error>, TD::Error: From<crate::Error>, { table.write_select(buffer, order_by, select_limit, &mut |b| buffer_try_push_str(b, where_str))?; let rows = query(buffer.as_ref()).fetch_all(pool).await.map_err(Into::into)?; buffer.clear(); collect_entities_tables(buffer, &rows, table) } #[inline] fn collect_entities_tables<'entity, R, B, TD>( buffer: &mut B, rows: &[PgRow], table: &Table<'entity, TD>, ) -> Result<Vec<R>, TD::Error> where B: cl_traits::String, R: FromRowsSuffix<B, Error = TD::Error>, TD: TableDefs<'entity>, { let mut rslt = Vec::new(); let mut counter: usize = 0; loop { if counter >= rows.len() { break; } let actual_rows = rows.get(counter..).unwrap_or_default(); let skip = seek_rel
random
[]
Rust
awc/src/responses/response.rs
LGU-Web3-0/actix-web
5fd5875d2c72194232cc4356c7093c54e0fc700b
use std::{ cell::{Ref, RefCell, RefMut}, fmt, mem, pin::Pin, task::{Context, Poll}, time::{Duration, Instant}, }; use actix_http::{ error::PayloadError, header::HeaderMap, BoxedPayloadStream, Extensions, HttpMessage, Payload, ResponseHead, StatusCode, Version, }; use actix_rt::time::{sleep, Sleep}; use bytes::Bytes; use futures_core::Stream; use pin_project_lite::pin_project; use serde::de::DeserializeOwned; #[cfg(feature = "cookies")] use crate::cookie::{Cookie, ParseError as CookieParseError}; use super::{JsonBody, ResponseBody, ResponseTimeout}; pin_project! { pub struct ClientResponse<S = BoxedPayloadStream> { pub(crate) head: ResponseHead, #[pin] pub(crate) payload: Payload<S>, pub(crate) timeout: ResponseTimeout, pub(crate) extensions: RefCell<Extensions>, } } impl<S> ClientResponse<S> { pub(crate) fn new(head: ResponseHead, payload: Payload<S>) -> Self { ClientResponse { head, payload, timeout: ResponseTimeout::default(), extensions: RefCell::new(Extensions::new()), } } #[inline] pub(crate) fn head(&self) -> &ResponseHead { &self.head } #[inline] pub fn version(&self) -> Version { self.head().version } #[inline] pub fn status(&self) -> StatusCode { self.head().status } #[inline] pub fn headers(&self) -> &HeaderMap { &self.head().headers } pub fn map_body<F, U>(mut self, f: F) -> ClientResponse<U> where F: FnOnce(&mut ResponseHead, Payload<S>) -> Payload<U>, { let payload = f(&mut self.head, self.payload); ClientResponse { payload, head: self.head, timeout: self.timeout, extensions: self.extensions, } } pub fn timeout(self, dur: Duration) -> Self { let timeout = match self.timeout { ResponseTimeout::Disabled(Some(mut timeout)) | ResponseTimeout::Enabled(mut timeout) => match Instant::now().checked_add(dur) { Some(deadline) => { timeout.as_mut().reset(deadline.into()); ResponseTimeout::Enabled(timeout) } None => ResponseTimeout::Enabled(Box::pin(sleep(dur))), }, _ => ResponseTimeout::Enabled(Box::pin(sleep(dur))), }; Self { payload: self.payload, head: 
self.head, timeout, extensions: self.extensions, } } pub(crate) fn _timeout(mut self, timeout: Option<Pin<Box<Sleep>>>) -> Self { self.timeout = ResponseTimeout::Disabled(timeout); self } #[cfg(feature = "cookies")] pub fn cookies(&self) -> Result<Ref<'_, Vec<Cookie<'static>>>, CookieParseError> { struct Cookies(Vec<Cookie<'static>>); if self.extensions().get::<Cookies>().is_none() { let mut cookies = Vec::new(); for hdr in self.headers().get_all(&actix_http::header::SET_COOKIE) { let s = std::str::from_utf8(hdr.as_bytes()).map_err(CookieParseError::from)?; cookies.push(Cookie::parse_encoded(s)?.into_owned()); } self.extensions_mut().insert(Cookies(cookies)); } Ok(Ref::map(self.extensions(), |ext| { &ext.get::<Cookies>().unwrap().0 })) } #[cfg(feature = "cookies")] pub fn cookie(&self, name: &str) -> Option<Cookie<'static>> { if let Ok(cookies) = self.cookies() { for cookie in cookies.iter() { if cookie.name() == name { return Some(cookie.to_owned()); } } } None } } impl<S> ClientResponse<S> where S: Stream<Item = Result<Bytes, PayloadError>>, { pub fn body(&mut self) -> ResponseBody<S> { ResponseBody::new(self) } pub fn json<T: DeserializeOwned>(&mut self) -> JsonBody<S, T> { JsonBody::new(self) } } impl<S> fmt::Debug for ClientResponse<S> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, "\nClientResponse {:?} {}", self.version(), self.status(),)?; writeln!(f, " headers:")?; for (key, val) in self.headers().iter() { writeln!(f, " {:?}: {:?}", key, val)?; } Ok(()) } } impl<S> HttpMessage for ClientResponse<S> { type Stream = S; fn headers(&self) -> &HeaderMap { &self.head.headers } fn take_payload(&mut self) -> Payload<S> { mem::replace(&mut self.payload, Payload::None) } fn extensions(&self) -> Ref<'_, Extensions> { self.extensions.borrow() } fn extensions_mut(&self) -> RefMut<'_, Extensions> { self.extensions.borrow_mut() } } impl<S> Stream for ClientResponse<S> where S: Stream<Item = Result<Bytes, PayloadError>> + Unpin, { type Item = 
Result<Bytes, PayloadError>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let this = self.project(); this.timeout.poll_timeout(cx)?; this.payload.poll_next(cx) } } #[cfg(test)] mod tests { use static_assertions::assert_impl_all; use super::*; use crate::any_body::AnyBody; assert_impl_all!(ClientResponse: Unpin); assert_impl_all!(ClientResponse<()>: Unpin); assert_impl_all!(ClientResponse<AnyBody>: Unpin); }
use std::{ cell::{Ref, RefCell, RefMut}, fmt, mem, pin::Pin, task::{Context, Poll}, time::{Duration, Instant}, }; use actix_http::{ error::PayloadError, header::HeaderMap, BoxedPayloadStream, Extensions, HttpMessage, Payload, ResponseHead, StatusCode, Version, }; use actix_rt::time::{sleep, Sleep}; use bytes::Bytes; use futures_core::Stream; use pin_project_lite::pin_project; use serde::de::DeserializeOwned; #[cfg(feature = "cookies")] use crate::cookie::{Cookie, ParseError as CookieParseError}; use super::{JsonBody, ResponseBody, ResponseTimeout}; pin_project! { pub struct ClientResponse<S = BoxedPayloadStream> { pub(crate) head: ResponseHead, #[pin] pub(crate) payload: Payload<S>, pub(crate) timeout: ResponseTimeout, pub(crate) extensions: RefCell<Extensions>, } } impl<S> ClientResponse<S> { pub(crate) fn new(head: ResponseHead, payload: Payload<S>) -> Self { ClientResponse { head, payload, timeout: ResponseTimeout::default(), extensions: RefCell::new(Extensions::new()), } } #[inline] pub(crate) fn head(&self) -> &ResponseHead { &self.head } #[inline] pub fn version(&self) -> Version { self.head().version } #[inline] pub fn status(&self) -> StatusCode { self.head().status } #[inline] pub fn headers(&self) -> &HeaderMap { &self.head().headers } pub fn map_body<F, U>(mut self, f: F) -> ClientResponse<U> where F: FnOnce(&mut ResponseHead, Payload<S>) -> Payload<U>, { let payload = f(&mut self.head, self.payload); ClientResponse { payload, head: self.head, timeout: self.timeout, extensions: self.extensions, } } pub fn timeout(self, dur: Duration) -> Self { let timeout = match self.timeout { ResponseTimeout::Disabled(Some(mut timeout)) | ResponseTimeout::Enabled(mut timeout) => match Instant::now().checked_add(dur) { Some(deadline) => { timeout.as_mut().reset(deadline.into()); ResponseTimeout::Enabled(timeout) } None => ResponseTimeout::Enabled(Box::pin(sleep(dur))), }, _ => ResponseTimeout::Enabled(Box::pin(sleep(dur))), }; Self { payload: self.payload, head: 
self.head, timeout, extensions: self.extensions, } } pub(crate) fn _timeout(mut self, timeout: Option<Pin<Box<Sleep>>>) -> Self { self.timeout = ResponseTimeout::Disabled(timeout); self } #[cfg(feature = "cookies")] pub fn cookies(&self) -> Result<Ref<'_, Vec<Cookie<'static>>>, CookieParseError> { struct Cookies(Vec<Cookie<'static>>); if self.extensions().get::<Cookies>().is_none() { let mut cookies = Vec::new(); for hdr in self.headers().get_all(&actix_http::header::SET_COOKIE) { let s = std::str::from_utf8(hdr.as_bytes()).map_err(CookieParseError::from)?; cookies.push(Cookie::parse_encoded(s)?.into_owned()); } self.extensions_mut().insert(Cookies(cookies)); } Ok(Ref::map(self.extensions(), |ext| { &ext.get::<Cookies>().unwrap().0 })) } #[cfg(feature = "cookies")] pub fn cookie(&self, name: &str) -> Option<Cookie<'static>> { if let Ok(cookies) = self.cookies() { for cookie in cookies.iter() {
} impl<S> ClientResponse<S> where S: Stream<Item = Result<Bytes, PayloadError>>, { pub fn body(&mut self) -> ResponseBody<S> { ResponseBody::new(self) } pub fn json<T: DeserializeOwned>(&mut self) -> JsonBody<S, T> { JsonBody::new(self) } } impl<S> fmt::Debug for ClientResponse<S> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, "\nClientResponse {:?} {}", self.version(), self.status(),)?; writeln!(f, " headers:")?; for (key, val) in self.headers().iter() { writeln!(f, " {:?}: {:?}", key, val)?; } Ok(()) } } impl<S> HttpMessage for ClientResponse<S> { type Stream = S; fn headers(&self) -> &HeaderMap { &self.head.headers } fn take_payload(&mut self) -> Payload<S> { mem::replace(&mut self.payload, Payload::None) } fn extensions(&self) -> Ref<'_, Extensions> { self.extensions.borrow() } fn extensions_mut(&self) -> RefMut<'_, Extensions> { self.extensions.borrow_mut() } } impl<S> Stream for ClientResponse<S> where S: Stream<Item = Result<Bytes, PayloadError>> + Unpin, { type Item = Result<Bytes, PayloadError>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { let this = self.project(); this.timeout.poll_timeout(cx)?; this.payload.poll_next(cx) } } #[cfg(test)] mod tests { use static_assertions::assert_impl_all; use super::*; use crate::any_body::AnyBody; assert_impl_all!(ClientResponse: Unpin); assert_impl_all!(ClientResponse<()>: Unpin); assert_impl_all!(ClientResponse<AnyBody>: Unpin); }
if cookie.name() == name { return Some(cookie.to_owned()); } } } None }
function_block-function_prefix_line
[ { "content": "#[allow(non_snake_case)]\n\npub fn Header(name: &'static str, value: &'static str) -> impl Guard {\n\n HeaderGuard(\n\n header::HeaderName::try_from(name).unwrap(),\n\n header::HeaderValue::from_static(value),\n\n )\n\n}\n\n\n", "file_path": "actix-web/src/guard.rs", "rank": 0, "score": 327944.5562526397 }, { "content": "/// Creates a guard using the given function.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use actix_web::{guard, web, HttpResponse};\n\n///\n\n/// web::route()\n\n/// .guard(guard::fn_guard(|ctx| {\n\n/// ctx.head().headers().contains_key(\"content-type\")\n\n/// }))\n\n/// .to(|| HttpResponse::Ok());\n\n/// ```\n\npub fn fn_guard<F>(f: F) -> impl Guard\n\nwhere\n\n F: Fn(&GuardContext<'_>) -> bool,\n\n{\n\n FnGuard(f)\n\n}\n\n\n", "file_path": "actix-web/src/guard.rs", "rank": 1, "score": 309171.7963958047 }, { "content": "/// Criterion Benchmark for async Service\n\n/// Should be used from within criterion group:\n\n/// ```ignore\n\n/// let mut criterion: ::criterion::Criterion<_> =\n\n/// ::criterion::Criterion::default().configure_from_args();\n\n/// bench_async_service(&mut criterion, ok_service(), \"async_service_direct\");\n\n/// ```\n\n///\n\n/// Usable for benching Service wrappers:\n\n/// Using minimum service code implementation we first measure\n\n/// time to run minimum service, then measure time with wrapper.\n\n///\n\n/// Sample output\n\n/// async_service_direct time: [1.0908 us 1.1656 us 1.2613 us]\n\npub fn bench_async_service<S>(c: &mut Criterion, srv: S, name: &str)\n\nwhere\n\n S: Service<ServiceRequest, Response = ServiceResponse, Error = Error> + 'static,\n\n{\n\n let rt = actix_rt::System::new();\n\n let srv = Rc::new(RefCell::new(srv));\n\n\n\n let req = TestRequest::default().to_srv_request();\n\n assert!(rt\n\n .block_on(srv.borrow_mut().call(req))\n\n .unwrap()\n\n .status()\n\n .is_success());\n\n\n\n // start benchmark loops\n\n c.bench_function(name, move |b| {\n\n b.iter_custom(|iters| {\n\n let srv = 
srv.clone();\n\n // exclude request generation, it appears it takes significant time vs call (3us vs 1us)\n\n let futs = (0..iters)\n", "file_path": "actix-web/benches/service.rs", "rank": 2, "score": 300821.6239161589 }, { "content": "#[inline]\n\npub fn http_percent_encode(f: &mut fmt::Formatter<'_>, bytes: &[u8]) -> fmt::Result {\n\n let encoded = percent_encoding::percent_encode(bytes, HTTP_VALUE);\n\n fmt::Display::fmt(&encoded, f)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn comma_delimited_parsing() {\n\n let headers = vec![];\n\n let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();\n\n assert_eq!(res, vec![0; 0]);\n\n\n\n let headers = vec![\n\n HeaderValue::from_static(\"1, 2\"),\n\n HeaderValue::from_static(\"3,4\"),\n\n ];\n\n let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();\n", "file_path": "actix-http/src/header/utils.rs", "rank": 3, "score": 291171.5345435634 }, { "content": "#[inline]\n\npub fn fmt_comma_delimited<T>(f: &mut fmt::Formatter<'_>, parts: &[T]) -> fmt::Result\n\nwhere\n\n T: fmt::Display,\n\n{\n\n let mut iter = parts.iter();\n\n\n\n if let Some(part) = iter.next() {\n\n fmt::Display::fmt(part, f)?;\n\n }\n\n\n\n for part in iter {\n\n f.write_str(\", \")?;\n\n fmt::Display::fmt(part, f)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Percent encode a sequence of bytes with a character set defined in [RFC 5987 §3.2].\n\n///\n\n/// [RFC 5987 §3.2]: https://datatracker.ietf.org/doc/html/rfc5987#section-3.2\n", "file_path": "actix-http/src/header/utils.rs", "rank": 4, "score": 289956.949250788 }, { "content": "#[inline]\n\npub fn file_extension_to_mime(ext: &str) -> mime::Mime {\n\n from_ext(ext).first_or_octet_stream()\n\n}\n\n\n", "file_path": "actix-files/src/lib.rs", "rank": 5, "score": 270048.3621340519 }, { "content": "#[allow(non_snake_case)]\n\npub fn Host(host: impl AsRef<str>) -> HostGuard {\n\n HostGuard {\n\n host: host.as_ref().to_string(),\n\n scheme: None,\n\n 
}\n\n}\n\n\n", "file_path": "actix-web/src/guard.rs", "rank": 6, "score": 264181.0751750656 }, { "content": "/// Executes blocking function on a thread pool, returns future that resolves to result of the\n\n/// function execution.\n\npub fn block<F, R>(f: F) -> impl Future<Output = Result<R, BlockingError>>\n\nwhere\n\n F: FnOnce() -> R + Send + 'static,\n\n R: Send + 'static,\n\n{\n\n let fut = actix_rt::task::spawn_blocking(f);\n\n async { fut.await.map_err(|_| BlockingError) }\n\n}\n", "file_path": "actix-web/src/web.rs", "rank": 7, "score": 258846.55529251677 }, { "content": "struct FnGuard<F: Fn(&GuardContext<'_>) -> bool>(F);\n\n\n\nimpl<F> Guard for FnGuard<F>\n\nwhere\n\n F: Fn(&GuardContext<'_>) -> bool,\n\n{\n\n fn check(&self, ctx: &GuardContext<'_>) -> bool {\n\n (self.0)(ctx)\n\n }\n\n}\n\n\n\nimpl<F> Guard for F\n\nwhere\n\n F: Fn(&GuardContext<'_>) -> bool,\n\n{\n\n fn check(&self, ctx: &GuardContext<'_>) -> bool {\n\n (self)(ctx)\n\n }\n\n}\n\n\n", "file_path": "actix-web/src/guard.rs", "rank": 8, "score": 255540.2956855674 }, { "content": "/// Extracts and trims first value for given header name.\n\nfn first_header_value<'a>(req: &'a RequestHead, name: &'_ HeaderName) -> Option<&'a str> {\n\n let hdr = req.headers.get(name)?.to_str().ok()?;\n\n let val = hdr.split(',').next()?.trim();\n\n Some(val)\n\n}\n\n\n\n/// HTTP connection information.\n\n///\n\n/// `ConnectionInfo` implements `FromRequest` and can be extracted in handlers.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// # use actix_web::{HttpResponse, Responder};\n\n/// use actix_web::dev::ConnectionInfo;\n\n///\n\n/// async fn handler(conn: ConnectionInfo) -> impl Responder {\n\n/// match conn.host() {\n\n/// \"actix.rs\" => HttpResponse::Ok().body(\"Welcome!\"),\n\n/// \"admin.actix.rs\" => HttpResponse::Ok().body(\"Admin portal.\"),\n\n/// _ => HttpResponse::NotFound().finish()\n", "file_path": "actix-web/src/info.rs", "rank": 9, "score": 250917.70609195012 }, { "content": "fn http_msg(msg: 
impl AsRef<str>) -> BytesMut {\n\n let mut msg = msg\n\n .as_ref()\n\n .trim()\n\n .split('\\n')\n\n .into_iter()\n\n .map(|line| [line.trim_start(), \"\\r\"].concat())\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\");\n\n\n\n // remove trailing \\r\n\n msg.pop();\n\n\n\n if !msg.is_empty() && !msg.contains(\"\\r\\n\\r\\n\") {\n\n msg.push_str(\"\\r\\n\\r\\n\");\n\n }\n\n\n\n BytesMut::from(msg.as_bytes())\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 10, "score": 240463.3678907962 }, { "content": "fn update_head(encoding: ContentEncoding, head: &mut ResponseHead) {\n\n head.headers_mut()\n\n .insert(header::CONTENT_ENCODING, encoding.to_header_value());\n\n head.headers_mut()\n\n .insert(header::VARY, HeaderValue::from_static(\"accept-encoding\"));\n\n\n\n head.no_chunking(false);\n\n}\n\n\n", "file_path": "actix-http/src/encoding/encoder.rs", "rank": 11, "score": 240421.09089799447 }, { "content": "/// Write integer to a `fmt::Write`.\n\npub fn itoa_fmt<W: fmt::Write, V: itoa::Integer>(mut wr: W, value: V) -> fmt::Result {\n\n let mut buf = itoa::Buffer::new();\n\n wr.write_str(buf.format(value))\n\n}\n\n\n\n#[derive(Debug, Clone, Display, Error)]\n\n#[display(fmt = \"quality out of bounds\")]\n\n#[non_exhaustive]\n\npub struct QualityOutOfBounds;\n\n\n\nimpl TryFrom<f32> for Quality {\n\n type Error = QualityOutOfBounds;\n\n\n\n #[inline]\n\n fn try_from(value: f32) -> Result<Self, Self::Error> {\n\n if (0.0..=MAX_QUALITY_FLOAT).contains(&value) {\n\n Ok(Quality::from_f32(value))\n\n } else {\n\n Err(QualityOutOfBounds)\n\n }\n", "file_path": "actix-http/src/header/shared/quality.rs", "rank": 12, "score": 235151.1472212228 }, { "content": "/// Creates a new any-method route with handler.\n\n///\n\n/// ```\n\n/// use actix_web::{web, App, HttpResponse, Responder};\n\n///\n\n/// async fn index() -> impl Responder {\n\n/// HttpResponse::Ok()\n\n/// }\n\n///\n\n/// App::new().service(\n\n/// web::resource(\"/\").route(\n\n/// 
web::to(index))\n\n/// );\n\n/// ```\n\npub fn to<F, Args>(handler: F) -> Route\n\nwhere\n\n F: Handler<Args>,\n\n Args: FromRequest + 'static,\n\n F::Output: Responder + 'static,\n\n{\n\n Route::new().to(handler)\n\n}\n\n\n", "file_path": "actix-web/src/web.rs", "rank": 13, "score": 225809.53529161142 }, { "content": "#[allow(non_snake_case)]\n\npub fn All<F: Guard + 'static>(guard: F) -> AllGuard {\n\n AllGuard {\n\n guards: vec![Box::new(guard)],\n\n }\n\n}\n\n\n\n/// A collection of guards that match if the conjunction of their `check` outcomes is true.\n\n///\n\n/// That is, **all** contained guard needs to match in order for the aggregate guard to match.\n\n///\n\n/// Construct an `AllGuard` using [`All`].\n\npub struct AllGuard {\n\n guards: Vec<Box<dyn Guard>>,\n\n}\n\n\n\nimpl AllGuard {\n\n /// Adds new guard to the collection of guards to check.\n\n pub fn and<F: Guard + 'static>(mut self, guard: F) -> Self {\n\n self.guards.push(Box::new(guard));\n\n self\n", "file_path": "actix-web/src/guard.rs", "rank": 14, "score": 219218.70252162477 }, { "content": "#[allow(non_snake_case)]\n\npub fn Any<F: Guard + 'static>(guard: F) -> AnyGuard {\n\n AnyGuard {\n\n guards: vec![Box::new(guard)],\n\n }\n\n}\n\n\n\n/// A collection of guards that match if the disjunction of their `check` outcomes is true.\n\n///\n\n/// That is, only one contained guard needs to match in order for the aggregate guard to match.\n\n///\n\n/// Construct an `AnyGuard` using [`Any`].\n\npub struct AnyGuard {\n\n guards: Vec<Box<dyn Guard>>,\n\n}\n\n\n\nimpl AnyGuard {\n\n /// Adds new guard to the collection of guards to check.\n\n pub fn or<F: Guard + 'static>(mut self, guard: F) -> Self {\n\n self.guards.push(Box::new(guard));\n\n self\n", "file_path": "actix-web/src/guard.rs", "rank": 15, "score": 219218.70252162477 }, { "content": "/// Creates scope for common path prefix.\n\n///\n\n/// Scopes collect multiple paths under a common path prefix. 
The scope's path can contain dynamic\n\n/// path segments.\n\n///\n\n/// # Avoid Trailing Slashes\n\n/// Avoid using trailing slashes in the scope prefix (e.g., `web::scope(\"/scope/\")`). It will almost\n\n/// certainly not have the expected behavior. See the [documentation on resource definitions][pat]\n\n/// to understand why this is the case and how to correctly construct scope/prefix definitions.\n\n///\n\n/// # Examples\n\n/// In this example, three routes are set up (and will handle any method):\n\n/// - `/{project_id}/path1`\n\n/// - `/{project_id}/path2`\n\n/// - `/{project_id}/path3`\n\n///\n\n/// ```\n\n/// use actix_web::{web, App, HttpResponse};\n\n///\n\n/// let app = App::new().service(\n\n/// web::scope(\"/{project_id}\")\n\n/// .service(web::resource(\"/path1\").to(|| HttpResponse::Ok()))\n\n/// .service(web::resource(\"/path2\").to(|| HttpResponse::Ok()))\n\n/// .service(web::resource(\"/path3\").to(|| HttpResponse::MethodNotAllowed()))\n\n/// );\n\n/// ```\n\n///\n\n/// [pat]: crate::dev::ResourceDef#prefix-resources\n\npub fn scope(path: &str) -> Scope {\n\n Scope::new(path)\n\n}\n\n\n", "file_path": "actix-web/src/web.rs", "rank": 16, "score": 215344.59103886667 }, { "content": "fn stabilize_date_header(payload: &mut [u8]) {\n\n let mut from = 0;\n\n while let Some(pos) = find_slice(payload, b\"date\", from) {\n\n payload[(from + pos)..(from + pos + 35)]\n\n .copy_from_slice(b\"date: Thu, 01 Jan 1970 12:34:56 UTC\");\n\n from += 35;\n\n }\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 17, "score": 213026.83618798404 }, { "content": "/// Start default [`TestServer`].\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use actix_web::{get, web, test, App, HttpResponse, Error, Responder};\n\n///\n\n/// #[get(\"/\")]\n\n/// async fn my_handler() -> Result<impl Responder, Error> {\n\n/// Ok(HttpResponse::Ok())\n\n/// }\n\n///\n\n/// #[actix_web::test]\n\n/// async fn test_example() {\n\n/// let srv = actix_test::start(||\n\n/// 
App::new().service(my_handler)\n\n/// );\n\n///\n\n/// let req = srv.get(\"/\");\n\n/// let res = req.send().await.unwrap();\n\n///\n\n/// assert!(res.status().is_success());\n\n/// }\n\n/// ```\n\npub fn start<F, I, S, B>(factory: F) -> TestServer\n\nwhere\n\n F: Fn() -> I + Send + Clone + 'static,\n\n I: IntoServiceFactory<S, Request>,\n\n S: ServiceFactory<Request, Config = AppConfig> + 'static,\n\n S::Error: Into<Error> + 'static,\n\n S::InitError: fmt::Debug,\n\n S::Response: Into<Response<B>> + 'static,\n\n <S::Service as Service<Request>>::Future: 'static,\n\n B: MessageBody + 'static,\n\n{\n\n start_with(TestServerConfig::default(), factory)\n\n}\n\n\n", "file_path": "actix-test/src/lib.rs", "rank": 18, "score": 210601.74991390939 }, { "content": "fn call() -> impl Iterator<Item = &'static str> {\n\n let arr = [\n\n \"/authorizations\",\n\n \"/user/repos\",\n\n \"/repos/rust-lang/rust/stargazers\",\n\n \"/orgs/rust-lang/public_members/nikomatsakis\",\n\n \"/repos/rust-lang/rust/releases/1.51.0\",\n\n ];\n\n\n\n IntoIterator::into_iter(arr)\n\n}\n\n\n", "file_path": "actix-router/benches/router.rs", "rank": 19, "score": 208330.0264958973 }, { "content": "#[allow(non_snake_case)]\n\npub fn Method(method: HttpMethod) -> impl Guard {\n\n MethodGuard(method)\n\n}\n\n\n", "file_path": "actix-web/src/guard.rs", "rank": 20, "score": 204553.50661666534 }, { "content": "/// Start test server with custom configuration\n\n///\n\n/// Check [`TestServerConfig`] docs for configuration options.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use actix_web::{get, web, test, App, HttpResponse, Error, Responder};\n\n///\n\n/// #[get(\"/\")]\n\n/// async fn my_handler() -> Result<impl Responder, Error> {\n\n/// Ok(HttpResponse::Ok())\n\n/// }\n\n///\n\n/// #[actix_web::test]\n\n/// async fn test_example() {\n\n/// let srv = actix_test::start_with(actix_test::config().h1(), ||\n\n/// App::new().service(my_handler)\n\n/// );\n\n///\n\n/// let req = srv.get(\"/\");\n\n/// let res = 
req.send().await.unwrap();\n\n///\n\n/// assert!(res.status().is_success());\n\n/// }\n\n/// ```\n\npub fn start_with<F, I, S, B>(cfg: TestServerConfig, factory: F) -> TestServer\n\nwhere\n\n F: Fn() -> I + Send + Clone + 'static,\n\n I: IntoServiceFactory<S, Request>,\n\n S: ServiceFactory<Request, Config = AppConfig> + 'static,\n\n S::Error: Into<Error> + 'static,\n\n S::InitError: fmt::Debug,\n\n S::Response: Into<Response<B>> + 'static,\n\n <S::Service as Service<Request>>::Future: 'static,\n\n B: MessageBody + 'static,\n\n{\n\n // for sending handles and server info back from the spawned thread\n\n let (started_tx, started_rx) = std::sync::mpsc::channel();\n\n\n\n // for signaling the shutdown of spawned server and system\n\n let (thread_stop_tx, thread_stop_rx) = mpsc::channel(1);\n\n\n\n let tls = match cfg.stream {\n\n StreamType::Tcp => false,\n\n #[cfg(feature = \"openssl\")]\n", "file_path": "actix-test/src/lib.rs", "rank": 21, "score": 195320.86190041766 }, { "content": "/// Parses extended header parameter values (`ext-value`), as defined\n\n/// in [RFC 5987 §3.2](https://datatracker.ietf.org/doc/html/rfc5987#section-3.2).\n\n///\n\n/// Extended values are denoted by parameter names that end with `*`.\n\n///\n\n/// ## ABNF\n\n///\n\n/// ```plain\n\n/// ext-value = charset \"'\" [ language ] \"'\" value-chars\n\n/// ; like RFC 2231's <extended-initial-value>\n\n/// ; (see [RFC 2231 §7])\n\n///\n\n/// charset = \"UTF-8\" / \"ISO-8859-1\" / mime-charset\n\n///\n\n/// mime-charset = 1*mime-charsetc\n\n/// mime-charsetc = ALPHA / DIGIT\n\n/// / \"!\" / \"#\" / \"$\" / \"%\" / \"&\"\n\n/// / \"+\" / \"-\" / \"^\" / \"_\" / \"`\"\n\n/// / \"{\" / \"}\" / \"~\"\n\n/// ; as <mime-charset> in [RFC 2978 §2.3]\n\n/// ; except that the single quote is not included\n\n/// ; SHOULD be registered in the IANA charset registry\n\n///\n\n/// language = <Language-Tag, defined in [RFC 5646 §2.1]>\n\n///\n\n/// value-chars = *( pct-encoded / attr-char )\n\n///\n\n/// 
pct-encoded = \"%\" HEXDIG HEXDIG\n\n/// ; see [RFC 3986 §2.1]\n\n///\n\n/// attr-char = ALPHA / DIGIT\n\n/// / \"!\" / \"#\" / \"$\" / \"&\" / \"+\" / \"-\" / \".\"\n\n/// / \"^\" / \"_\" / \"`\" / \"|\" / \"~\"\n\n/// ; token except ( \"*\" / \"'\" / \"%\" )\n\n/// ```\n\n///\n\n/// [RFC 2231 §7]: https://datatracker.ietf.org/doc/html/rfc2231#section-7\n\n/// [RFC 2978 §2.3]: https://datatracker.ietf.org/doc/html/rfc2978#section-2.3\n\n/// [RFC 3986 §2.1]: https://datatracker.ietf.org/doc/html/rfc5646#section-2.1\n\npub fn parse_extended_value(val: &str) -> Result<ExtendedValue, crate::error::ParseError> {\n\n // Break into three pieces separated by the single-quote character\n\n let mut parts = val.splitn(3, '\\'');\n\n\n\n // Interpret the first piece as a Charset\n\n let charset: Charset = match parts.next() {\n\n None => return Err(crate::error::ParseError::Header),\n\n Some(n) => FromStr::from_str(n).map_err(|_| crate::error::ParseError::Header)?,\n\n };\n\n\n\n // Interpret the second piece as a language tag\n\n let language_tag: Option<LanguageTag> = match parts.next() {\n\n None => return Err(crate::error::ParseError::Header),\n\n Some(\"\") => None,\n\n Some(s) => match s.parse() {\n\n Ok(lt) => Some(lt),\n\n Err(_) => return Err(crate::error::ParseError::Header),\n\n },\n\n };\n\n\n", "file_path": "actix-http/src/header/shared/extended.rs", "rank": 22, "score": 192964.88764305698 }, { "content": "#[inline]\n\npub fn apply_mask(buf: &mut [u8], mask: [u8; 4]) {\n\n apply_mask_fast32(buf, mask)\n\n}\n\n\n\n/// A safe unoptimized mask application.\n", "file_path": "actix-http/src/ws/mask.rs", "rank": 23, "score": 190842.71311430764 }, { "content": "fn bench_quality_display_impls(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"quality value display impls\");\n\n\n\n for i in CODES.iter() {\n\n group.bench_with_input(BenchmarkId::new(\"New (fast?)\", i), i, |b, &i| {\n\n b.iter(|| _new::Quality(i).to_string())\n\n });\n\n\n\n 
group.bench_with_input(BenchmarkId::new(\"Naive\", i), i, |b, &i| {\n\n b.iter(|| _naive::Quality(i).to_string())\n\n });\n\n }\n\n\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, bench_quality_display_impls);\n\ncriterion_main!(benches);\n\n\n\nmod _new {\n", "file_path": "actix-http/benches/quality-value.rs", "rank": 24, "score": 188891.03064179886 }, { "content": "#[inline]\n\npub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {\n\n let mask_u32 = u32::from_ne_bytes(mask);\n\n\n\n // SAFETY:\n\n //\n\n // buf is a valid slice borrowed mutably from bytes::BytesMut.\n\n //\n\n // un aligned prefix and suffix would be mask/unmask per byte.\n\n // proper aligned middle slice goes into fast path and operates on 4-byte blocks.\n\n let (prefix, words, suffix) = unsafe { buf.align_to_mut::<u32>() };\n\n apply_mask_fallback(prefix, mask);\n\n let head = prefix.len() & 3;\n\n let mask_u32 = if head > 0 {\n\n if cfg!(target_endian = \"big\") {\n\n mask_u32.rotate_left(8 * head as u32)\n\n } else {\n\n mask_u32.rotate_right(8 * head as u32)\n\n }\n\n } else {\n\n mask_u32\n", "file_path": "actix-http/src/ws/mask.rs", "rank": 25, "score": 188251.36734182632 }, { "content": "/// Returns true if `req` doesn't have an `If-None-Match` header matching `req`.\n\nfn none_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {\n\n match req.get_header::<header::IfNoneMatch>() {\n\n Some(header::IfNoneMatch::Any) => false,\n\n\n\n Some(header::IfNoneMatch::Items(ref items)) => {\n\n if let Some(some_etag) = etag {\n\n for item in items {\n\n if item.weak_eq(some_etag) {\n\n return false;\n\n }\n\n }\n\n }\n\n\n\n true\n\n }\n\n\n\n None => true,\n\n }\n\n}\n\n\n", "file_path": "actix-files/src/named.rs", "rank": 26, "score": 188160.92449188535 }, { "content": "#[inline]\n\npub fn from_one_raw_str<T: FromStr>(val: Option<&HeaderValue>) -> Result<T, ParseError> {\n\n if let Some(line) = val {\n\n let line = line.to_str().map_err(|_| 
ParseError::Header)?;\n\n\n\n if !line.is_empty() {\n\n return T::from_str(line).or(Err(ParseError::Header));\n\n }\n\n }\n\n\n\n Err(ParseError::Header)\n\n}\n\n\n\n/// Format an array into a comma-delimited string.\n", "file_path": "actix-http/src/header/utils.rs", "rank": 27, "score": 184513.23029333405 }, { "content": "/// Write out content length header.\n\n///\n\n/// Buffer must to contain enough space or be implicitly extendable.\n\npub fn write_content_length<B: BufMut>(n: u64, buf: &mut B, camel_case: bool) {\n\n if n == 0 {\n\n if camel_case {\n\n buf.put_slice(b\"\\r\\nContent-Length: 0\\r\\n\");\n\n } else {\n\n buf.put_slice(b\"\\r\\ncontent-length: 0\\r\\n\");\n\n }\n\n\n\n return;\n\n }\n\n\n\n let mut buffer = itoa::Buffer::new();\n\n\n\n if camel_case {\n\n buf.put_slice(b\"\\r\\nContent-Length: \");\n\n } else {\n\n buf.put_slice(b\"\\r\\ncontent-length: \");\n\n }\n\n\n\n buf.put_slice(buffer.format(n).as_bytes());\n", "file_path": "actix-http/src/helpers.rs", "rank": 28, "score": 183639.0109262526 }, { "content": "/// Create WebSocket handshake response.\n\n///\n\n/// This function returns handshake `Response`, ready to send to peer.\n\npub fn handshake_response(req: &RequestHead) -> ResponseBuilder {\n\n let key = {\n\n let key = req.headers().get(header::SEC_WEBSOCKET_KEY).unwrap();\n\n proto::hash_key(key.as_ref())\n\n };\n\n\n\n Response::build(StatusCode::SWITCHING_PROTOCOLS)\n\n .upgrade(\"websocket\")\n\n .insert_header((\n\n header::SEC_WEBSOCKET_ACCEPT,\n\n // key is known to be header value safe ascii\n\n HeaderValue::from_bytes(&key).unwrap(),\n\n ))\n\n .take()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{header, Method};\n\n\n", "file_path": "actix-http/src/ws/mod.rs", "rank": 29, "score": 178948.39491617025 }, { "content": "/// Verify WebSocket handshake request.\n\npub fn verify_handshake(req: &RequestHead) -> Result<(), HandshakeError> {\n\n // WebSocket accepts only GET\n\n if req.method != Method::GET {\n\n return 
Err(HandshakeError::GetMethodRequired);\n\n }\n\n\n\n // Check for \"UPGRADE\" to WebSocket header\n\n let has_hdr = if let Some(hdr) = req.headers().get(header::UPGRADE) {\n\n if let Ok(s) = hdr.to_str() {\n\n s.to_ascii_lowercase().contains(\"websocket\")\n\n } else {\n\n false\n\n }\n\n } else {\n\n false\n\n };\n\n if !has_hdr {\n\n return Err(HandshakeError::NoWebsocketUpgrade);\n\n }\n\n\n", "file_path": "actix-http/src/ws/mod.rs", "rank": 30, "score": 173182.53470318177 }, { "content": "/// Trim whitespace then any quote marks.\n\nfn unquote(val: &str) -> &str {\n\n val.trim().trim_start_matches('\"').trim_end_matches('\"')\n\n}\n\n\n", "file_path": "actix-web/src/info.rs", "rank": 31, "score": 171345.3323391053 }, { "content": "/// Verify WebSocket handshake request and create handshake response.\n\npub fn handshake(req: &RequestHead) -> Result<ResponseBuilder, HandshakeError> {\n\n verify_handshake(req)?;\n\n Ok(handshake_response(req))\n\n}\n\n\n", "file_path": "actix-http/src/ws/mod.rs", "rank": 32, "score": 167917.88445547948 }, { "content": "/// Split at the index of the first `needle` if it exists or at the end.\n\nfn split_once(haystack: &str, needle: char) -> (&str, &str) {\n\n haystack.find(needle).map_or_else(\n\n || (haystack, \"\"),\n\n |sc| {\n\n let (first, last) = haystack.split_at(sc);\n\n (first, last.split_at(1).1)\n\n },\n\n )\n\n}\n\n\n", "file_path": "actix-web/src/http/header/content_disposition.rs", "rank": 33, "score": 163194.95440743215 }, { "content": "/// Split at the index of the first `needle` if it exists or at the end, trim the right of the\n\n/// first part and the left of the last part.\n\nfn split_once_and_trim(haystack: &str, needle: char) -> (&str, &str) {\n\n let (first, last) = split_once(haystack, needle);\n\n (first.trim_end(), last.trim_start())\n\n}\n\n\n\n/// The implied disposition of the content of the HTTP body.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum DispositionType {\n\n /// Inline implies default 
processing.\n\n Inline,\n\n\n\n /// Attachment implies that the recipient should prompt the user to save the response locally,\n\n /// rather than process it normally (as per its media type).\n\n Attachment,\n\n\n\n /// Used in *multipart/form-data* as defined in\n\n /// [RFC 7578](https://datatracker.ietf.org/doc/html/rfc7578) to carry the field name and\n\n /// optional filename.\n\n FormData,\n\n\n", "file_path": "actix-web/src/http/header/content_disposition.rs", "rank": 34, "score": 161768.77680342997 }, { "content": "pub fn service_benches() {\n\n let mut criterion: ::criterion::Criterion<_> =\n\n ::criterion::Criterion::default().configure_from_args();\n\n bench_async_service(&mut criterion, ok_service(), \"async_service_direct\");\n\n async_web_service(&mut criterion);\n\n}\n\ncriterion_main!(service_benches);\n", "file_path": "actix-web/benches/service.rs", "rank": 35, "score": 159969.97377589173 }, { "content": "fn echo_payload_service() -> impl Service<Request, Response = Response<Bytes>, Error = Error> {\n\n fn_service(|mut req: Request| {\n\n Box::pin(async move {\n\n use futures_util::stream::StreamExt as _;\n\n\n\n let mut pl = req.take_payload();\n\n let mut body = BytesMut::new();\n\n while let Some(chunk) = pl.next().await {\n\n body.extend_from_slice(chunk.unwrap().chunk())\n\n }\n\n\n\n Ok::<_, Error>(Response::ok().set_body(body.freeze()))\n\n })\n\n })\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn late_request() {\n\n let mut buf = TestBuffer::empty();\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 36, "score": 159324.4685760322 }, { "content": "fn split_in_two(s: &str, separator: char) -> Option<(&str, &str)> {\n\n let mut iter = s.splitn(2, separator);\n\n match (iter.next(), iter.next()) {\n\n (Some(a), Some(b)) => Some((a, b)),\n\n _ => None,\n\n }\n\n}\n\n\n\nimpl FromStr for ContentRangeSpec {\n\n type Err = ParseError;\n\n\n\n fn from_str(s: &str) -> Result<Self, ParseError> {\n\n let res = match split_in_two(s, ' ') 
{\n\n Some((\"bytes\", resp)) => {\n\n let (range, instance_length) =\n\n split_in_two(resp, '/').ok_or(ParseError::Header)?;\n\n\n\n let instance_length = if instance_length == \"*\" {\n\n None\n\n } else {\n", "file_path": "actix-web/src/http/header/content_range.rs", "rank": 37, "score": 159291.10976452765 }, { "content": "/// Prepare WebSocket handshake response.\n\n///\n\n/// This function returns handshake `HttpResponse`, ready to send to peer. It does not perform\n\n/// any IO.\n\n///\n\n/// `protocols` is a sequence of known protocols. On successful handshake, the returned response\n\n/// headers contain the first protocol in this list which the server also knows.\n\npub fn handshake_with_protocols(\n\n req: &HttpRequest,\n\n protocols: &[&str],\n\n) -> Result<HttpResponseBuilder, HandshakeError> {\n\n // WebSocket accepts only GET\n\n if *req.method() != Method::GET {\n\n return Err(HandshakeError::GetMethodRequired);\n\n }\n\n\n\n // check for \"UPGRADE\" to WebSocket header\n\n let has_hdr = if let Some(hdr) = req.headers().get(&header::UPGRADE) {\n\n if let Ok(s) = hdr.to_str() {\n\n s.to_ascii_lowercase().contains(\"websocket\")\n\n } else {\n\n false\n\n }\n\n } else {\n\n false\n\n };\n\n if !has_hdr {\n", "file_path": "actix-web-actors/src/ws.rs", "rank": 38, "score": 157745.60079375817 }, { "content": "/// Errors that can generate responses.\n\n// TODO: flesh out documentation\n\npub trait ResponseError: fmt::Debug + fmt::Display {\n\n /// Returns appropriate status code for error.\n\n ///\n\n /// A 500 Internal Server Error is used by default. 
If [error_response](Self::error_response) is\n\n /// also implemented and does not call `self.status_code()`, then this will not be used.\n\n fn status_code(&self) -> StatusCode {\n\n StatusCode::INTERNAL_SERVER_ERROR\n\n }\n\n\n\n /// Creates full response for error.\n\n ///\n\n /// By default, the generated response uses a 500 Internal Server Error status code, a\n\n /// `Content-Type` of `text/plain`, and the body is set to `Self`'s `Display` impl.\n\n fn error_response(&self) -> HttpResponse<BoxBody> {\n\n let mut res = HttpResponse::new(self.status_code());\n\n\n\n let mut buf = BytesMut::new();\n\n let _ = write!(helpers::MutWriter(&mut buf), \"{}\", self);\n\n\n\n let mime = mime::TEXT_PLAIN_UTF_8.try_into_value().unwrap();\n", "file_path": "actix-web/src/error/response_error.rs", "rank": 39, "score": 156914.7207641406 }, { "content": "#[inline]\n\nfn parts(parts: &mut Option<Inner>) -> &mut Inner {\n\n parts.as_mut().expect(\"cannot reuse test request builder\")\n\n}\n\n\n\n/// Async I/O test buffer.\n\n#[derive(Debug)]\n\npub struct TestBuffer {\n\n pub read_buf: Rc<RefCell<BytesMut>>,\n\n pub write_buf: Rc<RefCell<BytesMut>>,\n\n pub err: Option<Rc<io::Error>>,\n\n}\n\n\n\nimpl TestBuffer {\n\n /// Create new `TestBuffer` instance with initial read buffer.\n\n pub fn new<T>(data: T) -> Self\n\n where\n\n T: Into<BytesMut>,\n\n {\n\n Self {\n\n read_buf: Rc::new(RefCell::new(data.into())),\n", "file_path": "actix-http/src/test.rs", "rank": 40, "score": 155853.73553860968 }, { "content": "#[doc(hidden)]\n\n#[deprecated(since = \"4.0.0\", note = \"Renamed to `status_service`.\")]\n\npub fn simple_service(\n\n status_code: StatusCode,\n\n) -> impl Service<ServiceRequest, Response = ServiceResponse<BoxBody>, Error = Error> {\n\n status_service(status_code)\n\n}\n\n\n", "file_path": "actix-web/src/test/test_services.rs", "rank": 41, "score": 155618.14430547826 }, { "content": "#[doc(hidden)]\n\n#[deprecated(since = \"4.0.0\", note = \"Renamed to 
`status_service`.\")]\n\npub fn default_service(\n\n status_code: StatusCode,\n\n) -> impl Service<ServiceRequest, Response = ServiceResponse<BoxBody>, Error = Error> {\n\n status_service(status_code)\n\n}\n", "file_path": "actix-web/src/test/test_services.rs", "rank": 42, "score": 155618.14430547826 }, { "content": "/// Creates service that always responds with given status code and no body.\n\npub fn status_service(\n\n status_code: StatusCode,\n\n) -> impl Service<ServiceRequest, Response = ServiceResponse<BoxBody>, Error = Error> {\n\n fn_service(move |req: ServiceRequest| {\n\n ok(req.into_response(HttpResponseBuilder::new(status_code).finish()))\n\n })\n\n}\n\n\n", "file_path": "actix-web/src/test/test_services.rs", "rank": 43, "score": 155618.14430547826 }, { "content": "/// Creates service that always responds with `200 OK` and no body.\n\npub fn ok_service(\n\n) -> impl Service<ServiceRequest, Response = ServiceResponse<BoxBody>, Error = Error> {\n\n status_service(StatusCode::OK)\n\n}\n\n\n", "file_path": "actix-web/src/test/test_services.rs", "rank": 44, "score": 155618.14430547826 }, { "content": "/// Creates a new un-configured route.\n\npub fn route() -> Route {\n\n Route::new()\n\n}\n\n\n\nmacro_rules! 
method_route {\n\n ($method_fn:ident, $method_const:ident) => {\n\n #[doc = concat!(\" Creates a new route with `\", stringify!($method_const), \"` method guard.\")]\n\n ///\n\n /// # Examples\n\n #[doc = concat!(\" In this example, one `\", stringify!($method_const), \" /{project_id}` route is set up:\")]\n\n /// ```\n\n /// use actix_web::{web, App, HttpResponse};\n\n ///\n\n /// let app = App::new().service(\n\n /// web::resource(\"/{project_id}\")\n\n #[doc = concat!(\" .route(web::\", stringify!($method_fn), \"().to(|| HttpResponse::Ok()))\")]\n\n ///\n\n /// );\n\n /// ```\n\n pub fn $method_fn() -> Route {\n", "file_path": "actix-web/src/web.rs", "rank": 45, "score": 154885.91205522165 }, { "content": "pub fn get_negotiated_alpn_protocol(\n\n addr: SocketAddr,\n\n client_alpn_protocol: &[u8],\n\n) -> Option<Vec<u8>> {\n\n let mut config = rustls::ClientConfig::builder()\n\n .with_safe_defaults()\n\n .with_root_certificates(webpki_roots_cert_store())\n\n .with_no_client_auth();\n\n\n\n config.alpn_protocols.push(client_alpn_protocol.to_vec());\n\n\n\n let mut sess = rustls::ClientConnection::new(\n\n Arc::new(config),\n\n ServerName::try_from(\"localhost\").unwrap(),\n\n )\n\n .unwrap();\n\n\n\n let mut sock = StdTcpStream::connect(addr).unwrap();\n\n let mut stream = rustls::Stream::new(&mut sess, &mut sock);\n\n\n", "file_path": "actix-http/tests/test_rustls.rs", "rank": 46, "score": 153581.4053421223 }, { "content": "type MimeOverride = dyn Fn(&mime::Name<'_>) -> DispositionType;\n\n\n", "file_path": "actix-files/src/lib.rs", "rank": 47, "score": 153468.6988488313 }, { "content": "fn payload_from_bytes(bytes: Bytes) -> dev::Payload {\n\n let (_, mut h1_payload) = actix_http::h1::Payload::create(true);\n\n h1_payload.unread_data(bytes);\n\n dev::Payload::from(h1_payload)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde::{Deserialize, Serialize};\n\n\n\n use super::*;\n\n use crate::{\n\n test::TestRequest,\n\n web::{Form, Json},\n\n };\n\n\n\n 
#[derive(Debug, Clone, Serialize, Deserialize)]\n\n struct TestForm {\n\n hello: String,\n\n }\n", "file_path": "actix-web/src/types/either.rs", "rank": 48, "score": 152522.18378091283 }, { "content": "fn responder(c: &mut Criterion) {\n\n let rt = actix_rt::System::new();\n\n let req = TestRequest::default().to_http_request();\n\n c.bench_function(\"responder\", move |b| {\n\n b.iter_custom(|_| {\n\n let responders =\n\n (0..100_000).map(|_| StringResponder(String::from(\"Hello World!!\")));\n\n\n\n let start = Instant::now();\n\n let _res = rt.block_on(async {\n\n // don't need runtime block on but to be fair.\n\n responders.map(|r| r.respond_to(&req)).collect::<Vec<_>>()\n\n });\n\n\n\n start.elapsed()\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(responder_bench, future_responder, responder);\n\ncriterion_main!(responder_bench);\n", "file_path": "actix-web/benches/responder.rs", "rank": 49, "score": 151338.01386979478 }, { "content": "#[doc(hidden)]\n\npub trait Head: Default + 'static {\n\n fn clear(&mut self);\n\n\n\n fn with_pool<F, R>(f: F) -> R\n\n where\n\n F: FnOnce(&MessagePool<Self>) -> R;\n\n}\n\n\n\npub struct Message<T: Head> {\n\n /// Rc here should not be cloned by anyone.\n\n /// It's used to reuse allocation of T and no shared ownership is allowed.\n\n head: Rc<T>,\n\n}\n\n\n\nimpl<T: Head> Message<T> {\n\n /// Get new message from the pool of objects\n\n #[allow(clippy::new_without_default)]\n\n pub fn new() -> Self {\n\n T::with_pool(MessagePool::get_message)\n\n }\n", "file_path": "actix-http/src/message.rs", "rank": 50, "score": 150744.4865665179 }, { "content": "/// Create default test server config.\n\npub fn config() -> TestServerConfig {\n\n TestServerConfig::default()\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct TestServerConfig {\n\n tp: HttpVer,\n\n stream: StreamType,\n\n client_request_timeout: Duration,\n\n}\n\n\n\nimpl Default for TestServerConfig {\n\n fn default() -> Self {\n\n TestServerConfig::new()\n\n }\n\n}\n\n\n\nimpl 
TestServerConfig {\n\n /// Create default server configuration\n\n pub(crate) fn new() -> TestServerConfig {\n", "file_path": "actix-test/src/lib.rs", "rank": 51, "score": 150534.08258480817 }, { "content": "fn main() -> std::io::Result<()> {\n\n env_logger::init_from_env(env_logger::Env::new().default_filter_or(\"info\"));\n\n\n\n rt::System::new().block_on(\n\n HttpServer::new(|| {\n\n App::new()\n\n .wrap(middleware::Logger::default())\n\n .service(web::resource(\"/\").route(web::get().to(index)))\n\n })\n\n .bind((\"127.0.0.1\", 8080))?\n\n .workers(1)\n\n .run(),\n\n )\n\n}\n", "file_path": "actix-web/examples/macroless.rs", "rank": 52, "score": 149581.5499111777 }, { "content": "fn compare_quoters(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Compare Quoters\");\n\n\n\n let quoter = actix_router::Quoter::new(b\"\", b\"\");\n\n let path_quoted = (0..=0x7f)\n\n .map(|c| format!(\"%{:02X}\", c))\n\n .collect::<String>();\n\n let path_unquoted = ('\\u{00}'..='\\u{7f}').collect::<String>();\n\n\n\n group.bench_function(\"quoter_unquoted\", |b| {\n\n b.iter(|| {\n\n for _ in 0..10 {\n\n black_box(quoter.requote(path_unquoted.as_bytes()));\n\n }\n\n });\n\n });\n\n\n\n group.bench_function(\"percent_encode_unquoted\", |b| {\n\n b.iter(|| {\n\n for _ in 0..10 {\n", "file_path": "actix-router/benches/quoter.rs", "rank": 53, "score": 149112.18963552592 }, { "content": "fn future_responder(c: &mut Criterion) {\n\n let rt = actix_rt::System::new();\n\n let req = TestRequest::default().to_http_request();\n\n\n\n c.bench_function(\"future_responder\", move |b| {\n\n b.iter_custom(|_| {\n\n let futs = (0..100_000).map(|_| async {\n\n StringResponder(String::from(\"Hello World!!\"))\n\n .future_respond_to(&req)\n\n .await\n\n });\n\n\n\n let futs = join_all(futs);\n\n\n\n let start = Instant::now();\n\n\n\n let _res = rt.block_on(async { futs.await });\n\n\n\n start.elapsed()\n\n })\n\n });\n\n}\n\n\n", "file_path": "actix-web/benches/responder.rs", "rank": 
54, "score": 149112.18963552592 }, { "content": "fn compare_routers(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Compare Routers\");\n\n\n\n let mut actix = actix_router::Router::<bool>::build();\n\n for route in register!(brackets) {\n\n actix.path(route, true);\n\n }\n\n let actix = actix.finish();\n\n group.bench_function(\"actix\", |b| {\n\n b.iter(|| {\n\n for route in call() {\n\n let mut path = actix_router::Path::new(route);\n\n black_box(actix.recognize(&mut path).unwrap());\n\n }\n\n });\n\n });\n\n\n\n let regex_set = regex::RegexSet::new(register!(regex)).unwrap();\n\n group.bench_function(\"regex\", |b| {\n\n b.iter(|| {\n", "file_path": "actix-router/benches/router.rs", "rank": 55, "score": 149112.18963552592 }, { "content": "#[proc_macro_attribute]\n\npub fn main(_: TokenStream, item: TokenStream) -> TokenStream {\n\n let mut output: TokenStream = (quote! {\n\n #[::actix_web::rt::main(system = \"::actix_web::rt::System\")]\n\n })\n\n .into();\n\n\n\n output.extend(item);\n\n output\n\n}\n\n\n\n/// Marks async test functions to use the actix system entry-point.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// #[actix_web::test]\n\n/// async fn test() {\n\n/// assert_eq!(async { \"Hello world\" }.await, \"Hello world\");\n\n/// }\n\n/// ```\n", "file_path": "actix-web-codegen/src/lib.rs", "rank": 56, "score": 149054.3249938351 }, { "content": "#[proc_macro_attribute]\n\npub fn test(_: TokenStream, item: TokenStream) -> TokenStream {\n\n let mut output: TokenStream = (quote! 
{\n\n #[::actix_web::rt::test(system = \"::actix_web::rt::System\")]\n\n })\n\n .into();\n\n\n\n output.extend(item);\n\n output\n\n}\n", "file_path": "actix-web-codegen/src/lib.rs", "rank": 57, "score": 149054.3249938351 }, { "content": "/// Returns true if `req` has no `If-Match` header or one which matches `etag`.\n\nfn any_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {\n\n match req.get_header::<header::IfMatch>() {\n\n None | Some(header::IfMatch::Any) => true,\n\n\n\n Some(header::IfMatch::Items(ref items)) => {\n\n if let Some(some_etag) = etag {\n\n for item in items {\n\n if item.strong_eq(some_etag) {\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n false\n\n }\n\n }\n\n}\n\n\n", "file_path": "actix-files/src/named.rs", "rank": 58, "score": 148280.7053210744 }, { "content": "// Benchmark basic WebService directly\n\n// this approach is usable for benching WebService, though it adds some time to direct service call:\n\n// Sample results on MacBook Pro '14\n\n// time: [2.0724 us 2.1345 us 2.2074 us]\n\nfn async_web_service(c: &mut Criterion) {\n\n let rt = actix_rt::System::new();\n\n let srv = Rc::new(RefCell::new(rt.block_on(init_service(\n\n App::new().service(web::service(\"/\").finish(index)),\n\n ))));\n\n\n\n let req = TestRequest::get().uri(\"/\").to_request();\n\n assert!(rt\n\n .block_on(srv.borrow_mut().call(req))\n\n .unwrap()\n\n .status()\n\n .is_success());\n\n\n\n // start benchmark loops\n\n c.bench_function(\"async_web_service_direct\", move |b| {\n\n b.iter_custom(|iters| {\n\n let srv = srv.clone();\n\n let futs = (0..iters)\n\n .map(|_| TestRequest::get().uri(\"/\").to_request())\n\n .map(|req| srv.borrow_mut().call(req));\n", "file_path": "actix-web/benches/service.rs", "rank": 59, "score": 146983.3451265336 }, { "content": "// benchmark sending all requests at the same time\n\nfn bench_async_burst(c: &mut Criterion) {\n\n // We are using System here, since Runtime requires preinitialized tokio\n\n // Maybe add to 
actix_rt docs\n\n let rt = actix_rt::System::new();\n\n\n\n let srv = rt.block_on(async {\n\n actix_test::start(|| {\n\n App::new().service(\n\n web::resource(\"/\").route(web::to(|| async { HttpResponse::Ok().body(STR) })),\n\n )\n\n })\n\n });\n\n\n\n let url = srv.url(\"/\");\n\n\n\n c.bench_function(\"get_body_async_burst\", move |b| {\n\n b.iter_custom(|iters| {\n\n rt.block_on(async {\n\n let client = Client::new().get(url.clone()).freeze().unwrap();\n\n\n", "file_path": "actix-web/benches/server.rs", "rank": 60, "score": 146983.3451265336 }, { "content": "#[deprecated(since = \"4.0.0\", note = \"Prefer `WsResponseBuilder::start_with_addr`.\")]\n\npub fn start_with_addr<A, T>(\n\n actor: A,\n\n req: &HttpRequest,\n\n stream: T,\n\n) -> Result<(Addr<A>, HttpResponse), Error>\n\nwhere\n\n A: Actor<Context = WebsocketContext<A>> + StreamHandler<Result<Message, ProtocolError>>,\n\n T: Stream<Item = Result<Bytes, PayloadError>> + 'static,\n\n{\n\n let mut res = handshake(req)?;\n\n let (addr, out_stream) = WebsocketContext::create_with_addr(actor, stream);\n\n Ok((addr, res.streaming(out_stream)))\n\n}\n\n\n\n/// Do WebSocket handshake and start ws actor.\n\n///\n\n/// `protocols` is a sequence of known protocols.\n\n#[deprecated(\n\n since = \"4.0.0\",\n\n note = \"Prefer `WsResponseBuilder` for setting protocols.\"\n\n)]\n", "file_path": "actix-web-actors/src/ws.rs", "rank": 61, "score": 146243.21956750244 }, { "content": "pub fn start_with_protocols<A, T>(\n\n actor: A,\n\n protocols: &[&str],\n\n req: &HttpRequest,\n\n stream: T,\n\n) -> Result<HttpResponse, Error>\n\nwhere\n\n A: Actor<Context = WebsocketContext<A>> + StreamHandler<Result<Message, ProtocolError>>,\n\n T: Stream<Item = Result<Bytes, PayloadError>> + 'static,\n\n{\n\n let mut res = handshake_with_protocols(req, protocols)?;\n\n Ok(res.streaming(WebsocketContext::create(actor, stream)))\n\n}\n\n\n", "file_path": "actix-web-actors/src/ws.rs", "rank": 62, "score": 146243.21956750244 }, { 
"content": "fn bench_header_parsing(c: &mut Criterion) {\n\n c.bench_function(\"Original (Unsound) [short]\", |b| {\n\n b.iter(|| {\n\n let mut buf = BytesMut::from(REQ_SHORT);\n\n _original::parse_headers(&mut buf);\n\n })\n\n });\n\n\n\n c.bench_function(\"New (safe) [short]\", |b| {\n\n b.iter(|| {\n\n let mut buf = BytesMut::from(REQ_SHORT);\n\n _new::parse_headers(&mut buf);\n\n })\n\n });\n\n\n\n c.bench_function(\"Original (Unsound) [realistic]\", |b| {\n\n b.iter(|| {\n\n let mut buf = BytesMut::from(REQ);\n\n _original::parse_headers(&mut buf);\n\n })\n", "file_path": "actix-http/benches/uninit-headers.rs", "rank": 63, "score": 144945.2773294823 }, { "content": "/// Parses 0 or more items out of a comma delimited string, ignoring invalid items.\n\nfn from_comma_delimited<T: FromStr>(s: &str) -> Vec<T> {\n\n s.split(',')\n\n .filter_map(|x| match x.trim() {\n\n \"\" => None,\n\n y => Some(y),\n\n })\n\n .filter_map(|x| x.parse().ok())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use actix_http::{test::TestRequest, Request};\n\n\n\n use super::*;\n\n\n\n fn req(s: &str) -> Request {\n\n TestRequest::default()\n\n .insert_header((header::RANGE, s))\n\n .finish()\n", "file_path": "actix-web/src/http/header/range.rs", "rank": 64, "score": 142994.5082301587 }, { "content": "fn bench_write_status_line_11(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"write_status_line v1.1\");\n\n\n\n let version = Version::HTTP_11;\n\n\n\n for i in CODES.iter() {\n\n group.bench_with_input(BenchmarkId::new(\"Original (unsafe)\", i), i, |b, &i| {\n\n b.iter(|| {\n\n let mut b = BytesMut::with_capacity(35);\n\n _original::write_status_line(version, i, &mut b);\n\n })\n\n });\n\n\n\n group.bench_with_input(BenchmarkId::new(\"New (safe)\", i), i, |b, &i| {\n\n b.iter(|| {\n\n let mut b = BytesMut::with_capacity(35);\n\n _new::write_status_line(version, i, &mut b);\n\n })\n\n });\n\n\n", "file_path": "actix-http/benches/status-line.rs", "rank": 65, 
"score": 142992.301196857 }, { "content": "fn bench_write_status_line_09(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"write_status_line v0.9\");\n\n\n\n let version = Version::HTTP_09;\n\n\n\n for i in CODES.iter() {\n\n group.bench_with_input(BenchmarkId::new(\"Original (unsafe)\", i), i, |b, &i| {\n\n b.iter(|| {\n\n let mut b = BytesMut::with_capacity(35);\n\n _original::write_status_line(version, i, &mut b);\n\n })\n\n });\n\n\n\n group.bench_with_input(BenchmarkId::new(\"New (safe)\", i), i, |b, &i| {\n\n b.iter(|| {\n\n let mut b = BytesMut::with_capacity(35);\n\n _new::write_status_line(version, i, &mut b);\n\n })\n\n });\n\n\n", "file_path": "actix-http/benches/status-line.rs", "rank": 66, "score": 142992.301196857 }, { "content": "fn bench_write_status_line_10(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"write_status_line v1.0\");\n\n\n\n let version = Version::HTTP_10;\n\n\n\n for i in CODES.iter() {\n\n group.bench_with_input(BenchmarkId::new(\"Original (unsafe)\", i), i, |b, &i| {\n\n b.iter(|| {\n\n let mut b = BytesMut::with_capacity(35);\n\n _original::write_status_line(version, i, &mut b);\n\n })\n\n });\n\n\n\n group.bench_with_input(BenchmarkId::new(\"New (safe)\", i), i, |b, &i| {\n\n b.iter(|| {\n\n let mut b = BytesMut::with_capacity(35);\n\n _new::write_status_line(version, i, &mut b);\n\n })\n\n });\n\n\n", "file_path": "actix-http/benches/status-line.rs", "rank": 67, "score": 142992.301196857 }, { "content": "/// Creates a new route with specified method guard.\n\n///\n\n/// # Examples\n\n/// In this example, one `GET /{project_id}` route is set up:\n\n///\n\n/// ```\n\n/// use actix_web::{web, http, App, HttpResponse};\n\n///\n\n/// let app = App::new().service(\n\n/// web::resource(\"/{project_id}\")\n\n/// .route(web::method(http::Method::GET).to(|| HttpResponse::Ok()))\n\n/// );\n\n/// ```\n\npub fn method(method: Method) -> Route {\n\n Route::new().method(method)\n\n}\n\n\n", "file_path": 
"actix-web/src/web.rs", "rank": 68, "score": 142582.99163250727 }, { "content": "/// Get a localhost socket address with random, unused port.\n\npub fn unused_addr() -> net::SocketAddr {\n\n let addr: net::SocketAddr = \"127.0.0.1:0\".parse().unwrap();\n\n let socket = Socket::new(Domain::IPV4, Type::STREAM, Some(Protocol::TCP)).unwrap();\n\n socket.bind(&addr.into()).unwrap();\n\n socket.set_reuse_address(true).unwrap();\n\n let tcp = net::TcpListener::from(socket);\n\n tcp.local_addr().unwrap()\n\n}\n", "file_path": "actix-http-test/src/lib.rs", "rank": 69, "score": 142254.777824888 }, { "content": "fn check_slice_validity(slice: &str) -> bool {\n\n slice.bytes().all(entity_validate_char)\n\n}\n\n\n\n/// An entity tag, defined in [RFC 7232 §2.3].\n\n///\n\n/// An entity tag consists of a string enclosed by two literal double quotes.\n\n/// Preceding the first double quote is an optional weakness indicator,\n\n/// which always looks like `W/`. Examples for valid tags are `\"xyzzy\"` and\n\n/// `W/\"xyzzy\"`.\n\n///\n\n/// # ABNF\n\n/// ```plain\n\n/// entity-tag = [ weak ] opaque-tag\n\n/// weak = %x57.2F ; \"W/\", case-sensitive\n\n/// opaque-tag = DQUOTE *etagc DQUOTE\n\n/// etagc = %x21 / %x23-7E / obs-text\n\n/// ; VCHAR except double quotes, plus obs-text\n\n/// ```\n\n///\n", "file_path": "actix-web/src/http/header/entity.rs", "rank": 70, "score": 141345.0519529587 }, { "content": "fn bench_write_camel_case(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"write_camel_case\");\n\n\n\n let names = [\"connection\", \"Transfer-Encoding\", \"transfer-encoding\"];\n\n\n\n for &i in &names {\n\n let bts = i.as_bytes();\n\n\n\n group.bench_with_input(BenchmarkId::new(\"Original\", i), bts, |b, bts| {\n\n b.iter(|| {\n\n let mut buf = black_box([0; 24]);\n\n _original::write_camel_case(black_box(bts), &mut buf)\n\n });\n\n });\n\n\n\n group.bench_with_input(BenchmarkId::new(\"New\", i), bts, |b, bts| {\n\n b.iter(|| {\n\n let mut buf = black_box([0; 
24]);\n\n let len = black_box(bts.len());\n\n _new::write_camel_case(black_box(bts), buf.as_mut_ptr(), len)\n", "file_path": "actix-http/benches/write-camel-case.rs", "rank": 71, "score": 141119.19668833906 }, { "content": "fn ok_service() -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error> {\n\n status_service(StatusCode::OK)\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 72, "score": 138312.8671835276 }, { "content": "/// Payload buffer.\n\nstruct PayloadBuffer {\n\n eof: bool,\n\n buf: BytesMut,\n\n stream: LocalBoxStream<'static, Result<Bytes, PayloadError>>,\n\n}\n\n\n\nimpl PayloadBuffer {\n\n /// Constructs new `PayloadBuffer` instance.\n\n fn new<S>(stream: S) -> Self\n\n where\n\n S: Stream<Item = Result<Bytes, PayloadError>> + 'static,\n\n {\n\n PayloadBuffer {\n\n eof: false,\n\n buf: BytesMut::new(),\n\n stream: Box::pin(stream),\n\n }\n\n }\n\n\n\n fn poll_stream(&mut self, cx: &mut Context<'_>) -> Result<(), PayloadError> {\n", "file_path": "actix-multipart/src/server.rs", "rank": 73, "score": 136639.67513928594 }, { "content": "struct PayloadRef {\n\n payload: Rc<RefCell<PayloadBuffer>>,\n\n}\n\n\n\nimpl PayloadRef {\n\n fn new(payload: PayloadBuffer) -> PayloadRef {\n\n PayloadRef {\n\n payload: Rc::new(payload.into()),\n\n }\n\n }\n\n\n\n fn get_mut(&self, s: &Safety) -> Option<RefMut<'_, PayloadBuffer>> {\n\n if s.current() {\n\n Some(self.payload.borrow_mut())\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "actix-multipart/src/server.rs", "rank": 74, "score": 136633.59221481282 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n len: usize,\n\n eof: bool,\n\n err: Option<PayloadError>,\n\n need_read: bool,\n\n items: VecDeque<Bytes>,\n\n task: Option<Waker>,\n\n io_task: Option<Waker>,\n\n}\n\n\n\nimpl Inner {\n\n fn new(eof: bool) -> Self {\n\n Inner {\n\n eof,\n\n len: 0,\n\n err: None,\n\n items: VecDeque::new(),\n\n need_read: true,\n\n task: None,\n\n io_task: None,\n", 
"file_path": "actix-http/src/h1/payload.rs", "rank": 75, "score": 136633.59221481282 }, { "content": "#[cfg(feature = \"cookies\")]\n\nstruct Cookies(Vec<Cookie<'static>>);\n\n\n\n/// An incoming request.\n\n#[derive(Clone)]\n\npub struct HttpRequest {\n\n /// # Invariant\n\n /// `Rc<HttpRequestInner>` is used exclusively and NO `Weak<HttpRequestInner>`\n\n /// is allowed anywhere in the code. Weak pointer is purposely ignored when\n\n /// doing `Rc`'s ref counter check. Expect panics if this invariant is violated.\n\n pub(crate) inner: Rc<HttpRequestInner>,\n\n}\n\n\n\npub(crate) struct HttpRequestInner {\n\n pub(crate) head: Message<RequestHead>,\n\n pub(crate) path: Path<Url>,\n\n pub(crate) app_data: SmallVec<[Rc<Extensions>; 4]>,\n\n pub(crate) conn_data: Option<Rc<Extensions>>,\n\n pub(crate) extensions: Rc<RefCell<Extensions>>,\n\n app_state: Rc<AppInitServiceState>,\n\n}\n", "file_path": "actix-web/src/request.rs", "rank": 76, "score": 136473.49000362615 }, { "content": "#[inline]\n\npub fn from_comma_delimited<'a, I, T>(all: I) -> Result<Vec<T>, ParseError>\n\nwhere\n\n I: Iterator<Item = &'a HeaderValue> + 'a,\n\n T: FromStr,\n\n{\n\n let size_guess = all.size_hint().1.unwrap_or(2);\n\n let mut result = Vec::with_capacity(size_guess);\n\n\n\n for h in all {\n\n let s = h.to_str().map_err(|_| ParseError::Header)?;\n\n\n\n result.extend(\n\n s.split(',')\n\n .filter_map(|x| match x.trim() {\n\n \"\" => None,\n\n y => Some(y),\n\n })\n\n .filter_map(|x| x.trim().parse().ok()),\n\n )\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n\n/// Reads a single string when parsing a header.\n", "file_path": "actix-http/src/header/utils.rs", "rank": 77, "score": 136413.2675553435 }, { "content": "fn remove_sensitive_headers(headers: &mut header::HeaderMap, prev_uri: &Uri, next_uri: &Uri) {\n\n if next_uri.host() != prev_uri.host()\n\n || next_uri.port() != prev_uri.port()\n\n || next_uri.scheme() != prev_uri.scheme()\n\n {\n\n headers.remove(header::COOKIE);\n\n 
headers.remove(header::AUTHORIZATION);\n\n headers.remove(header::PROXY_AUTHORIZATION);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::str::FromStr;\n\n\n\n use actix_web::{web, App, Error, HttpRequest, HttpResponse};\n\n\n\n use super::*;\n\n use crate::{\n\n http::{header::HeaderValue, StatusCode},\n", "file_path": "awc/src/middleware/redirect.rs", "rank": 78, "score": 136335.29640748064 }, { "content": "/// Sealed trait implemented for types that can be effectively borrowed as a [`HeaderValue`].\n\n///\n\n/// [`HeaderValue`]: super::HeaderValue\n\npub trait AsHeaderName: Sealed {}\n\n\n\npub struct Seal;\n\n\n", "file_path": "actix-http/src/header/as_name.rs", "rank": 79, "score": 136313.24853867906 }, { "content": "#[inline]\n\npub fn q<T>(quality: T) -> Quality\n\nwhere\n\n T: TryInto<Quality>,\n\n T::Error: fmt::Debug,\n\n{\n\n quality.try_into().expect(\"quality value was out of bounds\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn q_helper() {\n\n assert_eq!(q(0.5), Quality(500));\n\n }\n\n\n\n #[test]\n\n fn display_output() {\n\n assert_eq!(Quality::ZERO.to_string(), \"0\");\n", "file_path": "actix-http/src/header/shared/quality.rs", "rank": 80, "score": 135416.51672789096 }, { "content": "/// Hashes the `Sec-WebSocket-Key` header according to the WebSocket spec.\n\n///\n\n/// Result is a Base64 encoded byte array. 
`base64(sha1(input))` is always 28 bytes.\n\npub fn hash_key(key: &[u8]) -> [u8; 28] {\n\n let hash = {\n\n use sha1::Digest as _;\n\n\n\n let mut hasher = sha1::Sha1::new();\n\n\n\n hasher.update(key);\n\n hasher.update(WS_GUID);\n\n\n\n hasher.finalize()\n\n };\n\n\n\n let mut hash_b64 = [0; 28];\n\n let n = base64::encode_config_slice(&hash, base64::STANDARD, &mut hash_b64);\n\n assert_eq!(n, 28);\n\n\n\n hash_b64\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "actix-http/src/ws/proto.rs", "rank": 81, "score": 135410.34437359776 }, { "content": "/// Creates a new resource for a specific path.\n\n///\n\n/// Resources may have dynamic path segments. For example, a resource with the path `/a/{name}/c`\n\n/// would match all incoming requests with paths such as `/a/b/c`, `/a/1/c`, or `/a/etc/c`.\n\n///\n\n/// A dynamic segment is specified in the form `{identifier}`, where the identifier can be used\n\n/// later in a request handler to access the matched value for that segment. This is done by looking\n\n/// up the identifier in the `Path` object returned by [`HttpRequest.match_info()`] method.\n\n///\n\n/// By default, each segment matches the regular expression `[^{}/]+`.\n\n///\n\n/// You can also specify a custom regex in the form `{identifier:regex}`:\n\n///\n\n/// For instance, to route `GET`-requests on any route matching `/users/{userid}/{friend}` and store\n\n/// `userid` and `friend` in the exposed `Path` object:\n\n///\n\n/// ```\n\n/// use actix_web::{web, App, HttpResponse};\n\n///\n\n/// let app = App::new().service(\n\n/// web::resource(\"/users/{userid}/{friend}\")\n\n/// .route(web::get().to(|| HttpResponse::Ok()))\n\n/// .route(web::head().to(|| HttpResponse::MethodNotAllowed()))\n\n/// );\n\n/// ```\n\npub fn resource<T: IntoPatterns>(path: T) -> Resource {\n\n Resource::new(path)\n\n}\n\n\n", "file_path": "actix-web/src/web.rs", "rank": 82, "score": 132659.25248238017 }, { "content": "#[derive(Clone)]\n\nstruct CustomRequestFn {\n\n inner_fn: Rc<dyn 
Fn(&ServiceRequest) -> String>,\n\n}\n\n\n\nimpl CustomRequestFn {\n\n fn call(&self, req: &ServiceRequest) -> String {\n\n (self.inner_fn)(req)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for CustomRequestFn {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(\"custom_request_fn\")\n\n }\n\n}\n\n\n\nimpl FormatText {\n\n fn render(\n\n &self,\n\n fmt: &mut fmt::Formatter<'_>,\n", "file_path": "actix-web/src/middleware/logger.rs", "rank": 83, "score": 131957.3200541515 }, { "content": "fn get_conn_info(connection: &dyn Any, data: &mut Extensions) {\n\n if let Some(sock) = connection.downcast_ref::<TcpStream>() {\n\n data.insert(ConnectionInfo {\n\n bind: sock.local_addr().unwrap(),\n\n peer: sock.peer_addr().unwrap(),\n\n ttl: sock.ttl().ok(),\n\n });\n\n } else {\n\n unreachable!(\"connection should only be plaintext since no TLS is set up\");\n\n }\n\n}\n\n\n\n#[actix_web::main]\n\nasync fn main() -> io::Result<()> {\n\n env_logger::init_from_env(env_logger::Env::new().default_filter_or(\"info\"));\n\n\n\n let bind = (\"127.0.0.1\", 8080);\n\n log::info!(\"staring server at http://{}:{}\", &bind.0, &bind.1);\n\n\n\n HttpServer::new(|| App::new().default_service(web::to(route_whoami)))\n\n .on_connect(get_conn_info)\n\n .bind(bind)?\n\n .workers(1)\n\n .run()\n\n .await\n\n}\n", "file_path": "actix-web/examples/on-connect.rs", "rank": 84, "score": 131373.00799312102 }, { "content": "type ConnectCallback<IO> = dyn Fn(&IO, &mut Extensions);\n\n\n\n/// Container for data that extract with ConnectCallback.\n\n///\n\n/// # Implementation Details\n\n/// Uses Option to reduce necessary allocations when merging with request extensions.\n\n#[derive(Default)]\n\npub(crate) struct OnConnectData(Option<Extensions>);\n\n\n\nimpl OnConnectData {\n\n /// Construct by calling the on-connect callback with the underlying transport I/O.\n\n pub(crate) fn from_io<T>(io: &T, on_connect_ext: Option<&ConnectCallback<T>>) -> Self {\n\n let ext = 
on_connect_ext.map(|handler| {\n\n let mut extensions = Extensions::default();\n\n handler(io, &mut extensions);\n\n extensions\n\n });\n\n\n\n Self(ext)\n\n }\n\n}\n", "file_path": "actix-http/src/lib.rs", "rank": 85, "score": 131373.00799312102 }, { "content": "/// Creates a raw service for a specific path.\n\n///\n\n/// ```\n\n/// use actix_web::{dev, web, guard, App, Error, HttpResponse};\n\n///\n\n/// async fn my_service(req: dev::ServiceRequest) -> Result<dev::ServiceResponse, Error> {\n\n/// Ok(req.into_response(HttpResponse::Ok().finish()))\n\n/// }\n\n///\n\n/// let app = App::new().service(\n\n/// web::service(\"/users/*\")\n\n/// .guard(guard::Header(\"content-type\", \"text/plain\"))\n\n/// .finish(my_service)\n\n/// );\n\n/// ```\n\npub fn service<T: IntoPatterns>(path: T) -> WebService {\n\n WebService::new(path)\n\n}\n\n\n", "file_path": "actix-web/src/web.rs", "rank": 86, "score": 130778.9499525922 }, { "content": "pub trait Sealed {\n\n fn try_as_name(&self, seal: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName>;\n\n}\n\n\n\nimpl Sealed for HeaderName {\n\n #[inline]\n\n fn try_as_name(&self, _: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName> {\n\n Ok(Cow::Borrowed(self))\n\n }\n\n}\n\nimpl AsHeaderName for HeaderName {}\n\n\n\nimpl Sealed for &HeaderName {\n\n #[inline]\n\n fn try_as_name(&self, _: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName> {\n\n Ok(Cow::Borrowed(*self))\n\n }\n\n}\n\nimpl AsHeaderName for &HeaderName {}\n\n\n", "file_path": "actix-http/src/header/as_name.rs", "rank": 87, "score": 130737.25098834878 }, { "content": "#[inline]\n\nfn apply_mask_fallback(buf: &mut [u8], mask: [u8; 4]) {\n\n for (i, byte) in buf.iter_mut().enumerate() {\n\n *byte ^= mask[i & 3];\n\n }\n\n}\n\n\n\n/// Faster version of `apply_mask()` which operates on 4-byte blocks.\n", "file_path": "actix-http/src/ws/mask.rs", "rank": 88, "score": 129581.14583866103 }, { "content": "struct WebServiceImpl<T> {\n\n srv: T,\n\n rdef: 
Patterns,\n\n name: Option<String>,\n\n guards: Vec<Box<dyn Guard>>,\n\n}\n\n\n\nimpl<T> HttpServiceFactory for WebServiceImpl<T>\n\nwhere\n\n T: ServiceFactory<\n\n ServiceRequest,\n\n Config = (),\n\n Response = ServiceResponse,\n\n Error = Error,\n\n InitError = (),\n\n > + 'static,\n\n{\n\n fn register(mut self, config: &mut AppService) {\n\n let guards = if self.guards.is_empty() {\n\n None\n", "file_path": "actix-web/src/service.rs", "rank": 89, "score": 128973.50698089541 }, { "content": "fn drop_payload_service(\n\n) -> impl Service<Request, Response = Response<&'static str>, Error = Error> {\n\n fn_service(|mut req: Request| async move {\n\n let _ = req.take_payload();\n\n Ok::<_, Error>(Response::with_body(StatusCode::OK, \"payload dropped\"))\n\n })\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 90, "score": 127914.3937337693 }, { "content": "#[cfg(feature = \"openssl\")]\n\nfn openssl_acceptor(mut builder: SslAcceptorBuilder) -> io::Result<SslAcceptor> {\n\n builder.set_alpn_select_callback(|_, protocols| {\n\n const H2: &[u8] = b\"\\x02h2\";\n\n const H11: &[u8] = b\"\\x08http/1.1\";\n\n\n\n if protocols.windows(3).any(|window| window == H2) {\n\n Ok(b\"h2\")\n\n } else if protocols.windows(9).any(|window| window == H11) {\n\n Ok(b\"http/1.1\")\n\n } else {\n\n Err(AlpnError::NOACK)\n\n }\n\n });\n\n\n\n builder.set_alpn_protos(b\"\\x08http/1.1\\x02h2\")?;\n\n\n\n Ok(builder.build())\n\n}\n", "file_path": "actix-web/src/server.rs", "rank": 91, "score": 126186.75820312864 }, { "content": "#[proc_macro_attribute]\n\npub fn route(args: TokenStream, input: TokenStream) -> TokenStream {\n\n route::with_method(None, args, input)\n\n}\n\n\n\nmacro_rules! 
method_macro {\n\n ($variant:ident, $method:ident) => {\n\n#[doc = concat!(\"Creates route handler with `actix_web::guard::\", stringify!($variant), \"`.\")]\n\n///\n\n/// # Syntax\n\n/// ```plain\n\n#[doc = concat!(\"#[\", stringify!($method), r#\"(\"path\"[, attributes])]\"#)]\n\n/// ```\n\n///\n\n/// # Attributes\n\n/// - `\"path\"`: Raw literal string with path for which to register handler.\n\n/// - `name = \"resource_name\"`: Specifies resource name for the handler. If not set, the function\n\n/// name of handler is used.\n\n/// - `guard = \"function_name\"`: Registers function as guard using `actix_web::guard::fn_guard`.\n\n/// - `wrap = \"Middleware\"`: Registers a resource middleware.\n\n///\n", "file_path": "actix-web-codegen/src/lib.rs", "rank": 92, "score": 125582.37797753935 }, { "content": "/// Prepare WebSocket handshake response.\n\n///\n\n/// This function returns handshake `HttpResponse`, ready to send to peer. It does not perform\n\n/// any IO.\n\npub fn handshake(req: &HttpRequest) -> Result<HttpResponseBuilder, HandshakeError> {\n\n handshake_with_protocols(req, &[])\n\n}\n\n\n", "file_path": "actix-web-actors/src/ws.rs", "rank": 93, "score": 123985.38120001301 }, { "content": "#[get(\"/test\")]\n\nfn auto_sync() -> impl Future<Output = Result<HttpResponse, actix_web::Error>> {\n\n ok(HttpResponse::Ok().finish())\n\n}\n\n\n\n#[put(\"/test/{param}\")]\n\nasync fn put_param_test(_: web::Path<String>) -> impl Responder {\n\n HttpResponse::Created()\n\n}\n\n\n\n#[delete(\"/test/{param}\")]\n\nasync fn delete_param_test(_: web::Path<String>) -> impl Responder {\n\n HttpResponse::NoContent()\n\n}\n\n\n\n#[get(\"/test/{param}\")]\n\nasync fn get_param_test(_: web::Path<String>) -> impl Responder {\n\n HttpResponse::Ok()\n\n}\n\n\n\n#[route(\"/multi\", method = \"GET\", method = \"POST\", method = \"HEAD\")]\n", "file_path": "actix-web-codegen/tests/test_macro.rs", "rank": 94, "score": 120800.08343490056 }, { "content": "#[get(\"/test\")]\n\nfn 
auto_async() -> impl Future<Output = Result<HttpResponse, actix_web::Error>> {\n\n ok(HttpResponse::Ok().finish())\n\n}\n\n\n", "file_path": "actix-web-codegen/tests/test_macro.rs", "rank": 95, "score": 120800.08343490056 }, { "content": "/// Converts the error to a token stream and appends it to the original input.\n\n///\n\n/// Returning the original input in addition to the error is good for IDEs which can gracefully\n\n/// recover and show more precise errors within the macro body.\n\n///\n\n/// See <https://github.com/rust-analyzer/rust-analyzer/issues/10468> for more info.\n\nfn input_and_compile_error(mut item: TokenStream, err: syn::Error) -> TokenStream {\n\n let compile_err = TokenStream::from(err.to_compile_error());\n\n item.extend(compile_err);\n\n item\n\n}\n", "file_path": "actix-web-codegen/src/route.rs", "rank": 96, "score": 120573.87138079226 }, { "content": "struct HeaderGuard(header::HeaderName, header::HeaderValue);\n\n\n\nimpl Guard for HeaderGuard {\n\n fn check(&self, ctx: &GuardContext<'_>) -> bool {\n\n if let Some(val) = ctx.head().headers.get(&self.0) {\n\n return val == self.1;\n\n }\n\n\n\n false\n\n }\n\n}\n\n\n\n/// Creates a guard that matches requests targetting a specific host.\n\n///\n\n/// # Matching Host\n\n/// This guard will:\n\n/// - match against the `Host` header, if present;\n\n/// - fall-back to matching against the request target's host, if present;\n\n/// - return false if host cannot be determined;\n\n///\n", "file_path": "actix-web/src/guard.rs", "rank": 97, "score": 114171.62243917253 }, { "content": "fn get_host_uri(req: &RequestHead) -> Option<Uri> {\n\n req.headers\n\n .get(header::HOST)\n\n .and_then(|host_value| host_value.to_str().ok())\n\n .or_else(|| req.uri.host())\n\n .and_then(|host| host.parse().ok())\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct HostGuard {\n\n host: String,\n\n scheme: Option<String>,\n\n}\n\n\n\nimpl HostGuard {\n\n /// Set request scheme to match\n\n pub fn scheme<H: AsRef<str>>(mut self, 
scheme: H) -> HostGuard {\n\n self.scheme = Some(scheme.as_ref().to_string());\n\n self\n\n }\n", "file_path": "actix-web/src/guard.rs", "rank": 98, "score": 112719.04374667355 }, { "content": "type PathFilter = dyn Fn(&Path, &RequestHead) -> bool;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{\n\n fs::{self},\n\n ops::Add,\n\n time::{Duration, SystemTime},\n\n };\n\n\n\n use actix_web::{\n\n dev::ServiceFactory,\n\n guard,\n\n http::{\n\n header::{self, ContentDisposition, DispositionParam, DispositionType},\n\n Method, StatusCode,\n\n },\n\n middleware::Compress,\n\n test::{self, TestRequest},\n\n web::{self, Bytes},\n", "file_path": "actix-files/src/lib.rs", "rank": 99, "score": 111167.33322058126 } ]
Rust
day16/main2.rs
allonsy/advent2017
644dd55ca9cc4319136123126c40330e9ba52de0
mod util; use std::collections::HashSet; const ARR_SIZE: usize = 16; struct Dance { index_arr: [u8; ARR_SIZE], char_arr: [char; ARR_SIZE], } impl Dance { fn new() -> Dance { Dance { index_arr: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], char_arr: [ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', ], } } fn rotate(&mut self, num: u8) { for i in 0..ARR_SIZE { self.index_arr[i] = (self.index_arr[i] + num) % ARR_SIZE as u8; self.char_arr[self.index_arr[i] as usize] = (i as u8 + 97) as char; } } fn swap_index(&mut self, from_index: usize, to_index: usize) { let temp = self.char_arr[to_index]; self.char_arr[to_index] = self.char_arr[from_index]; self.index_arr[self.char_arr[to_index] as usize - 97] = to_index as u8; self.char_arr[from_index] = temp; self.index_arr[temp as usize - 97] = from_index as u8; } fn swap_char(&mut self, from_char: char, to_char: char) { let from_char_idx: usize = from_char as usize - 97; let to_char_idx: usize = to_char as usize - 97; let temp = self.index_arr[to_char_idx]; self.index_arr[to_char_idx] = self.index_arr[from_char_idx]; self.char_arr[self.index_arr[to_char_idx] as usize] = to_char; self.index_arr[from_char_idx] = temp; self.char_arr[self.index_arr[from_char_idx] as usize] = from_char; } fn print_array(&self) { print!("array is: "); for i in 0..ARR_SIZE { print!("{}", self.char_arr[i]); } println!(""); } fn get_arr_str(&self) -> String { let mut arr_str: String = String::new(); for i in 0..ARR_SIZE { arr_str += &format!("{}", self.char_arr[i]); } return arr_str; } } enum Instruction { Spin(usize), Exchange(usize, usize), Partner(char, char), } fn main() { let total_num_iters = 1000000000; let instructions = get_instructions(); let mut dance = Dance::new(); let mut seen_before: HashSet<String> = HashSet::new(); let mut num_iters: Option<i32> = Option::None; for i in 0..total_num_iters { for instruction in &instructions { match instruction { Instruction::Spin(s) => dance.rotate(*s as u8), 
Instruction::Exchange(x, y) => dance.swap_index(*x, *y), Instruction::Partner(a, b) => dance.swap_char(*a, *b), } } let this_dance = dance.get_arr_str(); if seen_before.contains(&this_dance) { num_iters = Some(total_num_iters % i); break; } else { seen_before.insert(this_dance); } } println!("num_cycles is: {}", num_iters.unwrap()); dance = Dance::new(); for _ in 0..num_iters.unwrap() { for instruction in &instructions { match instruction { Instruction::Spin(s) => dance.rotate(*s as u8), Instruction::Exchange(x, y) => dance.swap_index(*x, *y), Instruction::Partner(a, b) => dance.swap_char(*a, *b), } } } dance.print_array(); } fn get_instructions() -> Vec<Instruction> { let line = util::read_file_string("input.txt"); let mut instructions = Vec::new(); for inst_str in line.split(",") { let bytes = inst_str.as_bytes(); match bytes[0] as char { 's' => { let num_str = String::from_utf8_lossy(&bytes[1..bytes.len()]).to_string(); instructions.push(Instruction::Spin(num_str.trim().parse().unwrap())); } 'x' => { let (slice1, slice2) = split_input(&bytes[1..bytes.len()]); let str1 = String::from_utf8_lossy(slice1).to_string(); let str2 = String::from_utf8_lossy(slice2).to_string(); instructions.push(Instruction::Exchange( str1.trim().parse().unwrap(), str2.trim().parse().unwrap(), )); } 'p' => instructions.push(Instruction::Partner(bytes[1] as char, bytes[3] as char)), _ => panic!("unknown character: {}", bytes[0]), } } return instructions; } fn split_input(input: &[u8]) -> (&[u8], &[u8]) { let mut i = 0; while i < input.len() { if input[i] == '/' as u8 { break; } i += 1; } return (&input[0..i], &input[i + 1..input.len()]); }
mod util; use std::collections::HashSet; const ARR_SIZE: usize = 16; struct Dance { index_arr: [u8; ARR_SIZE], char_arr: [char; ARR_SIZE], } impl Dance { fn new() -> Dance { Dance { index_arr: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], char_arr: [ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', ], } } fn rotate(&mut self, num: u8) { for i in 0..ARR_SIZE { self.index_arr[i] = (self.index_arr[i] + num) % ARR_SIZE as u8; self.char_arr[self.index_arr[i] as usize] = (i as u8 + 97) as char; } }
fn swap_char(&mut self, from_char: char, to_char: char) { let from_char_idx: usize = from_char as usize - 97; let to_char_idx: usize = to_char as usize - 97; let temp = self.index_arr[to_char_idx]; self.index_arr[to_char_idx] = self.index_arr[from_char_idx]; self.char_arr[self.index_arr[to_char_idx] as usize] = to_char; self.index_arr[from_char_idx] = temp; self.char_arr[self.index_arr[from_char_idx] as usize] = from_char; } fn print_array(&self) { print!("array is: "); for i in 0..ARR_SIZE { print!("{}", self.char_arr[i]); } println!(""); } fn get_arr_str(&self) -> String { let mut arr_str: String = String::new(); for i in 0..ARR_SIZE { arr_str += &format!("{}", self.char_arr[i]); } return arr_str; } } enum Instruction { Spin(usize), Exchange(usize, usize), Partner(char, char), } fn main() { let total_num_iters = 1000000000; let instructions = get_instructions(); let mut dance = Dance::new(); let mut seen_before: HashSet<String> = HashSet::new(); let mut num_iters: Option<i32> = Option::None; for i in 0..total_num_iters { for instruction in &instructions { match instruction { Instruction::Spin(s) => dance.rotate(*s as u8), Instruction::Exchange(x, y) => dance.swap_index(*x, *y), Instruction::Partner(a, b) => dance.swap_char(*a, *b), } } let this_dance = dance.get_arr_str(); if seen_before.contains(&this_dance) { num_iters = Some(total_num_iters % i); break; } else { seen_before.insert(this_dance); } } println!("num_cycles is: {}", num_iters.unwrap()); dance = Dance::new(); for _ in 0..num_iters.unwrap() { for instruction in &instructions { match instruction { Instruction::Spin(s) => dance.rotate(*s as u8), Instruction::Exchange(x, y) => dance.swap_index(*x, *y), Instruction::Partner(a, b) => dance.swap_char(*a, *b), } } } dance.print_array(); } fn get_instructions() -> Vec<Instruction> { let line = util::read_file_string("input.txt"); let mut instructions = Vec::new(); for inst_str in line.split(",") { let bytes = inst_str.as_bytes(); match bytes[0] as char { 's' 
=> { let num_str = String::from_utf8_lossy(&bytes[1..bytes.len()]).to_string(); instructions.push(Instruction::Spin(num_str.trim().parse().unwrap())); } 'x' => { let (slice1, slice2) = split_input(&bytes[1..bytes.len()]); let str1 = String::from_utf8_lossy(slice1).to_string(); let str2 = String::from_utf8_lossy(slice2).to_string(); instructions.push(Instruction::Exchange( str1.trim().parse().unwrap(), str2.trim().parse().unwrap(), )); } 'p' => instructions.push(Instruction::Partner(bytes[1] as char, bytes[3] as char)), _ => panic!("unknown character: {}", bytes[0]), } } return instructions; } fn split_input(input: &[u8]) -> (&[u8], &[u8]) { let mut i = 0; while i < input.len() { if input[i] == '/' as u8 { break; } i += 1; } return (&input[0..i], &input[i + 1..input.len()]); }
fn swap_index(&mut self, from_index: usize, to_index: usize) { let temp = self.char_arr[to_index]; self.char_arr[to_index] = self.char_arr[from_index]; self.index_arr[self.char_arr[to_index] as usize - 97] = to_index as u8; self.char_arr[from_index] = temp; self.index_arr[temp as usize - 97] = from_index as u8; }
function_block-full_function
[ { "content": "fn rotate(num: usize, arr: [char; ARR_SIZE]) -> [char; ARR_SIZE] {\n\n let mut new_arr = ['\\0'; ARR_SIZE];\n\n for i in 0..ARR_SIZE {\n\n let new_index = (i + num) % ARR_SIZE;\n\n new_arr[new_index] = arr[i];\n\n }\n\n return new_arr;\n\n}\n\n\n", "file_path": "day16/main1.rs", "rank": 0, "score": 207282.93982995115 }, { "content": "fn get_char_bytes(c: char) -> [bool; 4] {\n\n match c {\n\n '0' => [false, false, false, false],\n\n '1' => [false, false, false, true],\n\n '2' => [false, false, true, false],\n\n '3' => [false, false, true, true],\n\n '4' => [false, true, false, false],\n\n '5' => [false, true, false, true],\n\n '6' => [false, true, true, false],\n\n '7' => [false, true, true, true],\n\n '8' => [true, false, false, false],\n\n '9' => [true, false, false, true],\n\n 'a' => [true, false, true, false],\n\n 'b' => [true, false, true, true],\n\n 'c' => [true, true, false, false],\n\n 'd' => [true, true, false, true],\n\n 'e' => [true, true, true, false],\n\n 'f' => [true, true, true, true],\n\n _ => panic!(\"unknown char: {}\", c),\n\n }\n\n}\n", "file_path": "day14/main1.rs", "rank": 1, "score": 188114.795653902 }, { "content": "fn get_char_bytes(c: char) -> [bool; 4] {\n\n match c {\n\n '0' => [false, false, false, false],\n\n '1' => [false, false, false, true],\n\n '2' => [false, false, true, false],\n\n '3' => [false, false, true, true],\n\n '4' => [false, true, false, false],\n\n '5' => [false, true, false, true],\n\n '6' => [false, true, true, false],\n\n '7' => [false, true, true, true],\n\n '8' => [true, false, false, false],\n\n '9' => [true, false, false, true],\n\n 'a' => [true, false, true, false],\n\n 'b' => [true, false, true, true],\n\n 'c' => [true, true, false, false],\n\n 'd' => [true, true, false, true],\n\n 'e' => [true, true, true, false],\n\n 'f' => [true, true, true, true],\n\n _ => panic!(\"unknown char: {}\", c),\n\n }\n\n}\n\n\n", "file_path": "day14/main2.rs", "rank": 2, "score": 188114.795653902 }, { "content": 
"fn consume_char(queue: &mut LinkedList<u8>) -> u8 {\n\n return queue.pop_front().unwrap();\n\n}\n\n\n", "file_path": "day9/main1.rs", "rank": 3, "score": 159443.7267437899 }, { "content": "fn consume_char(queue: &mut LinkedList<u8>) -> u8 {\n\n return queue.pop_front().unwrap();\n\n}\n\n\n", "file_path": "day9/main2.rs", "rank": 4, "score": 159443.7267437899 }, { "content": "fn split_input(input: &[u8]) -> (&[u8], &[u8]) {\n\n let mut i = 0;\n\n while i < input.len() {\n\n if input[i] == '/' as u8 {\n\n break;\n\n }\n\n i += 1;\n\n }\n\n return (&input[0..i], &input[i + 1..input.len()]);\n\n}\n", "file_path": "day16/main1.rs", "rank": 6, "score": 142902.6572530656 }, { "content": "fn swap_index(arr: &mut [char; ARR_SIZE], from_index: usize, to_index: usize) {\n\n let temp = arr[to_index];\n\n arr[to_index] = arr[from_index];\n\n arr[from_index] = temp;\n\n}\n\n\n", "file_path": "day16/main1.rs", "rank": 7, "score": 137472.31299754948 }, { "content": "fn swap_chars(arr: &mut [char; ARR_SIZE], from_char: char, to_char: char) {\n\n let mut from_index = ARR_SIZE;\n\n let mut to_index = ARR_SIZE;\n\n\n\n for i in 0..ARR_SIZE {\n\n if arr[i] == from_char {\n\n from_index = i;\n\n } else if arr[i] == to_char {\n\n to_index = i;\n\n }\n\n }\n\n swap_index(arr, from_index, to_index);\n\n}\n\n\n", "file_path": "day16/main1.rs", "rank": 8, "score": 135617.24618470203 }, { "content": "fn parse_garbage(queue: &mut LinkedList<u8>) -> Vec<u8> {\n\n let mut chars = Vec::new();\n\n loop {\n\n let this_char = consume_char(queue);\n\n match this_char as char {\n\n '>' => break,\n\n '!' => {\n\n consume_char(queue);\n\n }\n\n _ => chars.push(this_char),\n\n }\n\n }\n\n return chars;\n\n}\n\n\n", "file_path": "day9/main1.rs", "rank": 9, "score": 124827.59867587319 }, { "content": "fn parse_garbage(queue: &mut LinkedList<u8>) -> Vec<u8> {\n\n let mut chars = Vec::new();\n\n loop {\n\n let this_char = consume_char(queue);\n\n match this_char as char {\n\n '>' => break,\n\n '!' 
=> {\n\n consume_char(queue);\n\n }\n\n _ => chars.push(this_char),\n\n }\n\n }\n\n return chars;\n\n}\n\n\n", "file_path": "day9/main2.rs", "rank": 10, "score": 124827.59867587319 }, { "content": "fn get_stream() -> LinkedList<u8> {\n\n let stream = util::read_file_string(\"input.txt\");\n\n let mut queue: LinkedList<u8> = LinkedList::new();\n\n\n\n for byte in stream.into_bytes() {\n\n queue.push_back(byte);\n\n }\n\n\n\n return queue;\n\n}\n", "file_path": "day9/main2.rs", "rank": 11, "score": 122762.20856530903 }, { "content": "fn get_stream() -> LinkedList<u8> {\n\n let stream = util::read_file_string(\"input.txt\");\n\n let mut queue: LinkedList<u8> = LinkedList::new();\n\n\n\n for byte in stream.into_bytes() {\n\n queue.push_back(byte);\n\n }\n\n\n\n return queue;\n\n}\n", "file_path": "day9/main1.rs", "rank": 12, "score": 122762.20856530903 }, { "content": "fn get_start_arr() -> [char; ARR_SIZE] {\n\n [\n\n 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p',\n\n ]\n\n}\n\n\n", "file_path": "day16/main1.rs", "rank": 13, "score": 121003.83248602106 }, { "content": "fn byte_to_int(input: u8) -> i32 {\n\n let zero_byte = '0' as u8;\n\n let u8_result = input - zero_byte;\n\n return u8_result as i32;\n\n}\n", "file_path": "day1/main2.rs", "rank": 14, "score": 119717.06901592329 }, { "content": "fn parse_group(queue: &mut LinkedList<u8>) -> Group {\n\n let mut items = Vec::new();\n\n\n\n loop {\n\n let this_char = consume_char(queue);\n\n match this_char as char {\n\n '}' => break,\n\n '<' => items.push(GroupItem::Garbage(parse_garbage(queue))),\n\n '{' => items.push(GroupItem::Group(parse_group(queue))),\n\n _ => panic!(\"invalid character!\"),\n\n }\n\n let next_char = consume_char(queue);\n\n match next_char as char {\n\n '}' => break,\n\n ',' => {}\n\n _ => panic!(\"invalid character!\"),\n\n }\n\n }\n\n return Group { items: items };\n\n}\n\n\n", "file_path": "day9/main2.rs", "rank": 16, "score": 109158.8101829633 }, { "content": 
"fn parse_group(queue: &mut LinkedList<u8>) -> Group {\n\n let mut items = Vec::new();\n\n\n\n loop {\n\n let this_char = consume_char(queue);\n\n match this_char as char {\n\n '}' => break,\n\n '<' => items.push(GroupItem::Garbage(parse_garbage(queue))),\n\n '{' => items.push(GroupItem::Group(parse_group(queue))),\n\n _ => panic!(\"invalid character!\"),\n\n }\n\n let next_char = consume_char(queue);\n\n match next_char as char {\n\n '}' => break,\n\n ',' => {}\n\n _ => panic!(\"invalid character!\"),\n\n }\n\n }\n\n return Group { items: items };\n\n}\n\n\n", "file_path": "day9/main1.rs", "rank": 17, "score": 109158.8101829633 }, { "content": "pub fn pause() {\n\n print!(\"continue? \");\n\n std::io::stdout().flush();\n\n let mut garbage = String::new();\n\n std::io::stdin().read_line(&mut garbage).unwrap();\n\n}\n", "file_path": "util/util.rs", "rank": 18, "score": 104746.26270211278 }, { "content": "fn manhattan_distance(a: i64, b: i64, c: i64) -> i64 {\n\n return a.abs() + b.abs() + c.abs();\n\n}\n\n\n", "file_path": "day20/main1.rs", "rank": 19, "score": 102377.28305308022 }, { "content": "pub fn pause() {\n\n print!(\"continue? \");\n\n std::io::stdout().flush();\n\n let mut garbage = String::new();\n\n std::io::stdin().read_line(&mut garbage).unwrap();\n\n}\n", "file_path": "day19/util.rs", "rank": 20, "score": 96033.93122434702 }, { "content": "pub fn pause() {\n\n print!(\"continue? \");\n\n std::io::stdout().flush();\n\n let mut garbage = String::new();\n\n std::io::stdin().read_line(&mut garbage).unwrap();\n\n}\n", "file_path": "day21/util.rs", "rank": 21, "score": 96033.93122434702 }, { "content": "pub fn pause() {\n\n print!(\"continue? \");\n\n std::io::stdout().flush();\n\n let mut garbage = String::new();\n\n std::io::stdin().read_line(&mut garbage).unwrap();\n\n}\n", "file_path": "day22/util.rs", "rank": 22, "score": 96033.93122434702 }, { "content": "pub fn pause() {\n\n print!(\"continue? 
\");\n\n std::io::stdout().flush();\n\n let mut garbage = String::new();\n\n std::io::stdin().read_line(&mut garbage).unwrap();\n\n}\n", "file_path": "day24/util.rs", "rank": 23, "score": 96033.93122434702 }, { "content": "pub fn pause() {\n\n print!(\"continue? \");\n\n std::io::stdout().flush();\n\n let mut garbage = String::new();\n\n std::io::stdin().read_line(&mut garbage).unwrap();\n\n}\n", "file_path": "day20/util.rs", "rank": 24, "score": 96033.93122434702 }, { "content": "pub fn pause() {\n\n print!(\"continue? \");\n\n std::io::stdout().flush();\n\n let mut garbage = String::new();\n\n std::io::stdin().read_line(&mut garbage).unwrap();\n\n}\n", "file_path": "day23/util.rs", "rank": 25, "score": 96033.93122434702 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n\n\n", "file_path": "util/util.rs", "rank": 26, "score": 91880.93857614533 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "util/util.rs", "rank": 27, "score": 89281.36251045034 }, { "content": "fn is_prime(n: i64) -> bool {\n\n if n == 2 {\n\n return true;\n\n }\n\n if n % 2 == 0 {\n\n return false;\n\n }\n\n\n\n let sqrt = (n as f64).sqrt() as i64;\n\n for val in 2..(sqrt + 1) {\n\n if n % val == 0 {\n\n return false;\n\n }\n\n }\n\n\n\n return true;\n\n}\n", "file_path": "day23/main2.rs", "rank": 28, "score": 88026.25433916211 }, { "content": "fn mod_index(index: i32) -> i32 {\n\n if index >= 0 {\n\n index % HASH_SIZE\n\n } else {\n\n HASH_SIZE + (index % HASH_SIZE)\n\n }\n\n}\n\n\n", 
"file_path": "day14/hash.rs", "rank": 29, "score": 87680.69013112887 }, { "content": "fn mod_index(index: i32) -> i32 {\n\n if index >= 0 {\n\n index % HASH_SIZE\n\n } else {\n\n HASH_SIZE + (index % HASH_SIZE)\n\n }\n\n}\n", "file_path": "day10/main1.rs", "rank": 30, "score": 87680.69013112887 }, { "content": "fn mod_index(index: i32) -> i32 {\n\n if index >= 0 {\n\n index % HASH_SIZE\n\n } else {\n\n HASH_SIZE + (index % HASH_SIZE)\n\n }\n\n}\n\n\n", "file_path": "day10/main2.rs", "rank": 31, "score": 87680.69013112887 }, { "content": "fn last_char_comma(name: &String) -> bool {\n\n let bytes = name.clone().into_bytes();\n\n let bytes_len = bytes.len();\n\n bytes[bytes_len - 1] == ',' as u8\n\n}\n", "file_path": "day7/main2.rs", "rank": 32, "score": 85709.2058763919 }, { "content": "fn last_char_comma(name: &String) -> bool {\n\n let bytes = name.clone().into_bytes();\n\n let bytes_len = bytes.len();\n\n bytes[bytes_len - 1] == ',' as u8\n\n}\n", "file_path": "day7/main1.rs", "rank": 33, "score": 85709.2058763919 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n", "file_path": "day8/util.rs", "rank": 34, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n", "file_path": "day16/util.rs", "rank": 35, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n\n\n", "file_path": "day19/util.rs", "rank": 36, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let 
mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n", "file_path": "day13/util.rs", "rank": 37, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n\n\n", "file_path": "day20/util.rs", "rank": 38, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n\n\n", "file_path": "day24/util.rs", "rank": 39, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n", "file_path": "day18/util.rs", "rank": 40, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n", "file_path": "day11/util.rs", "rank": 41, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n\n\n", "file_path": "day21/util.rs", "rank": 42, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n", "file_path": "day12/util.rs", "rank": 43, "score": 
82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n\n\n", "file_path": "day22/util.rs", "rank": 44, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n", "file_path": "day9/util.rs", "rank": 45, "score": 82179.71181415916 }, { "content": "pub fn read_file_string(fname: &str) -> String {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n return contents;\n\n}\n\n\n", "file_path": "day23/util.rs", "rank": 46, "score": 82179.71181415916 }, { "content": "fn get_max_index(blocks: &[i32; ARRAY_SIZE]) -> usize {\n\n let mut max = blocks[0];\n\n let mut max_idx = 0;\n\n for i in 1..ARRAY_SIZE {\n\n if max < blocks[i] {\n\n max = blocks[i];\n\n max_idx = i;\n\n }\n\n }\n\n\n\n return max_idx;\n\n}\n", "file_path": "day6/main1.rs", "rank": 47, "score": 81167.95957655348 }, { "content": "fn get_max_index(blocks: &[i32; ARRAY_SIZE]) -> usize {\n\n let mut max = blocks[0];\n\n let mut max_idx = 0;\n\n for i in 1..ARRAY_SIZE {\n\n if max < blocks[i] {\n\n max = blocks[i];\n\n max_idx = i;\n\n }\n\n }\n\n\n\n return max_idx;\n\n}\n", "file_path": "day6/main2.rs", "rank": 48, "score": 81167.95957655348 }, { "content": "fn get_resultant_direction((old_r, old_c): (i32, i32), (new_r, new_c): (i32, i32)) -> Direction {\n\n if old_r > new_r {\n\n Direction::Up\n\n } else if old_r < new_r {\n\n Direction::Down\n\n } else {\n\n if old_c < new_c {\n\n Direction::Right\n\n } else if old_c > new_c {\n\n Direction::Left\n\n } else {\n\n panic!(\"old and new are the same!\");\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"day19/main1.rs", "rank": 49, "score": 81164.39952354113 }, { "content": "fn get_resultant_direction((old_r, old_c): (i32, i32), (new_r, new_c): (i32, i32)) -> Direction {\n\n if old_r > new_r {\n\n Direction::Up\n\n } else if old_r < new_r {\n\n Direction::Down\n\n } else {\n\n if old_c < new_c {\n\n Direction::Right\n\n } else if old_c > new_c {\n\n Direction::Left\n\n } else {\n\n panic!(\"old and new are the same!\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "day19/main2.rs", "rank": 50, "score": 81164.39952354113 }, { "content": "fn get_other_port(bridge: &Bridge, starting_num: u64) -> u64 {\n\n if bridge.port_a == starting_num {\n\n return bridge.port_b;\n\n } else {\n\n return bridge.port_a;\n\n }\n\n}\n\n\n", "file_path": "day24/main2.rs", "rank": 51, "score": 80096.09302386123 }, { "content": "fn get_other_port(bridge: &Bridge, starting_num: u64) -> u64 {\n\n if bridge.port_a == starting_num {\n\n return bridge.port_b;\n\n } else {\n\n return bridge.port_a;\n\n }\n\n}\n\n\n", "file_path": "day24/main1.rs", "rank": 52, "score": 80096.09302386123 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n", "file_path": "day5/util.rs", "rank": 53, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day11/util.rs", "rank": 54, "score": 79463.38587066074 }, { "content": "pub fn 
read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n", "file_path": "day4/util.rs", "rank": 55, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day22/util.rs", "rank": 56, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day19/util.rs", "rank": 57, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day23/util.rs", "rank": 58, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = 
Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day24/util.rs", "rank": 59, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day20/util.rs", "rank": 60, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day12/util.rs", "rank": 61, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day13/util.rs", "rank": 62, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day16/util.rs", "rank": 63, "score": 
79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day21/util.rs", "rank": 64, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n", "file_path": "day7/util.rs", "rank": 65, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day18/util.rs", "rank": 66, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day9/util.rs", "rank": 67, "score": 79463.38587066074 }, { "content": "pub fn read_file_lines(fname: &str) -> Vec<String> {\n\n let mut f = File::open(fname).unwrap();\n\n\n\n let mut contents = String::new();\n\n f.read_to_string(&mut 
contents).unwrap();\n\n\n\n let mut rows = Vec::new();\n\n\n\n for fline in contents.lines() {\n\n if !fline.is_empty() {\n\n rows.push(fline.to_owned());\n\n }\n\n }\n\n return rows;\n\n}\n\n\n", "file_path": "day8/util.rs", "rank": 68, "score": 79463.38587066074 }, { "content": "fn update_highest_value(state: &mut State, new_val: i32) {\n\n match state.highest_value {\n\n None => state.highest_value = Some(new_val),\n\n Some(old_val) => {\n\n state.highest_value = if old_val < new_val {\n\n Some(new_val)\n\n } else {\n\n Some(old_val)\n\n }\n\n }\n\n };\n\n}\n\n\n", "file_path": "day8/main2.rs", "rank": 69, "score": 78523.88461335594 }, { "content": "fn get_new_balanced_weight(tower: &Pointer, expected_weight: i64) -> i64 {\n\n let (bad_child, good_weight) = analyze_children(&tower.borrow().children);\n\n\n\n if bad_child.is_some() {\n\n if good_weight.is_some() {\n\n return get_new_balanced_weight(&bad_child.unwrap(), good_weight.unwrap());\n\n }\n\n let this_weight = tower.borrow().weight as i64;\n\n return get_new_balanced_weight(&bad_child.unwrap(), expected_weight - this_weight);\n\n }\n\n\n\n let bad_weight = get_bad_weight(&tower.borrow().children);\n\n if bad_weight.is_none() {\n\n let cur_weight = get_tower_weight(tower);\n\n let diff = expected_weight - cur_weight;\n\n return tower.borrow().weight as i64 + diff;\n\n }\n\n\n\n let (normal_weight, bad_node) = bad_weight.unwrap();\n\n let cur_weight = get_tower_weight(&bad_node);\n\n let diff = normal_weight - cur_weight;\n\n return bad_node.borrow().weight as i64 + diff;\n\n}\n\n\n", "file_path": "day7/main2.rs", "rank": 70, "score": 77023.10217214057 }, { "content": "fn get_strongest_bridge(bridges: &Vec<Bridge>, starting_num: u64) -> i64 {\n\n let mut strongest = -1;\n\n for bridge in bridges {\n\n if bridge.port_a == starting_num || bridge.port_b == starting_num {\n\n let mut new_bridges = bridges.clone();\n\n remove_bridge(&mut new_bridges, &bridge);\n\n let other_port = get_other_port(&bridge, 
starting_num);\n\n let bridge_strength = starting_num + other_port;\n\n let chain_strength = get_strongest_bridge(&new_bridges, other_port);\n\n let possible_strongest = if chain_strength > -1 {\n\n bridge_strength as i64 + chain_strength\n\n } else {\n\n bridge_strength as i64\n\n };\n\n if possible_strongest > strongest {\n\n strongest = possible_strongest;\n\n }\n\n }\n\n }\n\n return strongest;\n\n}\n\n\n", "file_path": "day24/main1.rs", "rank": 71, "score": 76046.58205714659 }, { "content": "struct State {\n\n cpu: HashMap<String, i32>,\n\n highest_value: Option<i32>,\n\n}\n\n\n", "file_path": "day8/main2.rs", "rank": 72, "score": 75686.4245102533 }, { "content": "struct Image {\n\n pixels: Vec<Vec<bool>>,\n\n}\n\n\n\nimpl Image {\n\n fn new(str_rep: &str) -> Image {\n\n Image {\n\n pixels: Image::get_pixels_from_string(str_rep),\n\n }\n\n }\n\n\n\n fn get_pixels_from_string(str_rep: &str) -> Vec<Vec<bool>> {\n\n let rows: Vec<&str> = str_rep.split(\"/\").collect();\n\n\n\n let mut pixels = Vec::new();\n\n for row in rows {\n\n let mut pixel_row = Vec::new();\n\n for ch in row.chars() {\n\n match ch {\n\n '#' => pixel_row.push(true),\n", "file_path": "day21/main1.rs", "rank": 73, "score": 75686.4245102533 }, { "content": "#[derive(Debug)]\n\nstruct Particle {\n\n x: i64,\n\n y: i64,\n\n z: i64,\n\n vx: i64,\n\n vy: i64,\n\n vz: i64,\n\n ax: i64,\n\n ay: i64,\n\n az: i64,\n\n}\n\n\n\nimpl Particle {\n\n fn tick(&mut self) {\n\n self.vx += self.ax;\n\n self.vy += self.ay;\n\n self.vz += self.az;\n\n\n\n self.x += self.vx;\n\n self.y += self.vy;\n", "file_path": "day20/main2.rs", "rank": 74, "score": 75686.4245102533 }, { "content": "struct State {\n\n instructions: Vec<Instruction>,\n\n instruction_ptr: usize,\n\n registers: HashMap<char, i64>,\n\n last_played: Option<i64>,\n\n last_recover: Option<i64>,\n\n}\n\n\n\nimpl State {\n\n fn new(instructions: Vec<Instruction>) -> State {\n\n State {\n\n instructions: instructions,\n\n instruction_ptr: 0,\n\n 
registers: HashMap::new(),\n\n last_played: None,\n\n last_recover: None,\n\n }\n\n }\n\n\n\n fn get_register_value(&mut self, reg_name: char) -> i64 {\n", "file_path": "day18/main1.rs", "rank": 75, "score": 75686.4245102533 }, { "content": "#[derive(Debug)]\n\nstruct Layer {\n\n cur_pos: i32,\n\n range: i32,\n\n is_down: bool,\n\n}\n\n\n", "file_path": "day13/main2.rs", "rank": 76, "score": 75686.4245102533 }, { "content": "struct Board {\n\n cells: Vec<Vec<CellState>>,\n\n current_direction: Direction,\n\n current_pos: (usize, usize),\n\n num_infections: usize,\n\n}\n\n\n\nimpl Board {\n\n fn create() -> Board {\n\n let lines = util::read_file_lines(\"input.txt\");\n\n let mut cells = Vec::new();\n\n\n\n for line in lines {\n\n let mut row = Vec::new();\n\n for cell in line.chars() {\n\n if cell == '.' {\n\n row.push(CellState::Clean);\n\n } else if cell == '#' {\n\n row.push(CellState::Infected);\n\n }\n", "file_path": "day22/main2.rs", "rank": 77, "score": 75686.4245102533 }, { "content": "struct Condition {\n\n register: String,\n\n relation: Relation,\n\n operand: i32,\n\n}\n\n\n", "file_path": "day8/main1.rs", "rank": 78, "score": 75686.4245102533 }, { "content": "struct Group {\n\n items: Vec<GroupItem>,\n\n}\n\n\n", "file_path": "day9/main2.rs", "rank": 79, "score": 75686.4245102533 }, { "content": "struct Layer {\n\n cur_pos: i32,\n\n range: i32,\n\n is_down: bool,\n\n}\n\n\n", "file_path": "day13/main1.rs", "rank": 80, "score": 75686.4245102533 }, { "content": "struct Board {\n\n cells: Vec<Vec<bool>>,\n\n current_direction: Direction,\n\n current_pos: (usize, usize),\n\n num_infections: usize,\n\n}\n\n\n\nimpl Board {\n\n fn create() -> Board {\n\n let lines = util::read_file_lines(\"input.txt\");\n\n let mut cells = Vec::new();\n\n\n\n for line in lines {\n\n let mut row = Vec::new();\n\n for cell in line.chars() {\n\n if cell == '.' 
{\n\n row.push(false);\n\n } else if cell == '#' {\n\n row.push(true);\n\n }\n", "file_path": "day22/main1.rs", "rank": 81, "score": 75686.4245102533 }, { "content": "struct Image {\n\n pixels: Vec<Vec<bool>>,\n\n}\n\n\n\nimpl Image {\n\n fn new(str_rep: &str) -> Image {\n\n Image {\n\n pixels: Image::get_pixels_from_string(str_rep),\n\n }\n\n }\n\n\n\n fn get_pixels_from_string(str_rep: &str) -> Vec<Vec<bool>> {\n\n let rows: Vec<&str> = str_rep.split(\"/\").collect();\n\n\n\n let mut pixels = Vec::new();\n\n for row in rows {\n\n let mut pixel_row = Vec::new();\n\n for ch in row.chars() {\n\n match ch {\n\n '#' => pixel_row.push(true),\n", "file_path": "day21/main2.rs", "rank": 82, "score": 75686.4245102533 }, { "content": "struct Instruction {\n\n register: String,\n\n command: Command,\n\n operand: i32,\n\n condition: Condition,\n\n}\n\n\n", "file_path": "day8/main1.rs", "rank": 83, "score": 75686.4245102533 }, { "content": "#[derive(Clone, PartialEq)]\n\nstruct Bridge {\n\n port_a: u64,\n\n port_b: u64,\n\n}\n\n\n", "file_path": "day24/main1.rs", "rank": 84, "score": 75686.4245102533 }, { "content": "#[derive(Clone, PartialEq)]\n\nstruct Bridge {\n\n port_a: u64,\n\n port_b: u64,\n\n}\n\n\n", "file_path": "day24/main2.rs", "rank": 85, "score": 75686.4245102533 }, { "content": "struct State {\n\n instructions: Vec<Instruction>,\n\n instruction_ptr: usize,\n\n registers: HashMap<char, i64>,\n\n num_muls: usize,\n\n}\n\n\n\nimpl State {\n\n fn new(instructions: Vec<Instruction>) -> State {\n\n let st = State {\n\n instructions: instructions,\n\n instruction_ptr: 0,\n\n registers: HashMap::new(),\n\n num_muls: 0,\n\n };\n\n st\n\n }\n\n\n\n fn get_register_value(&mut self, reg_name: char) -> i64 {\n\n return *self.registers.entry(reg_name).or_insert(0);\n", "file_path": "day23/main1.rs", "rank": 86, "score": 75686.4245102533 }, { "content": "struct Generator {\n\n prev_value: u64,\n\n factor: u64,\n\n divisor: u64,\n\n}\n\n\n\nimpl Generator {\n\n fn 
new(prev_value: u64, factor: u64, divisor: u64) -> Generator {\n\n Generator {\n\n prev_value: prev_value,\n\n factor: factor,\n\n divisor: divisor,\n\n }\n\n }\n\n\n\n fn next(&mut self) -> u64 {\n\n self.prev_value = (self.prev_value * self.factor) % 2147483647;\n\n while self.prev_value % self.divisor != 0 {\n\n self.prev_value = (self.prev_value * self.factor) % 2147483647;\n\n }\n\n self.prev_value\n\n }\n\n}\n\n\n", "file_path": "day15/main2.rs", "rank": 87, "score": 75686.4245102533 }, { "content": "struct Node {\n\n name: i32,\n\n connections: HashSet<i32>,\n\n}\n\n\n", "file_path": "day12/main2.rs", "rank": 88, "score": 75686.4245102533 }, { "content": "struct Generator {\n\n prev_value: u64,\n\n factor: u64,\n\n}\n\n\n\nimpl Generator {\n\n fn new(prev_value: u64, factor: u64) -> Generator {\n\n Generator {\n\n prev_value: prev_value,\n\n factor: factor,\n\n }\n\n }\n\n\n\n fn next(&mut self) -> u64 {\n\n self.prev_value = (self.prev_value * self.factor) % 2147483647;\n\n self.prev_value\n\n }\n\n}\n\n\n", "file_path": "day15/main1.rs", "rank": 89, "score": 75686.4245102533 }, { "content": "#[derive(Debug)]\n\nstruct Particle {\n\n x: i64,\n\n y: i64,\n\n z: i64,\n\n vx: i64,\n\n vy: i64,\n\n vz: i64,\n\n ax: i64,\n\n ay: i64,\n\n az: i64,\n\n}\n\n\n", "file_path": "day20/main1.rs", "rank": 90, "score": 75686.4245102533 }, { "content": "struct Condition {\n\n register: String,\n\n relation: Relation,\n\n operand: i32,\n\n}\n\n\n", "file_path": "day8/main2.rs", "rank": 91, "score": 75686.4245102533 }, { "content": "struct State {\n\n instructions: Vec<Instruction>,\n\n instruction_ptr: usize,\n\n registers: HashMap<char, i64>,\n\n send_channel: Queue,\n\n recv_channel: Queue,\n\n id: usize,\n\n num_sends: usize,\n\n}\n\n\n\nimpl State {\n\n fn new(instructions: Vec<Instruction>, sc: Queue, rc: Queue, id: usize) -> State {\n\n let mut st = State {\n\n instructions: instructions,\n\n instruction_ptr: 0,\n\n registers: HashMap::new(),\n\n send_channel: 
sc,\n\n recv_channel: rc,\n\n id: id,\n\n num_sends: 0,\n", "file_path": "day18/main2.rs", "rank": 92, "score": 75686.4245102533 }, { "content": "struct Instruction {\n\n register: String,\n\n command: Command,\n\n operand: i32,\n\n condition: Condition,\n\n}\n\n\n", "file_path": "day8/main2.rs", "rank": 93, "score": 75686.4245102533 }, { "content": "struct Node {\n\n name: i32,\n\n connections: HashSet<i32>,\n\n}\n\n\n", "file_path": "day12/main1.rs", "rank": 94, "score": 75686.4245102533 }, { "content": "struct Group {\n\n items: Vec<GroupItem>,\n\n}\n\n\n", "file_path": "day9/main1.rs", "rank": 95, "score": 75686.4245102533 }, { "content": "struct CircularBuffer {\n\n current_position: i32,\n\n iter_num: i32,\n\n round_size: i32,\n\n zero_pos: i32,\n\n buf_size: i32,\n\n after_zero: i32,\n\n}\n\n\n\nimpl CircularBuffer {\n\n fn new() -> CircularBuffer {\n\n CircularBuffer {\n\n current_position: 0,\n\n iter_num: 1,\n\n round_size: ROUND_SIZE,\n\n zero_pos: 0,\n\n buf_size: 1,\n\n after_zero: 0,\n\n }\n\n }\n", "file_path": "day17/main2.rs", "rank": 96, "score": 74410.61471790998 }, { "content": "struct Machine {\n\n tape: Vec<bool>,\n\n cursor: usize,\n\n state: State,\n\n num_ones: usize,\n\n}\n\n\n\nimpl Machine {\n\n fn new() -> Machine {\n\n Machine {\n\n tape: vec![false],\n\n cursor: 0,\n\n state: State::A,\n\n num_ones: 0,\n\n }\n\n }\n\n\n\n fn iterate(&mut self) {\n\n match &self.state {\n\n State::A => {\n", "file_path": "day24/day25/main1.rs", "rank": 97, "score": 74410.61471790998 }, { "content": "#[derive(Debug)]\n\nstruct ProgramTower {\n\n name: String,\n\n weight: i32,\n\n children: HashMap<String, Pointer>,\n\n}\n\n\n\nimpl ProgramTower {\n\n fn new(name: String, weight: i32, children: HashMap<String, Pointer>) -> ProgramTower {\n\n ProgramTower {\n\n name: name,\n\n weight: weight,\n\n children: children,\n\n }\n\n }\n\n}\n\n\n", "file_path": "day7/main1.rs", "rank": 98, "score": 74410.61471790998 }, { "content": "struct CircularBuffer {\n\n 
buffer: Vec<i32>,\n\n current_position: i32,\n\n iter_num: i32,\n\n round_size: i32,\n\n}\n\n\n\nimpl CircularBuffer {\n\n fn new() -> CircularBuffer {\n\n CircularBuffer {\n\n buffer: vec![0],\n\n current_position: 0,\n\n iter_num: 1,\n\n round_size: ROUND_SIZE,\n\n }\n\n }\n\n\n\n fn perform_round(&mut self) {\n\n self.current_position += self.round_size;\n\n self.current_position %= self.buffer.len() as i32;\n", "file_path": "day17/main1.rs", "rank": 99, "score": 74410.61471790998 } ]
Rust
src/screen/agent_info.rs
timcryt/zemeroth
7b6b51add0f90e9c85e3a9c3a3cd890c9239b4a6
use std::{collections::HashMap, time::Duration}; use gwg::{ graphics::{self, Color, Image, Point2, Text}, Context, }; use heck::TitleCase; use ui::{self, Gui, Widget}; use crate::{ core::battle::{ ability::{Ability, PassiveAbility}, component::{self, Component, ObjType, Prototypes}, }, screen::{self, Screen, StackCommand}, sprite_info::SpriteInfo, utils, ZResult, }; #[derive(Clone, Debug, Default)] struct StaticObjectInfo { meta: Option<component::Meta>, strength: Option<component::Strength>, armor: Option<component::Armor>, agent: Option<component::Agent>, blocker: Option<component::Blocker>, abilities: Option<component::Abilities>, passive_abilities: Option<component::PassiveAbilities>, summoner: Option<component::Summoner>, } impl StaticObjectInfo { fn new(typename: &ObjType, components: &[Component]) -> Self { let mut this = StaticObjectInfo::default(); let name = typename.clone(); this.meta = Some(component::Meta { name }); for component in components { match component.clone() { Component::Strength(c) => this.strength = Some(c), Component::Armor(c) => this.armor = Some(c), Component::Meta(c) => this.meta = Some(c), Component::Agent(c) => this.agent = Some(c), Component::Abilities(c) => this.abilities = Some(c), Component::PassiveAbilities(c) => this.passive_abilities = Some(c), Component::Summoner(c) => this.summoner = Some(c), Component::Blocker(c) => this.blocker = Some(c), Component::BelongsTo(_) | Component::Pos(_) | Component::Effects(_) | Component::Schedule(_) => (), } } this } } type SpritesInfo = HashMap<String, SpriteInfo>; fn load_sprites_info(context: &mut Context) -> ZResult<SpritesInfo> { let info = utils::deserialize_from_file(context, "/sprites.ron")?; Ok(info) } fn agent_image(context: &mut Context, typename: &ObjType) -> ZResult<Box<dyn ui::Widget>> { let h = 0.3; let sprites_info = load_sprites_info(context)?; let sprite_info = sprites_info[&typename.0].clone(); let default_frame = ""; let default_frame_path = 
&sprite_info.paths[default_frame]; let image = Image::new(context, default_frame_path).expect("Can't load agent's image"); let label = ui::Label::new(context, Box::new(image), h)? .with_color(Color::new(1.0, 1.0, 1.0, 1.0)) .stretchable(true); Ok(Box::new(label)) } #[derive(Clone, Debug)] enum Message { Back, AbilityInfo(Ability), PassiveAbilityInfo(PassiveAbility), } fn info_panel( context: &mut Context, font: graphics::Font, gui: &mut ui::Gui<Message>, prototypes: &Prototypes, typename: &ObjType, ) -> ZResult<Box<dyn ui::Widget>> { let proto = &prototypes.0[&typename]; let info = StaticObjectInfo::new(&typename, proto); let h = utils::line_heights().normal; let space_between_buttons = h / 8.0; let mut layout = Box::new(ui::VLayout::new().stretchable(true)); layout.add(agent_image(context, typename)?); let mut add = |w| layout.add(w); let text_ = |s: &str| Box::new(Text::new((s, font, utils::font_size()))); let label_ = |context: &mut Context, text: &str| -> ZResult<_> { Ok(ui::Label::new(context, text_(text), h)?) 
}; let label = |context: &mut Context, text: &str| -> ZResult<Box<_>> { Ok(Box::new(label_(context, text)?)) }; let label_s = |context: &mut Context, text: &str| -> ZResult<_> { Ok(Box::new(label_(context, text)?.stretchable(true))) }; let spacer_v = || Box::new(ui::Spacer::new_vertical(h * 0.5)); let spacer_s = || Box::new(ui::Spacer::new_horizontal(h * 0.5).stretchable(true)); let line = |context: &mut Context, arg: &str, val: &str| -> ZResult<_> { let mut line = ui::HLayout::new().stretchable(true); line.add(label(context, arg)?); line.add(spacer_s()); line.add(label(context, val)?); Ok(Box::new(line)) }; let line_i = |context: &mut Context, arg: &str, val: i32| -> ZResult<_> { line(context, arg, &val.to_string()) }; { if let Some(meta) = info.meta { let title = meta.name.0.to_title_case(); add(label_s(context, &format!("~~~ {} ~~~", title))?); add(spacer_v()); } if let Some(strength) = info.strength { add(line_i(context, "strength:", strength.base_strength.0)?); } if let Some(a) = info.agent { add(line_i(context, "attacks:", a.base_attacks.0)?); add(line_i(context, "moves:", a.base_moves.0)?); if a.base_jokers.0 != 0 { add(line_i(context, "jokers:", a.base_jokers.0)?); } if a.reactive_attacks.0 != 0 { add(line_i(context, "reactive attacks:", a.reactive_attacks.0)?); } if a.attack_distance.0 != 1 { add(line_i(context, "attack distance:", a.attack_distance.0)?); } add(line_i(context, "attack strength:", a.attack_strength.0)?); add(line_i(context, "attack accuracy:", a.attack_accuracy.0)?); if a.attack_break.0 > 0 { add(line_i(context, "armor break:", a.attack_break.0)?); } if a.dodge.0 > 0 { add(line_i(context, "dodge:", a.dodge.0)?); } add(line_i(context, "move points:", a.move_points.0)?); } if let Some(armor) = info.armor { let armor = armor.armor.0; if armor != 0 { add(line_i(context, "armor:", armor)?); } } if let Some(blocker) = info.blocker { add(line(context, "weight:", &format!("{}", blocker.weight))?); } if let Some(abilities) = info.abilities { if 
!abilities.0.is_empty() { add(label_s(context, "~ abilities ~")?); for r_ability in &abilities.0 { let s = r_ability.ability.title(); let cooldown = r_ability.ability.base_cooldown(); let text = format!("{} (cooldown: {}t)", s, cooldown); let mut line_layout = ui::HLayout::new().stretchable(true); line_layout.add(label(context, &text)?); line_layout.add(spacer_s()); let icon = Box::new(graphics::Image::new(context, "/img/icon_info.png")?); let message = Message::AbilityInfo(r_ability.ability.clone()); let button = ui::Button::new(context, icon, h, gui.sender(), message)?; line_layout.add(Box::new(button)); add(Box::new(line_layout)); add(Box::new(ui::Spacer::new_vertical(space_between_buttons))); } } } if let Some(abilities) = info.passive_abilities { if !abilities.0.is_empty() { add(label_s(context, "~ passive abilities ~")?); for &ability in &abilities.0 { let mut line_layout = ui::HLayout::new().stretchable(true); line_layout.add(label(context, &ability.title())?); line_layout.add(spacer_s()); let icon = Box::new(graphics::Image::new(context, "/img/icon_info.png")?); let message = Message::PassiveAbilityInfo(ability); let button = ui::Button::new(context, icon, h, gui.sender(), message)?; line_layout.add(Box::new(button)); add(Box::new(line_layout)); add(Box::new(ui::Spacer::new_vertical(space_between_buttons))); } } } } layout.stretch_to_self(context)?; Ok(layout) } fn button_back( context: &mut Context, font: graphics::Font, gui: &mut ui::Gui<Message>, layout_width: f32, ) -> ZResult<Box<dyn ui::Widget>> { let h = utils::line_heights().normal; let text = Box::new(Text::new(("back", font, utils::font_size()))); let msg = Message::Back; let mut button = ui::Button::new(context, text, h, gui.sender(), msg)?.stretchable(true); button.stretch(context, layout_width / 3.0)?; button.set_stretchable(false); Ok(Box::new(button)) } #[derive(Debug)] pub struct AgentInfo { font: graphics::Font, gui: Gui<Message>, } impl AgentInfo { pub fn new_agent_info( context: &mut 
Context, prototypes: &Prototypes, typename: &ObjType, ) -> ZResult<Self> { let font = utils::default_font(context); let mut gui = ui::Gui::new(context); let mut layout = ui::VLayout::new(); let h = utils::line_heights().big; layout.add(info_panel(context, font, &mut gui, prototypes, typename)?); layout.add(Box::new(ui::Spacer::new_vertical(h))); layout.add(button_back(context, font, &mut gui, layout.rect().w)?); let layout = utils::add_offsets_and_bg_big(context, Box::new(layout))?; let anchor = ui::Anchor(ui::HAnchor::Middle, ui::VAnchor::Middle); gui.add(&ui::pack(layout), anchor); Ok(Self { font, gui }) } pub fn new_upgrade_info( context: &mut Context, prototypes: &Prototypes, from: &ObjType, to: &ObjType, ) -> ZResult<Self> { let font = utils::default_font(context); let mut gui = ui::Gui::new(context); let mut layout = ui::VLayout::new(); let h = utils::line_heights().big; let line = { let mut line = Box::new(ui::HLayout::new()); let panel_from = info_panel(context, font, &mut gui, prototypes, from)?; let panel_from_height = panel_from.rect().h; line.add(panel_from); line.add(Box::new(ui::Spacer::new_horizontal(h))); let col = { let mut col = Box::new(ui::VLayout::new()); col.add(Box::new(ui::Spacer::new_vertical(panel_from_height * 0.5))); let text = Box::new(Text::new(("=>", font, utils::font_size()))); col.add(Box::new(ui::Label::new(context, text, h)?)); col }; line.add(col); line.add(Box::new(ui::Spacer::new_horizontal(h))); line.add(info_panel(context, font, &mut gui, prototypes, to)?); line }; layout.add(line); layout.add(Box::new(ui::Spacer::new_vertical(h))); layout.add(button_back(context, font, &mut gui, layout.rect().w)?); let layout = utils::add_offsets_and_bg_big(context, Box::new(layout))?; let anchor = ui::Anchor(ui::HAnchor::Middle, ui::VAnchor::Middle); gui.add(&ui::pack(layout), anchor); Ok(Self { font, gui }) } } impl Screen for AgentInfo { fn update(&mut self, _context: &mut Context, _dtime: Duration) -> ZResult<StackCommand> { 
Ok(StackCommand::None) } fn draw(&self, context: &mut Context) -> ZResult { self.gui.draw(context)?; Ok(()) } fn click(&mut self, context: &mut Context, pos: Point2) -> ZResult<StackCommand> { let message = self.gui.click(pos); match message { Some(Message::Back) => Ok(StackCommand::Pop), Some(Message::AbilityInfo(info)) => { let mut description = info.description(); description.push(format!("Cooldown: {}t", info.base_cooldown())); let screen = screen::GeneralInfo::new(context, &info.title(), &description)?; Ok(StackCommand::PushPopup(Box::new(screen))) } Some(Message::PassiveAbilityInfo(info)) => { let screen = screen::GeneralInfo::new(context, &info.title(), &info.description())?; Ok(StackCommand::PushPopup(Box::new(screen))) } None => Ok(StackCommand::None), } } fn resize(&mut self, aspect_ratio: f32) { self.gui.resize(aspect_ratio); } fn move_mouse(&mut self, _context: &mut Context, pos: Point2) -> ZResult { self.gui.move_mouse(pos); Ok(()) } }
use std::{collections::HashMap, time::Duration}; use gwg::{ graphics::{self, Color, Image, Point2, Text}, Context, }; use heck::TitleCase; use ui::{self, Gui, Widget}; use crate::{ core::battle::{ ability::{Ability, PassiveAbility}, component::{self, Component, ObjType, Prototypes}, }, screen::{self, Screen, StackCommand}, sprite_info::SpriteInfo, utils, ZResult, }; #[derive(Clone, Debug, Default)] struct StaticObjectInfo { meta: Option<component::Meta>, strength: Option<component::Strength>, armor: Option<component::Armor>, agent: Option<component::Agent>, blocker: Option<component::Blocker>, abilities: Option<component::Abilities>, passive_abilities: Option<component::PassiveAbilities>, summoner: Option<component::Summoner>, } impl StaticObjectInfo { fn new(typename: &ObjType, components: &[Component]) -> Self { let mut this = StaticObjectInfo::default(); let name = typename.clone(); this.meta = Some(component::Meta { name }); for component in components { match component.clone() { Component::Strength(c) => this.strength = Some(c), Component::Armor(c) => this.armor = Some(c), Component::Meta(c) => this.meta = Some(c), Component::Agent(c) => this.agent = Some(c), Component::Abilities(c) => this.abilities = Some(c), Component::PassiveAbilities(c) => this.passive_abilities = Some(c), Component::Summoner(c) => this.summoner = Some(c), Component::Blocker(c) => this.blocker = Some(c), Component::BelongsTo(_) | Component::Pos(_) | Component::Effects(_) | Component::Schedule(_) => (), } } this } } type SpritesInfo = HashMap<String, SpriteInfo>; fn load_sprites_info(context: &mut Context) -> ZResult<SpritesInfo> { let info = utils::deserialize_from_file(context, "/sprites.ron")?; Ok(info) } fn agent_image(context: &mut Context, typename: &ObjType) -> ZResult<Box<dyn ui::Widget>> { let h = 0.3; let sprites_info = load_sprites_info(context)?; let sprite_info = sprites_info[&typename.0].clone(); let default_frame = ""; let default_frame_path = 
&sprite_info.paths[default_frame]; let image = Image::new(context, default_frame_path).expect("Can't load agent's image"); let label = ui::Label::new(context, Box::new(image), h)? .with_color(Color::new(1.0, 1.0, 1.0, 1.0)) .stretchable(true); Ok(Box::new(label)) } #[derive(Clone, Debug)] enum Message { Back, AbilityInfo(Ability), PassiveAbilityInfo(PassiveAbility), } fn info_panel( context: &mut Context, font: graphics::Font, gui: &mut ui::Gui<Message>, prototypes: &Prototypes, typename: &ObjType, ) -> ZResult<Box<dyn ui::Widget>> { let proto = &prototypes.0[&typename]; let info = StaticObjectInfo::new(&typename, proto); let h = utils::line_heights().normal; let space_between_buttons = h / 8.0; let mut layout = Box::new(ui::VLayout::new().stretchable(true)); layout.add(agent_image(context, typename)?); let mut add = |w| layout.add(w); let text_ = |s: &str| Box::new(Text::new((s, font, utils::font_size()))); let label_ = |context: &mut Context, text: &str| -> ZResult<_> { Ok(ui::Label::new(context, text_(text), h)?) 
}; let label = |context: &mut Context, text: &str| -> ZResult<Box<_>> { Ok(Box::new(label_(context, text)?)) }; let label_s = |context: &mut Context, text: &str| -> ZResult<_> { Ok(Box::new(label_(context, text)?.stretchable(true))) }; let spacer_v = || Box::new(ui::Spacer::new_vertical(h * 0.5)); let spacer_s = || Box::new(ui::Spacer::new_horizontal(h * 0.5).stretchable(true)); let line = |context: &mut Context, arg: &str, val: &str| -> ZResult<_> { let mut line = ui::HLayout::new().stretchable(true); line.add(label(context, arg)?); line.add(spacer_s()); line.add(label(context, val)?); Ok(Box::new(line)) }; let line_i = |context: &mut Context, arg: &str, val: i32| -> ZResult<_> { line(context, arg, &val.to_string()) }; { if let Some(meta) = info.meta { let title = meta.name.0.to_title_case(); add(label_s(context, &format!("~~~ {} ~~~", title))?); add(spacer_v()); } if let Some(strength) = info.strength { add(line_i(context, "strength:", strength.base_strength.0)?); } if let Some(a) = info.agent { add(line_i(context, "attacks:", a.base_attacks.0)?); add(line_i(context, "moves:", a.base_moves.0)?); if a.base_jokers.0 != 0 { add(line_i(context, "jokers:", a.base_jokers.0)?); } if a.reactive_attacks.0 != 0 { add(line_i(context, "reactive attacks:", a.reactive_attacks.0)?); } if a.attack_distance.0 != 1 { add(line_i(context, "attack distance:", a.attack_distance.0)?); } add(line_i(context, "attack strength:", a.attack_strength.0)?); add(line_i(context, "attack accuracy:", a.attack_accuracy.0)?); if a.attack_break.0 > 0 { add(line_i(context, "armor break:", a.attack_break.0)?); } if a.dodge.0 > 0 { add(line_i(context, "dodge:", a.dodge.0)?); } add(line_i(context, "move points:", a.move_points.0)?); } if let Some(armor) = info.armor { let armor = armor.armor.0; if armor != 0 { add(line_i(context, "armor:", armor)?); } } if let Some(blocker) = info.blocker { add(line(context, "weight:", &format!("{}", blocker.weight))?); } if let Some(abilities) = info.abilities { if 
!abilities.0.is_empty() { add(label_s(context, "~ abilities ~")?); for r_ability in &abilities.0 { let s = r_ability.ability.title(); let cooldown = r_ability.ability.base_cooldown(); let text = format!("{} (cooldown: {}t)", s, cooldown); let mut line_layout = ui::HLayout::new().stretchable(true); line_layout.add(label(context, &text)?); line_layout.add(spacer_s()); let icon = Box::new(graphics::Image::new(context, "/img/icon_info.png")?); let message = Message::AbilityInfo(r_ability.ability.clone()); let button = ui::Button::new(context, icon, h, gui.sender(), message)?; line_layout.add(Box::new(button)); add(Box::new(line_layout)); add(Box::new(ui::Spacer::new_vertical(space_between_buttons))); } } } if let Some(abilities) = info.passive_abilities { if !abilities.0.is_empty() { add(label_s(context, "~ passive abilities ~")?); for &ability in &abilities.0 { let mut line_layout = ui::HLayout::new().stretchable(true); line_layout.add(label(context, &ability.title())?); line_layout.add(spacer_s()); let icon = Box::new(graphics::Image::new(context, "/img/icon_info.png")?); let message = Message::PassiveAbilityInfo(ability); let button = ui::Button::new(context, icon, h, gui.sender(), message)?; line_layout.add(Box::new(button)); add(Box::new(line_layout)); add(Box::new(ui::Spacer::new_vertical(space_between_buttons))); } } } } layout.stretch_to_self(context)?; Ok(layout) } fn button_back( context: &mut Context, font: graphics::Font, gui: &mut ui::Gui<Message>, layout_width: f32, ) -> ZResult<Box<dyn ui::Widget>> { let h = utils::line_heights().normal; let text = Box::new(Text::new(("back", font, utils::font_size()))); let msg = Message::Back; let mut button = ui::Button::new(context, text, h, gui.sender(), msg)?.stretchable(true); button.stretch(context, layout_width / 3.0)?; button.set_stretchable(false); Ok(Box::new(button)) } #[derive(Debug)] pub struct AgentInfo { font: graphics::Font, gui: Gui<Message>, } impl AgentInfo {
pub fn new_upgrade_info( context: &mut Context, prototypes: &Prototypes, from: &ObjType, to: &ObjType, ) -> ZResult<Self> { let font = utils::default_font(context); let mut gui = ui::Gui::new(context); let mut layout = ui::VLayout::new(); let h = utils::line_heights().big; let line = { let mut line = Box::new(ui::HLayout::new()); let panel_from = info_panel(context, font, &mut gui, prototypes, from)?; let panel_from_height = panel_from.rect().h; line.add(panel_from); line.add(Box::new(ui::Spacer::new_horizontal(h))); let col = { let mut col = Box::new(ui::VLayout::new()); col.add(Box::new(ui::Spacer::new_vertical(panel_from_height * 0.5))); let text = Box::new(Text::new(("=>", font, utils::font_size()))); col.add(Box::new(ui::Label::new(context, text, h)?)); col }; line.add(col); line.add(Box::new(ui::Spacer::new_horizontal(h))); line.add(info_panel(context, font, &mut gui, prototypes, to)?); line }; layout.add(line); layout.add(Box::new(ui::Spacer::new_vertical(h))); layout.add(button_back(context, font, &mut gui, layout.rect().w)?); let layout = utils::add_offsets_and_bg_big(context, Box::new(layout))?; let anchor = ui::Anchor(ui::HAnchor::Middle, ui::VAnchor::Middle); gui.add(&ui::pack(layout), anchor); Ok(Self { font, gui }) } } impl Screen for AgentInfo { fn update(&mut self, _context: &mut Context, _dtime: Duration) -> ZResult<StackCommand> { Ok(StackCommand::None) } fn draw(&self, context: &mut Context) -> ZResult { self.gui.draw(context)?; Ok(()) } fn click(&mut self, context: &mut Context, pos: Point2) -> ZResult<StackCommand> { let message = self.gui.click(pos); match message { Some(Message::Back) => Ok(StackCommand::Pop), Some(Message::AbilityInfo(info)) => { let mut description = info.description(); description.push(format!("Cooldown: {}t", info.base_cooldown())); let screen = screen::GeneralInfo::new(context, &info.title(), &description)?; Ok(StackCommand::PushPopup(Box::new(screen))) } Some(Message::PassiveAbilityInfo(info)) => { let screen = 
screen::GeneralInfo::new(context, &info.title(), &info.description())?; Ok(StackCommand::PushPopup(Box::new(screen))) } None => Ok(StackCommand::None), } } fn resize(&mut self, aspect_ratio: f32) { self.gui.resize(aspect_ratio); } fn move_mouse(&mut self, _context: &mut Context, pos: Point2) -> ZResult { self.gui.move_mouse(pos); Ok(()) } }
pub fn new_agent_info( context: &mut Context, prototypes: &Prototypes, typename: &ObjType, ) -> ZResult<Self> { let font = utils::default_font(context); let mut gui = ui::Gui::new(context); let mut layout = ui::VLayout::new(); let h = utils::line_heights().big; layout.add(info_panel(context, font, &mut gui, prototypes, typename)?); layout.add(Box::new(ui::Spacer::new_vertical(h))); layout.add(button_back(context, font, &mut gui, layout.rect().w)?); let layout = utils::add_offsets_and_bg_big(context, Box::new(layout))?; let anchor = ui::Anchor(ui::HAnchor::Middle, ui::VAnchor::Middle); gui.add(&ui::pack(layout), anchor); Ok(Self { font, gui }) }
function_block-full_function
[ { "content": "fn label(context: &mut Context, font: Font, text: &str) -> ZResult<Box<dyn ui::Widget>> {\n\n let text = Box::new(Text::new((text, font, FONT_SIZE)));\n\n Ok(Box::new(ui::Label::new(context, text, line_height())?))\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Campaign {\n\n state: State,\n\n font: graphics::Font,\n\n receiver_battle_result: Option<Receiver<Option<BattleResult>>>,\n\n receiver_exit_confirmation: Option<Receiver<screen::confirm::Message>>,\n\n gui: Gui<Message>,\n\n layout: Option<ui::RcWidget>,\n\n label_central_message: Option<ui::RcWidget>,\n\n}\n\n\n\nimpl Campaign {\n\n pub fn new(context: &mut Context) -> ZResult<Self> {\n\n let plan = utils::deserialize_from_file(context, \"/campaign_01.ron\")?;\n\n let upgrades = utils::deserialize_from_file(context, \"/agent_campaign_info.ron\")?;\n", "file_path": "src/screen/campaign.rs", "rank": 0, "score": 479339.8304620908 }, { "content": "pub fn default_font(context: &mut Context) -> Font {\n\n Font::new(context, \"/OpenSans-Regular.ttf\").expect(\"Can't load the default font\")\n\n}\n\n\n\n// TODO: Move to some config (https://github.com/ozkriff/zemeroth/issues/424)\n\npub const fn font_size() -> f32 {\n\n 128.0\n\n}\n\n\n\npub struct LineHeights {\n\n pub small: f32,\n\n pub normal: f32,\n\n pub big: f32,\n\n pub large: f32,\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 2, "score": 406558.831002211 }, { "content": "fn make_gui(context: &mut Context, font: Font) -> ui::Result<ui::Gui<Message>> {\n\n let font_size = 32.0;\n\n let mut gui = ui::Gui::new(context);\n\n let anchor = ui::Anchor(ui::HAnchor::Right, ui::VAnchor::Bottom);\n\n let text = Box::new(Text::new((\"Button\", font, font_size)));\n\n let button = ui::Button::new(context, text, 0.2, gui.sender(), Message::Command)?;\n\n gui.add(&ui::pack(button), anchor);\n\n Ok(gui)\n\n}\n\n\n", "file_path": "zgui/examples/text_button.rs", "rank": 3, "score": 391274.20283746044 }, { "content": "fn make_gui(context: &mut Context, font: 
Font) -> ZResult<ui::Gui<Message>> {\n\n let mut gui = ui::Gui::new(context);\n\n let h = utils::line_heights().large;\n\n let font_size = utils::font_size();\n\n let space = || Box::new(ui::Spacer::new_vertical(h / 8.0));\n\n let button = &mut |context: &mut Context, text, message| -> ZResult<_> {\n\n let text = Box::new(Text::new((text, font, font_size)));\n\n let b = ui::Button::new(context, text, h, gui.sender(), message)?.stretchable(true);\n\n Ok(Box::new(b))\n\n };\n\n let mut layout = Box::new(ui::VLayout::new().stretchable(true));\n\n layout.add(button(context, \"demo battle\", Message::StartInstant)?);\n\n layout.add(space());\n\n layout.add(button(context, \"campaign\", Message::StartCampaign)?);\n\n #[cfg(not(target_arch = \"wasm32\"))] // can't quit WASM\n\n {\n\n layout.add(space());\n\n layout.add(button(context, \"exit\", Message::Exit)?);\n\n }\n\n layout.stretch_to_self(context)?;\n", "file_path": "src/screen/main_menu.rs", "rank": 4, "score": 389512.4602891377 }, { "content": "pub fn add_bg(context: &mut Context, w: Box<dyn ui::Widget>) -> ZResult<ui::LayersLayout> {\n\n let bg = ui::ColoredRect::new(context, ui::SPRITE_COLOR_BG, w.rect())?.stretchable(true);\n\n let mut layers = ui::LayersLayout::new();\n\n layers.add(Box::new(bg));\n\n layers.add(w);\n\n Ok(layers)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 5, "score": 386410.2979814685 }, { "content": "fn make_gui(context: &mut Context, font: Font) -> ui::Result<ui::Gui<Message>> {\n\n let font_size = 64.0;\n\n let mut gui = ui::Gui::new(context);\n\n let text = Box::new(Text::new((\"text\", font, font_size)));\n\n let image = Box::new(Image::new(context, \"/fire.png\")?);\n\n let button_1 = ui::Button::new(context, image, 0.2, gui.sender(), Message::Command1)?;\n\n let button_2 = ui::Label::new(context, text, 0.1)?;\n\n let mut layout = ui::LayersLayout::new();\n\n layout.add(Box::new(button_1));\n\n layout.add(Box::new(button_2));\n\n let anchor = ui::Anchor(ui::HAnchor::Right, 
ui::VAnchor::Bottom);\n\n gui.add(&ui::pack(layout), anchor);\n\n Ok(gui)\n\n}\n\n\n", "file_path": "zgui/examples/layers_layout.rs", "rank": 7, "score": 362075.4243265239 }, { "content": "fn make_gui(context: &mut Context, font: Font) -> ui::Result<ui::Gui<Message>> {\n\n let font_size = 64.0;\n\n let mut gui = ui::Gui::new(context);\n\n let text_1 = Box::new(Text::new((\"Button1\", font, font_size)));\n\n let text_2 = Box::new(Text::new((\"Button2\", font, font_size)));\n\n let button_1 = ui::Button::new(context, text_1, 0.2, gui.sender(), Message::Command1)?;\n\n let button_2 = ui::Button::new(context, text_2, 0.2, gui.sender(), Message::Command2)?;\n\n let mut layout = ui::VLayout::new();\n\n layout.add(Box::new(button_1));\n\n layout.add(Box::new(button_2));\n\n let anchor = ui::Anchor(ui::HAnchor::Right, ui::VAnchor::Bottom);\n\n gui.add(&ui::pack(layout), anchor);\n\n Ok(gui)\n\n}\n\n\n", "file_path": "zgui/examples/vertical_layout.rs", "rank": 8, "score": 362075.4243265239 }, { "content": "fn build_panel_end_turn(context: &mut Context, gui: &mut Gui<Message>) -> ZResult<ui::RcWidget> {\n\n let h = line_heights().large;\n\n let icon = Box::new(graphics::Image::new(context, \"/img/icon_end_turn.png\")?);\n\n let button = ui::Button::new(context, icon, h, gui.sender(), Message::EndTurn)?;\n\n let layout = ui::VLayout::from_widget(Box::new(button));\n\n let anchor = ui::Anchor(ui::HAnchor::Right, ui::VAnchor::Bottom);\n\n let packed_layout = ui::pack(layout);\n\n gui.add(&packed_layout, anchor);\n\n Ok(packed_layout)\n\n}\n\n\n", "file_path": "src/screen/battle.rs", "rank": 9, "score": 356714.52563053323 }, { "content": "/// <http://www.redblobgames.com/grids/hexagons/#pixel-to-hex>\n\npub fn point_to_hex(size: f32, mut point: Point2) -> PosHex {\n\n point.y /= FLATNESS_COEFFICIENT;\n\n let q = (point.x * SQRT_OF_3 / 3.0 - point.y / 3.0) / size;\n\n let r = point.y * 2.0 / 3.0 / size;\n\n hex_round(PosHex { q, r })\n\n}\n\n\n", "file_path": "src/geom.rs", 
"rank": 10, "score": 348857.6919953497 }, { "content": "fn basic_gui(context: &mut Context) -> ZResult<Gui<Message>> {\n\n let mut gui = Gui::new(context);\n\n let h = utils::line_heights().large;\n\n let button_menu = {\n\n let icon = Box::new(graphics::Image::new(context, \"/img/icon_menu.png\")?);\n\n ui::Button::new(context, icon, h, gui.sender(), Message::Menu)?\n\n };\n\n let mut layout = ui::VLayout::new();\n\n layout.add(Box::new(button_menu));\n\n let anchor = ui::Anchor(ui::HAnchor::Left, ui::VAnchor::Top);\n\n gui.add(&ui::pack(layout), anchor);\n\n Ok(gui)\n\n}\n\n\n", "file_path": "src/screen/campaign.rs", "rank": 11, "score": 348695.9525190518 }, { "content": "fn make_gui(context: &mut Context) -> ZResult<ui::Gui<Message>> {\n\n let mut gui = ui::Gui::new(context);\n\n let h = line_heights().large;\n\n {\n\n let icon = Box::new(graphics::Image::new(context, \"/img/icon_menu.png\")?);\n\n let button = ui::Button::new(context, icon, h, gui.sender(), Message::Exit)?;\n\n let layout = ui::VLayout::from_widget(Box::new(button));\n\n let anchor = ui::Anchor(ui::HAnchor::Left, ui::VAnchor::Top);\n\n gui.add(&ui::pack(layout), anchor);\n\n }\n\n Ok(gui)\n\n}\n\n\n", "file_path": "src/screen/battle.rs", "rank": 12, "score": 338037.0816548897 }, { "content": "fn make_gui(context: &mut Context, font: Font) -> ui::Result<ui::Gui<Message>> {\n\n let font_size = 32.0;\n\n let mut gui = ui::Gui::new(context);\n\n let anchor = ui::Anchor(ui::HAnchor::Right, ui::VAnchor::Bottom);\n\n let text = Box::new(Text::new((\"Add/Remove\", font, font_size)));\n\n let button = ui::Button::new(context, text, 0.2, gui.sender(), Message::AddOrRemove)?;\n\n gui.add(&ui::pack(button), anchor);\n\n Ok(gui)\n\n}\n\n\n", "file_path": "zgui/examples/remove.rs", "rank": 13, "score": 337150.7866484072 }, { "content": "// TODO: rework this into some more game-like\n\nfn make_gui(context: &mut Context, font: Font) -> ui::Result<ui::Gui<Message>> {\n\n let font_size = 32.0;\n\n let mut gui = 
ui::Gui::new(context);\n\n {\n\n let image = Box::new(Image::new(context, \"/fire.png\")?);\n\n let button = ui::Button::new(context, image, 0.1, gui.sender(), Message::Image)?;\n\n let anchor = ui::Anchor(ui::HAnchor::Right, ui::VAnchor::Top);\n\n gui.add(&ui::pack(button), anchor);\n\n }\n\n {\n\n let text = Box::new(Text::new((\"label\", font, font_size)));\n\n let label = ui::Label::new_with_bg(context, text, 0.1)?;\n\n let anchor = ui::Anchor(ui::HAnchor::Left, ui::VAnchor::Bottom);\n\n gui.add(&ui::pack(label), anchor);\n\n }\n\n let v_layout_1 = {\n\n let text_a = Box::new(Text::new((\"A\", font, font_size)));\n\n let text_b = Box::new(Text::new((\"A\", font, font_size)));\n\n let text_c = Box::new(Text::new((\"A\", font, font_size)));\n\n let button_a = ui::Button::new(context, text_a, 0.1, gui.sender(), Message::A)?;\n", "file_path": "zgui/examples/nested.rs", "rank": 14, "score": 337150.78664840723 }, { "content": "fn make_gui(context: &mut Context, font: Font) -> ui::Result<ui::Gui<Message>> {\n\n let mut gui = ui::Gui::new(context);\n\n let text_1 = Box::new(Text::new((\"Button1\", font, 32.0)));\n\n let text_2 = Box::new(Text::new((\"Button1\", font, 64.0)));\n\n let button_1 = ui::Button::new(context, text_1, 0.2, gui.sender(), Message::Command1)?;\n\n let button_2 = ui::Button::new(context, text_2, 0.2, gui.sender(), Message::Command2)?;\n\n let mut layout = ui::VLayout::new();\n\n layout.add(Box::new(button_1));\n\n layout.add(Box::new(button_2));\n\n let anchor = ui::Anchor(ui::HAnchor::Right, ui::VAnchor::Bottom);\n\n gui.add(&ui::pack(layout), anchor);\n\n Ok(gui)\n\n}\n\n\n", "file_path": "zgui/examples/pixel_coordinates.rs", "rank": 15, "score": 332944.27262854204 }, { "content": "fn make_gui(context: &mut Context, font: Font) -> ui::Result<ui::Gui<Message>> {\n\n let mut gui = ui::Gui::new(context);\n\n let text_1 = Box::new(Text::new((\"Button1\", font, 32.0)));\n\n let text_2 = Box::new(Text::new((\"Button2\", font, 64.0)));\n\n let button_1 
= ui::Button::new(context, text_1, 0.2, gui.sender(), Message::Command1)?;\n\n let button_2 = ui::Button::new(context, text_2, 0.2, gui.sender(), Message::Command2)?;\n\n let mut layout = ui::VLayout::new();\n\n layout.add(Box::new(button_1));\n\n layout.add(Box::new(button_2));\n\n let anchor = ui::Anchor(ui::HAnchor::Right, ui::VAnchor::Bottom);\n\n gui.add(&ui::pack(layout), anchor);\n\n Ok(gui)\n\n}\n\n\n", "file_path": "zgui/examples/absolute_coordinates.rs", "rank": 16, "score": 332944.27262854204 }, { "content": "pub fn remove_widget<M: Clone>(gui: &mut ui::Gui<M>, widget: &mut Option<ui::RcWidget>) -> ZResult {\n\n if let Some(w) = widget.take() {\n\n gui.remove(&w)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 17, "score": 327480.20768278925 }, { "content": "pub fn window_to_screen(context: &Context, pos: Point2) -> Point2 {\n\n let (w, h) = graphics::drawable_size(context);\n\n let w = w as f32;\n\n let h = h as f32;\n\n let aspect_ratio = w / h;\n\n Point2::new(\n\n (2.0 * pos.x / w - 1.0) * aspect_ratio,\n\n 2.0 * pos.y / h - 1.0,\n\n )\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub enum VAnchor {\n\n Top,\n\n Middle,\n\n Bottom,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub enum HAnchor {\n", "file_path": "zgui/src/lib.rs", "rank": 18, "score": 303938.2055553601 }, { "content": "pub fn add_offsets(w: Box<dyn ui::Widget>, offset: f32) -> Box<dyn ui::Widget> {\n\n let spacer = || {\n\n ui::Spacer::new(Rect {\n\n w: offset,\n\n h: offset,\n\n ..Default::default()\n\n })\n\n };\n\n let mut layout_h = ui::HLayout::new().stretchable(true);\n\n layout_h.add(Box::new(spacer()));\n\n layout_h.add(w);\n\n layout_h.add(Box::new(spacer()));\n\n let mut layout_v = ui::VLayout::new().stretchable(true);\n\n layout_v.add(Box::new(spacer()));\n\n layout_v.add(Box::new(layout_h));\n\n layout_v.add(Box::new(spacer()));\n\n Box::new(layout_v)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 19, "score": 296955.0910186764 }, { "content": "pub 
fn can_agent_use_ability(state: &State, id: Id, ability: &Ability) -> bool {\n\n let parts = state.parts();\n\n let agent_player_id = parts.belongs_to.get(id).0;\n\n let agent = parts.agent.get(id);\n\n let has_actions = agent.attacks > battle::Attacks(0) || agent.jokers > battle::Jokers(0);\n\n let is_player_agent = agent_player_id == state.player_id();\n\n let abilities = &parts.abilities.get(id).0;\n\n let r_ability = abilities.iter().find(|r| &r.ability == ability).unwrap();\n\n let is_ready = r_ability.status == ability::Status::Ready;\n\n is_player_agent && is_ready && has_actions\n\n}\n", "file_path": "src/core/battle/state.rs", "rank": 21, "score": 282486.76599368954 }, { "content": "pub fn deserialize_from_file<P, D>(context: &mut Context, path: P) -> ZResult<D>\n\nwhere\n\n P: AsRef<Path>,\n\n D: DeserializeOwned,\n\n{\n\n let path = path.as_ref();\n\n let s = read_file(context, path)?;\n\n ron::de::from_str(&s).map_err(|e| ZError::from_ron_de_error(e, path.into()))\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 22, "score": 273117.53078176023 }, { "content": "fn execute_use_ability_summon(state: &mut State, command: &command::UseAbility) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n let max_summoned_count = state.parts().summoner.get(command.id).count;\n\n let available_typenames = &[\"imp\".into(), \"toxic_imp\".into(), \"imp_bomber\".into()];\n\n let existing_agents = existing_agent_typenames(state, state.player_id());\n\n let mut new_agents = Vec::new();\n\n for pos in state::free_neighbor_positions(state, command.pos, max_summoned_count as _) {\n\n let prototype = choose_who_to_summon(&existing_agents, &new_agents, available_typenames);\n\n let effect_create = effect_create_agent(state, &prototype, state.player_id(), pos);\n\n let id = state.alloc_id();\n\n let effects = vec![effect_create, Effect::Stun];\n\n new_agents.push(prototype);\n\n context.instant_effects.push((id, effects));\n\n 
context.moved_actor_ids.push(id);\n\n context.reaction_attack_targets.push(id);\n\n }\n\n context\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 23, "score": 272121.1213686345 }, { "content": "pub fn pack<W: Widget + 'static>(widget: W) -> RcWidget {\n\n Rc::new(RefCell::new(widget))\n\n}\n\n\n", "file_path": "zgui/src/lib.rs", "rank": 24, "score": 270655.8804466518 }, { "content": "fn component_meta(name: &str) -> Component {\n\n component::Meta { name: name.into() }.into()\n\n}\n\n\n", "file_path": "src/core/battle/tests.rs", "rank": 25, "score": 270114.88601951924 }, { "content": "/// Read a file to a string.\n\npub fn read_file<P: AsRef<Path>>(context: &mut Context, path: P) -> ZResult<String> {\n\n let mut buf = String::new();\n\n let mut file = gwg::filesystem::open(context, path)?;\n\n file.read_to_string(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 26, "score": 265411.9410838749 }, { "content": "/// <http://www.redblobgames.com/grids/hexagons/#hex-to-pixel>\n\npub fn hex_to_point(size: f32, hex: PosHex) -> Point2 {\n\n let x = size * SQRT_OF_3 * (hex.q as f32 + hex.r as f32 / 2.0);\n\n let y = size * 3.0 / 2.0 * hex.r as f32;\n\n Point2::new(x, y * FLATNESS_COEFFICIENT)\n\n}\n\n\n", "file_path": "src/geom.rs", "rank": 27, "score": 263165.016774594 }, { "content": "fn apply_event_use_passive_ability(_: &mut State, _: &event::UsePassiveAbility) {}\n\n\n", "file_path": "src/core/battle/state/apply.rs", "rank": 28, "score": 261415.2449949755 }, { "content": "fn execute_use_ability_rage(_: &mut State, _: &command::UseAbility) -> ExecuteContext {\n\n ExecuteContext::default()\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 29, "score": 251836.00770420392 }, { "content": "fn make_rect(context: &mut Context, rect: Rect, color: Color) -> Result<Sprite> {\n\n let mode = graphics::DrawMode::fill();\n\n let white = [1.0, 1.0, 1.0, 1.0].into();\n\n let mesh = graphics::Mesh::new_rectangle(context, mode, rect, 
white)?;\n\n let mut sprite = Sprite::new(context, Box::new(mesh), rect.h)?;\n\n sprite.set_color(color);\n\n Ok(sprite)\n\n}\n\n\n", "file_path": "zgui/src/lib.rs", "rank": 30, "score": 249518.29890204483 }, { "content": "fn execute_use_ability_dash(_: &mut State, command: &command::UseAbility) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n context.moved_actor_ids.push(command.id);\n\n context\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 31, "score": 247648.1312183601 }, { "content": "fn execute_use_ability_jump(_: &mut State, command: &command::UseAbility) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n context.moved_actor_ids.push(command.id);\n\n context\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 32, "score": 247648.1312183601 }, { "content": "fn make_label(context: &mut Context) -> ui::Result<ui::RcWidget> {\n\n let image = Image::new(context, \"/fire.png\").expect(\"Can't load test image\");\n\n let label = ui::Label::new_with_bg(context, Box::new(image), 0.5)?;\n\n Ok(ui::pack(label))\n\n}\n\n\n", "file_path": "zgui/examples/remove.rs", "rank": 34, "score": 243985.6616798008 }, { "content": "fn execute_use_ability_long_jump(_: &mut State, command: &command::UseAbility) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n context.moved_actor_ids.push(command.id);\n\n context\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 35, "score": 243629.40587852543 }, { "content": "fn execute_use_ability_vanish(state: &mut State, command: &command::UseAbility) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n assert!(state.parts().is_exist(command.id));\n\n let effects = vec![Effect::Vanish];\n\n context.instant_effects.push((command.id, effects));\n\n context\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 36, "score": 243629.40587852543 }, { "content": "fn execute_use_ability_club(state: &mut State, 
command: &command::UseAbility) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n let id = state::blocker_id_at(state, command.pos);\n\n if state.parts().belongs_to.get_opt(id).is_some() {\n\n let owner = state.parts().belongs_to.get(id).0;\n\n let phase = Phase::from_player_id(owner);\n\n let effect = effect::Timed {\n\n duration: effect::Duration::Rounds(1.into()),\n\n phase,\n\n effect: effect::Lasting::Stun,\n\n };\n\n context.timed_effects.push((id, vec![effect]));\n\n extend_or_crate_sub_vec(&mut context.instant_effects, id, vec![Effect::Stun]);\n\n }\n\n context.actor_ids.push(id);\n\n context\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 37, "score": 243629.40587852543 }, { "content": "fn execute_use_ability_poison(state: &mut State, command: &command::UseAbility) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n let id = state::blocker_id_at(state, command.pos);\n\n let owner = state.parts().belongs_to.get(id).0;\n\n let phase = Phase::from_player_id(owner);\n\n let effect = effect::Timed {\n\n duration: effect::Duration::Rounds(2.into()),\n\n phase,\n\n effect: effect::Lasting::Poison,\n\n };\n\n context.timed_effects.push((id, vec![effect]));\n\n context.actor_ids.push(id);\n\n context\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 38, "score": 243629.40587852543 }, { "content": "fn make_popup_bg_mesh(context: &mut Context) -> ZResult<graphics::Mesh> {\n\n let coords = graphics::screen_coordinates(context);\n\n let mode = graphics::DrawMode::fill();\n\n Ok(graphics::Mesh::new_rectangle(\n\n context,\n\n mode,\n\n coords,\n\n COLOR_POPUP_BG,\n\n )?)\n\n}\n\n\n\npub struct Screens {\n\n screens: Vec<ScreenWithPopups>,\n\n popup_bg_mesh: graphics::Mesh,\n\n}\n\n\n\nimpl Screens {\n\n pub fn new(context: &mut Context, start_screen: Box<dyn Screen>) -> ZResult<Self> {\n\n Ok(Self {\n\n screens: vec![ScreenWithPopups::new(start_screen)],\n", "file_path": "src/screen.rs", "rank": 
39, "score": 242046.56809087738 }, { "content": "pub fn tile_size(map_height: Distance) -> f32 {\n\n 1.0 / (map_height.0 as f32 * 0.75)\n\n}\n\n\n", "file_path": "src/screen/battle/view.rs", "rank": 40, "score": 241757.93632869865 }, { "content": "fn stretch_checks(widget: &impl Widget, width: f32) -> Option<StretchStatus> {\n\n if !widget.can_stretch() {\n\n return Some(StretchStatus::Unstretchable);\n\n }\n\n if widget.rect().w > width {\n\n return Some(StretchStatus::AlreadyWider);\n\n }\n\n None\n\n}\n\n\n\npub type RcWidget = Rc<RefCell<dyn Widget>>;\n\n\n\n#[derive(Debug)]\n\npub struct AnchoredWidget {\n\n widget: RcWidget,\n\n anchor: Anchor,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Gui<Message: Clone> {\n", "file_path": "zgui/src/lib.rs", "rank": 41, "score": 241135.89096947986 }, { "content": "fn component_passive_abilities(abilities: &[PassiveAbility]) -> Component {\n\n component::PassiveAbilities(abilities.to_vec()).into()\n\n}\n\n\n", "file_path": "src/core/battle/tests.rs", "rank": 42, "score": 239426.1392704119 }, { "content": "fn component_blocker(w: Weight) -> Component {\n\n component::Blocker { weight: w }.into()\n\n}\n\n\n", "file_path": "src/core/battle/tests.rs", "rank": 43, "score": 238918.86026607495 }, { "content": "pub fn obj_with_passive_ability_at(\n\n state: &State,\n\n pos: PosHex,\n\n ability: PassiveAbility,\n\n) -> Option<Id> {\n\n for id in ids_at(state, pos) {\n\n if let Some(abilities) = state.parts().passive_abilities.get_opt(id) {\n\n for &current_ability in &abilities.0 {\n\n if current_ability == ability {\n\n return Some(id);\n\n }\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/core/battle/state.rs", "rank": 45, "score": 236031.40828314767 }, { "content": "fn component_abilities(abilities: &[Ability]) -> Component {\n\n let abilities = abilities.iter().cloned().map(Into::into).collect();\n\n component::Abilities(abilities).into()\n\n}\n\n\n", "file_path": "src/core/battle/tests.rs", "rank": 47, "score": 
233849.3423064509 }, { "content": "fn visualize_event_use_ability_summon(\n\n state: &State,\n\n view: &mut BattleView,\n\n context: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n let sprite = view.id_to_sprite(event.id).clone();\n\n let frame_name = \"summon\";\n\n assert!(sprite.has_frame(frame_name));\n\n let pos = state.parts().pos.get(event.id).0;\n\n let color = [1.0, 1.0, 1.0, 0.7].into();\n\n let scale = 2.0;\n\n let time = time_s(TIME_DEFAULT_FLARE);\n\n let action_flare = show_flare_scale_time(view, context, pos, color, scale, time)?;\n\n Ok(seq([\n\n action::SetFrame::new(&sprite, frame_name).boxed(),\n\n action::Sleep::new(time_s(0.3)).boxed(),\n\n fork(seq([\n\n action_flare,\n\n action::SetFrame::new(&sprite, \"\").boxed(),\n\n ])),\n\n ]))\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 48, "score": 230922.41828871635 }, { "content": "pub fn time_s(s: f32) -> Duration {\n\n let ms = s * 1000.0;\n\n Duration::from_millis(ms as u64)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 49, "score": 230714.68042244564 }, { "content": "pub trait Screen: Debug {\n\n fn update(&mut self, context: &mut Context, dtime: Duration) -> ZResult<StackCommand>;\n\n fn draw(&self, context: &mut Context) -> ZResult;\n\n fn click(&mut self, context: &mut Context, pos: Point2) -> ZResult<StackCommand>;\n\n fn resize(&mut self, aspect_ratio: f32);\n\n\n\n fn move_mouse(&mut self, _context: &mut Context, _pos: Point2) -> ZResult {\n\n Ok(())\n\n }\n\n}\n\n\n\nconst ERR_MSG_STACK_EMPTY: &str = \"Screen stack is empty\";\n\n\n", "file_path": "src/screen.rs", "rank": 50, "score": 229318.89467364535 }, { "content": "pub fn zrng() -> impl rand::Rng {\n\n QuadRand\n\n}\n\n\n", "file_path": "src/core/utils.rs", "rank": 52, "score": 221846.82431458484 }, { "content": "fn apply_event_use_ability(state: &mut State, event: &event::UseAbility) {\n\n let id = event.id;\n\n let parts = state.parts_mut();\n\n if let Some(abilities) = 
parts.abilities.get_opt_mut(id) {\n\n for r_ability in &mut abilities.0 {\n\n if r_ability.ability == event.ability {\n\n let cooldown = r_ability.ability.base_cooldown();\n\n assert_eq!(r_ability.status, ability::Status::Ready);\n\n if !cooldown.is_zero() {\n\n r_ability.status = ability::Status::Cooldown(cooldown);\n\n }\n\n }\n\n }\n\n }\n\n if let Some(agent) = parts.agent.get_opt_mut(id) {\n\n if agent.attacks.0 > 0 {\n\n agent.attacks.0 -= 1;\n\n } else if agent.jokers.0 > 0 {\n\n agent.jokers.0 -= 1;\n\n } else {\n", "file_path": "src/core/battle/state/apply.rs", "rank": 53, "score": 220740.75909327195 }, { "content": "/// Remove an element from a vector.\n\npub fn try_remove_item<T: Debug + PartialEq>(vec: &mut Vec<T>, e: &T) -> bool {\n\n vec.iter()\n\n .position(|current| current == e)\n\n .map(|e| vec.remove(e))\n\n .is_some()\n\n}\n\n\n", "file_path": "src/core/utils.rs", "rank": 54, "score": 218753.16327425255 }, { "content": "pub trait Widget: Debug {\n\n fn draw(&self, _: &mut Context) -> GameResult<()>;\n\n fn click(&self, _: Point2) {}\n\n fn move_mouse(&mut self, _: Point2) {}\n\n fn rect(&self) -> Rect;\n\n fn set_pos(&mut self, pos: Point2);\n\n\n\n fn can_stretch(&self) -> bool {\n\n false\n\n }\n\n\n\n fn stretch(&mut self, _: &mut Context, _width: f32) -> Result<StretchStatus> {\n\n // The default impl assumes the widget can't stretch.\n\n assert!(!self.can_stretch());\n\n Ok(StretchStatus::Unstretchable)\n\n }\n\n\n\n fn stretch_to_self(&mut self, context: &mut Context) -> Result<StretchStatus> {\n\n let w = self.rect().w;\n\n self.stretch(context, w)\n\n }\n\n}\n\n\n", "file_path": "zgui/src/lib.rs", "rank": 55, "score": 217269.0179865699 }, { "content": "pub fn sort_agent_ids_by_distance_to_enemies(state: &State, ids: &mut [Id]) {\n\n ids.sort_unstable_by_key(|&id| {\n\n let agent_player_id = state.parts().belongs_to.get(id).0;\n\n let agent_pos = state.parts().pos.get(id).0;\n\n let mut min_distance = state.map().height();\n\n for 
enemy_id in enemy_agent_ids(state, agent_player_id) {\n\n let enemy_pos = state.parts().pos.get(enemy_id).0;\n\n let distance = map::distance_hex(agent_pos, enemy_pos);\n\n if distance < min_distance {\n\n min_distance = distance;\n\n }\n\n }\n\n min_distance\n\n });\n\n}\n\n\n", "file_path": "src/core/battle/state.rs", "rank": 56, "score": 217194.9559809172 }, { "content": "fn execute_use_ability(state: &mut State, cb: Cb, command: &command::UseAbility) {\n\n let mut context = match command.ability {\n\n Ability::Knockback => execute_use_ability_knockback(state, command),\n\n Ability::Club => execute_use_ability_club(state, command),\n\n Ability::Jump => execute_use_ability_jump(state, command),\n\n Ability::LongJump => execute_use_ability_long_jump(state, command),\n\n Ability::Dash => execute_use_ability_dash(state, command),\n\n Ability::Rage => execute_use_ability_rage(state, command),\n\n Ability::Heal => execute_use_ability_heal(state, command, Strength(2)),\n\n Ability::GreatHeal => execute_use_ability_heal(state, command, Strength(3)),\n\n Ability::Vanish => execute_use_ability_vanish(state, command),\n\n Ability::ExplodeFire => execute_use_ability_explode_fire(state, command),\n\n Ability::ExplodePoison => execute_use_ability_explode_poison(state, command),\n\n Ability::ExplodePush => execute_use_ability_explode_push(state, command),\n\n Ability::ExplodeDamage => execute_use_ability_explode_damage(state, command),\n\n Ability::Poison => execute_use_ability_poison(state, command),\n\n Ability::Bomb => execute_use_ability_bomb_damage(state, command),\n\n Ability::BombPush => execute_use_ability_bomb_push(state, command),\n\n Ability::BombFire => execute_use_ability_bomb_fire(state, command),\n\n Ability::BombPoison => execute_use_ability_bomb_poison(state, command),\n", "file_path": "src/core/battle/execute.rs", "rank": 57, "score": 216805.36539404138 }, { "content": "fn line_with_info_button(\n\n context: &mut Context,\n\n font: Font,\n\n gui: &mut 
Gui<Message>,\n\n text: &str,\n\n message: Message,\n\n) -> ZResult<Box<dyn ui::Widget>> {\n\n let h = line_heights().normal;\n\n let text = Box::new(Text::new((text, font, FONT_SIZE)));\n\n let icon = Box::new(graphics::Image::new(context, \"/img/icon_info.png\")?);\n\n let button = ui::Button::new(context, icon, h, gui.sender(), message)?;\n\n let mut line = Box::new(ui::HLayout::new().stretchable(true));\n\n line.add(Box::new(ui::Label::new(context, text, h)?));\n\n line.add(Box::new(ui::Spacer::new_horizontal(0.0).stretchable(true)));\n\n line.add(Box::new(button));\n\n Ok(line)\n\n}\n\n\n", "file_path": "src/screen/battle.rs", "rank": 58, "score": 213209.17078660015 }, { "content": "pub fn line_heights() -> LineHeights {\n\n LineHeights {\n\n small: 1.0 / 20.0,\n\n normal: 1.0 / 12.0,\n\n big: 1.0 / 9.0,\n\n large: 1.0 / 6.0,\n\n }\n\n}\n\n\n\npub const OFFSET_SMALL: f32 = 0.02;\n\npub const OFFSET_BIG: f32 = 0.04;\n\n\n", "file_path": "src/utils.rs", "rank": 59, "score": 212242.3449279149 }, { "content": "fn try_execute_passive_ability_burn(state: &mut State, target_id: Id) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n let damage = battle::Strength(1);\n\n let target_effects = vec![wound_or_kill(state, target_id, damage)];\n\n context.instant_effects.push((target_id, target_effects));\n\n context\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 60, "score": 211533.13325766788 }, { "content": "fn does_agent_have_ability(state: &State, id: Id, ability: &Ability) -> bool {\n\n if let Some(abilities) = state.parts().abilities.get_opt(id) {\n\n for current_ability in &abilities.0 {\n\n if ability == &current_ability.ability {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/core/battle/ai.rs", "rank": 61, "score": 210273.27799494367 }, { "content": "pub fn add_offsets_and_bg(\n\n context: &mut Context,\n\n w: Box<dyn ui::Widget>,\n\n offset: f32,\n\n) -> ZResult<ui::LayersLayout> {\n\n 
add_bg(context, add_offsets(w, offset))\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 62, "score": 209806.79710379685 }, { "content": "// The main line height of this screen.\n\nfn line_height() -> f32 {\n\n utils::line_heights().normal\n\n}\n\n\n", "file_path": "src/screen/campaign.rs", "rank": 63, "score": 209692.2118396187 }, { "content": "fn try_execute_passive_abilities_on_move(state: &mut State, cb: Cb, target_id: Id) {\n\n try_execute_passive_abilities_tick(state, cb, target_id)\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 64, "score": 209624.07246187126 }, { "content": "pub fn message(\n\n view: &mut BattleView,\n\n context: &mut Context,\n\n pos: PosHex,\n\n text: &str,\n\n) -> ZResult<Box<dyn Action>> {\n\n let visible = [0.0, 0.0, 0.0, 1.0].into();\n\n let invisible = Color { a: 0.0, ..visible };\n\n let font_size = font_size();\n\n let text = Box::new(Text::new((text, view.font(), font_size)));\n\n let mut sprite = Sprite::from_drawable(context, text, 0.1)?;\n\n sprite.set_centered(true);\n\n let point = view.hex_to_point(pos);\n\n let point = point - Vector2::new(0.0, view.tile_size() * 1.5);\n\n sprite.set_pos(point);\n\n sprite.set_color(invisible);\n\n let action_show_hide = seq([\n\n action::Show::new(&view.layers().text, &sprite).boxed(),\n\n action::ChangeColorTo::new(&sprite, visible, time_s(0.4)).boxed(),\n\n action::Sleep::new(time_s(0.4)).boxed(),\n", "file_path": "src/screen/battle/visualize.rs", "rank": 65, "score": 208121.2780230436 }, { "content": "fn try_execute_passive_ability_spike_trap(state: &mut State, target_id: Id) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n let damage = battle::Strength(1);\n\n let target_effects = vec![wound_or_kill(state, target_id, damage)];\n\n context.instant_effects.push((target_id, target_effects));\n\n context\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 66, "score": 207659.79584799754 }, { "content": "fn default_sub_tile_z() -> f32 
{\n\n 0.0\n\n}\n", "file_path": "src/sprite_info.rs", "rank": 67, "score": 206490.66157885687 }, { "content": "fn component_strength(n: i32) -> Component {\n\n component::Strength {\n\n strength: Strength(n),\n\n base_strength: Strength(n),\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "src/core/battle/tests.rs", "rank": 68, "score": 206341.5595319483 }, { "content": "fn build_panel_agent_abilities(\n\n context: &mut Context,\n\n _view: &BattleView, // TODO: use this for cloning stored icon images\n\n font: Font,\n\n gui: &mut Gui<Message>,\n\n state: &State,\n\n id: Id,\n\n mode: &SelectionMode,\n\n) -> ZResult<Option<ui::RcWidget>> {\n\n let parts = state.parts();\n\n let abilities = match parts.abilities.get_opt(id) {\n\n Some(abilities) => &abilities.0,\n\n None => return Ok(None),\n\n };\n\n let mut layout = ui::VLayout::new().stretchable(true);\n\n let h = line_heights().large;\n\n for ability in abilities {\n\n let image_path = match ability.ability {\n\n // TODO: load all the images only once. 
Store them in some struct and only clone them here.\n\n // TODO: Move into view::Images!\n", "file_path": "src/screen/battle.rs", "rank": 69, "score": 206268.31710292987 }, { "content": "fn interpolate(from: Color, to: Color, k: f32) -> Color {\n\n let calc = |a, b| a + (b - a) * k;\n\n Color {\n\n r: calc(from.r, to.r),\n\n g: calc(from.g, to.g),\n\n b: calc(from.b, to.b),\n\n a: calc(from.a, to.a),\n\n }\n\n}\n", "file_path": "zscene/src/action/change_color_to.rs", "rank": 70, "score": 205177.00795245531 }, { "content": "pub fn add_offsets_and_bg_big(\n\n context: &mut Context,\n\n w: Box<dyn ui::Widget>,\n\n) -> ZResult<ui::LayersLayout> {\n\n add_offsets_and_bg(context, w, OFFSET_BIG)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 71, "score": 205126.26864614978 }, { "content": "fn line_height_small() -> f32 {\n\n line_height() / 8.0\n\n}\n\n\n", "file_path": "src/screen/campaign.rs", "rank": 72, "score": 205038.47511544256 }, { "content": "fn visualize_event_use_ability(\n\n state: &State,\n\n view: &mut BattleView,\n\n context: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n let action_main = match event.ability {\n\n Ability::Jump | Ability::LongJump => {\n\n visualize_event_use_ability_jump(state, view, context, event)?\n\n }\n\n Ability::Dash => visualize_event_use_ability_dash(state, view, context, event)?,\n\n Ability::Summon => visualize_event_use_ability_summon(state, view, context, event)?,\n\n Ability::Bloodlust => visualize_event_use_ability_bloodlust(state, view, context, event)?,\n\n Ability::Heal | Ability::GreatHeal => {\n\n visualize_event_use_ability_heal(state, view, context, event)?\n\n }\n\n Ability::Rage => visualize_event_use_ability_rage(state, view, context, event)?,\n\n Ability::Knockback => visualize_event_use_ability_knockback(state, view, context, event)?,\n\n Ability::Club => visualize_event_use_ability_club(state, view, context, event)?,\n\n Ability::ExplodePush\n", "file_path": 
"src/screen/battle/visualize.rs", "rank": 73, "score": 200902.58983053302 }, { "content": "pub fn get_armor(state: &State, id: Id) -> Strength {\n\n let parts = state.parts();\n\n let default = Strength(0);\n\n parts.armor.get_opt(id).map(|v| v.armor).unwrap_or(default)\n\n}\n\n\n", "file_path": "src/core/battle/state.rs", "rank": 74, "score": 198951.2980537476 }, { "content": "fn main() -> gwg::GameResult {\n\n gwg::start(\n\n conf::Conf {\n\n physical_root_dir: Some(\"resources\".into()),\n\n ..Default::default()\n\n },\n\n |mut context| Box::new(State::new(&mut context).expect(\"Can't create the state\")),\n\n )\n\n}\n", "file_path": "zgui/examples/text_button.rs", "rank": 75, "score": 198704.72747954447 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum Message {\n\n Command,\n\n}\n\n\n", "file_path": "zgui/examples/text_button.rs", "rank": 76, "score": 198014.9468740269 }, { "content": "fn try_execute_passive_abilities_on_attack(\n\n state: &mut State,\n\n attacker_id: Id,\n\n target_id: Id,\n\n) -> ExecuteContext {\n\n let mut context = ExecuteContext::default();\n\n let parts = state.parts();\n\n let target_pos = parts.pos.get(target_id).0;\n\n let attacker_pos = parts.pos.get(attacker_id).0;\n\n if let Some(passive_abilities) = parts.passive_abilities.get_opt(attacker_id) {\n\n let abilities = passive_abilities.clone();\n\n for &ability in &abilities.0 {\n\n trace!(\"ability: {:?}\", ability);\n\n match ability {\n\n PassiveAbility::HeavyImpact => {\n\n let dir = Dir::get_dir_from_to(attacker_pos, target_pos);\n\n let from = target_pos;\n\n let strength = PushStrength(Weight::Normal);\n\n let blocker_weight = parts.blocker.get(target_id).weight;\n\n let to = if strength.can_push(blocker_weight) {\n", "file_path": "src/core/battle/execute.rs", "rank": 77, "score": 197368.0786669587 }, { "content": "// TODO: Return a `Result` or an `Option` (check that attack is possible at all?).\n\n// TODO: Return a struct with named fields.\n\n// TODO: Move to some 
other module.\n\npub fn hit_chance(state: &State, attacker_id: Id, target_id: Id) -> (i32, i32) {\n\n let parts = state.parts();\n\n let agent_target = parts.agent.get(target_id);\n\n let agent_attacker = parts.agent.get(attacker_id);\n\n let attacker_strength = parts.strength.get(attacker_id).strength;\n\n let attacker_base_strength = parts.strength.get(attacker_id).base_strength;\n\n let attacker_wounds = utils::clamp_max(attacker_base_strength.0 - attacker_strength.0, 3);\n\n let target_dodge = agent_target.dodge;\n\n let attack_accuracy = agent_attacker.attack_accuracy;\n\n let attack_strength = agent_attacker.attack_strength;\n\n let k_min = attack_accuracy.0 - target_dodge.0 - attacker_wounds;\n\n let k_max = k_min + attack_strength.0;\n\n (k_min, k_max)\n\n}\n\n\n", "file_path": "src/core/battle/execute.rs", "rank": 78, "score": 196340.1605713608 }, { "content": "#[derive(Clone, Debug)]\n\nenum Message {\n\n Back,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct GeneralInfo {\n\n font: graphics::Font,\n\n gui: Gui<Message>,\n\n}\n\n\n\nimpl GeneralInfo {\n\n pub fn new(context: &mut Context, title: &str, lines: &[String]) -> ZResult<Self> {\n\n let font = utils::default_font(context);\n\n let mut gui = ui::Gui::new(context);\n\n let h = utils::line_heights().normal;\n\n let font_size = utils::font_size();\n\n let mut layout = Box::new(ui::VLayout::new().stretchable(true));\n\n let text_ = |s: &str| Box::new(Text::new((s, font, font_size)));\n\n let label_ = |context: &mut Context, text: &str| -> ZResult<_> {\n\n Ok(ui::Label::new(context, text_(text), h)?)\n", "file_path": "src/screen/general_info.rs", "rank": 79, "score": 196159.35453846824 }, { "content": "fn visualize_event_use_ability_bloodlust(\n\n _: &State,\n\n view: &mut BattleView,\n\n _: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n let time = time_s(0.5);\n\n show_frame_for_time(view, event.id, \"bloodlust\", time)\n\n}\n\n\n", "file_path": 
"src/screen/battle/visualize.rs", "rank": 80, "score": 195766.28626499875 }, { "content": "fn visualize_event_use_ability_knockback(\n\n state: &State,\n\n view: &mut BattleView,\n\n _: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n lunge(state, view, event.id, event.pos)\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 81, "score": 195766.28626499875 }, { "content": "fn visualize_event_use_ability_jump(\n\n state: &State,\n\n view: &mut BattleView,\n\n context: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n let sprite_object = view.id_to_sprite(event.id).clone();\n\n let sprite_shadow = view.id_to_shadow_sprite(event.id).clone();\n\n let from = state.parts().pos.get(event.id).0;\n\n let from = view.hex_to_point(from);\n\n let to = view.hex_to_point(event.pos);\n\n let diff = to - from;\n\n let action_arc_move = arc_move(view, &sprite_object, diff);\n\n let time = action_arc_move.duration();\n\n let z = hex_pos_to_z(event.pos);\n\n let action_move_shadow = action::MoveBy::new(&sprite_shadow, diff, time).boxed();\n\n let action_dust = show_dust_at_pos(view, context, event.pos)?;\n\n let mut actions = Vec::new();\n\n actions.push(action_set_z(&view.layers().objects, &sprite_object, 200.0));\n\n if sprite_object.has_frame(\"jump\") {\n", "file_path": "src/screen/battle/visualize.rs", "rank": 82, "score": 195766.28626499875 }, { "content": "fn visualize_event_use_ability_rage(\n\n _: &State,\n\n view: &mut BattleView,\n\n _: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n let time = time_s(1.0);\n\n Ok(fork(show_frame_for_time(view, event.id, \"rage\", time)?))\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 83, "score": 195766.28626499875 }, { "content": "fn visualize_event_use_ability_heal(\n\n _: &State,\n\n view: &mut BattleView,\n\n _: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n 
let time = time_s(1.0);\n\n Ok(fork(show_frame_for_time(view, event.id, \"heal\", time)?))\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 84, "score": 195766.28626499875 }, { "content": "fn visualize_event_use_ability_club(\n\n state: &State,\n\n view: &mut BattleView,\n\n _: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n lunge(state, view, event.id, event.pos)\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 85, "score": 195766.28626499875 }, { "content": "fn visualize_event_use_ability_dash(\n\n state: &State,\n\n view: &mut BattleView,\n\n _: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n let sprite = view.id_to_sprite(event.id).clone();\n\n let z = hex_pos_to_z(event.pos);\n\n let from = state.parts().pos.get(event.id).0;\n\n let point_from = view.hex_to_point(from);\n\n let point_to = view.hex_to_point(event.pos);\n\n let diff = point_to - point_from;\n\n let time = time_s(0.1);\n\n Ok(seq([\n\n action_set_z(&view.layers().objects, &sprite, z),\n\n move_object_with_shadow(view, event.id, diff, time),\n\n ]))\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 86, "score": 195766.28626499875 }, { "content": "fn visualize_event_use_ability_explode(\n\n state: &State,\n\n view: &mut BattleView,\n\n context: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n let pos = state.parts().pos.get(event.id).0;\n\n let action_dust = show_dust_at_pos(view, context, pos)?;\n\n let color = [1.0, 0.0, 0.0, 0.7].into();\n\n let scale = 2.5;\n\n let time = time_s(TIME_DEFAULT_FLARE * 0.8);\n\n let action_flare = show_flare_scale_time(view, context, pos, color, scale, time)?;\n\n let action_ground_mark = show_explosion_ground_mark(view, context, pos)?;\n\n Ok(seq([\n\n fork(seq([action_flare, action_dust])),\n\n action_ground_mark,\n\n ]))\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 87, "score": 
195766.28626499875 }, { "content": "fn remove_brief_agent_info(view: &mut BattleView, id: Id) -> ZResult<Box<dyn Action>> {\n\n let mut actions = Vec::new();\n\n let sprites = view.agent_info_get(id);\n\n for sprite in sprites {\n\n let color = Color {\n\n a: 0.0,\n\n ..sprite.color()\n\n };\n\n actions.push(fork(seq([\n\n action::ChangeColorTo::new(&sprite, color, time_s(0.4)).boxed(),\n\n action::Hide::new(&view.layers().dots, &sprite).boxed(),\n\n ])));\n\n }\n\n Ok(seq(actions))\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 88, "score": 194635.16282022907 }, { "content": "fn prototypes(slice: &[(&str, Vec<Component>)]) -> Prototypes {\n\n let map = slice\n\n .iter()\n\n .cloned()\n\n .map(|(name, components)| (name.into(), components))\n\n .collect();\n\n let mut prototypes = Prototypes(map);\n\n prototypes.init_components();\n\n prototypes\n\n}\n\n\n", "file_path": "src/core/battle/tests.rs", "rank": 89, "score": 193121.2090472075 }, { "content": "fn try_execute_passive_abilities_on_begin_turn(state: &mut State, cb: Cb) {\n\n for id in state::players_agent_ids(state, state.player_id()) {\n\n try_execute_passive_abilities_tick(state, cb, id);\n\n }\n\n\n\n // TODO: extract to some self-abilities-method?\n\n {\n\n let ids = state.parts().passive_abilities.ids_collected();\n\n for id in ids {\n\n assert!(state.parts().is_exist(id));\n\n let owner = match state.parts().belongs_to.get_opt(id) {\n\n Some(owner) => owner.0,\n\n None => continue,\n\n };\n\n if state.player_id() != owner {\n\n continue;\n\n }\n\n let abilities = state.parts().passive_abilities.get(id).clone();\n\n for &ability in &abilities.0 {\n\n assert!(state.parts().is_exist(id));\n", "file_path": "src/core/battle/execute.rs", "rank": 90, "score": 192441.6146024348 }, { "content": "fn visualize_event_use_ability_throw_bomb(\n\n _: &State,\n\n view: &mut BattleView,\n\n _: &mut Context,\n\n event: &event::UseAbility,\n\n) -> ZResult<Box<dyn Action>> {\n\n let time = 
time_s(0.5);\n\n Ok(fork(show_frame_for_time(view, event.id, \"throw\", time)?))\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 91, "score": 190921.80484551788 }, { "content": "pub fn rand_tile_offset(size: f32, radius: f32) -> Vector2 {\n\n assert!(radius >= 0.0);\n\n let r = size * radius;\n\n Vector2::new(roll_dice(-r, r), roll_dice(-r, r) * FLATNESS_COEFFICIENT)\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub enum Facing {\n\n Left,\n\n Right,\n\n}\n\n\n\nimpl Facing {\n\n pub fn from_positions(tile_size: f32, from: PosHex, to: PosHex) -> Option<Self> {\n\n if from == to {\n\n return None;\n\n }\n\n let from = hex_to_point(tile_size, from);\n\n let to = hex_to_point(tile_size, to);\n\n Some(if to.x > from.x {\n", "file_path": "src/geom.rs", "rank": 92, "score": 190209.44762798012 }, { "content": "pub fn get_effect_icon(view: &BattleView, effect: &effect::Lasting) -> Image {\n\n match effect {\n\n effect::Lasting::Poison => view.images().effect_poison.clone(),\n\n effect::Lasting::Stun => view.images().effect_stun.clone(),\n\n effect::Lasting::Bloodlust => view.images().effect_bloodlust.clone(),\n\n }\n\n}\n\n\n", "file_path": "src/screen/battle/visualize.rs", "rank": 93, "score": 188298.67992408306 }, { "content": "fn apply_scheduled_ability(state: &mut State, id: Id, planned_ability: &PlannedAbility) {\n\n trace!(\"effect::apply_scheduled_ability: {:?}\", planned_ability);\n\n let schedule = &mut state.parts_mut().schedule;\n\n if schedule.get_opt(id).is_none() {\n\n schedule.insert(id, component::Schedule::default());\n\n }\n\n let planned = &mut schedule.get_mut(id).planned;\n\n if let Some(i) = planned\n\n .iter()\n\n .position(|e| e.ability == planned_ability.ability)\n\n {\n\n planned[i] = planned_ability.clone();\n\n } else {\n\n planned.push(planned_ability.clone());\n\n }\n\n}\n\n\n", "file_path": "src/core/battle/state/apply.rs", "rank": 94, "score": 187882.31943962895 }, { "content": "pub fn shuffle_vec<T>(mut vec: Vec<T>) -> 
Vec<T> {\n\n vec.shuffle(&mut zrng());\n\n vec\n\n}\n\n\n", "file_path": "src/core/utils.rs", "rank": 95, "score": 187245.11159370033 }, { "content": "fn make_bg(context: &mut Context, rect: Rect) -> Result<Sprite> {\n\n make_rect(context, rect, SPRITE_COLOR_BG)\n\n}\n\n\n", "file_path": "zgui/src/lib.rs", "rank": 96, "score": 183446.60569898965 }, { "content": "fn try_execute_passive_abilities_tick(state: &mut State, cb: Cb, target_id: Id) {\n\n trace!(\"try_execute_passive_abilities_tick\");\n\n if !state.parts().is_exist(target_id) {\n\n return;\n\n }\n\n let target_pos = state.parts().pos.get(target_id).0;\n\n let ids = state.parts().passive_abilities.ids_collected();\n\n for id in ids {\n\n if !state.parts().is_exist(target_id) {\n\n continue;\n\n }\n\n if state.parts().agent.get_opt(target_id).is_none() {\n\n continue;\n\n }\n\n let abilities = state.parts().passive_abilities.get(id).clone();\n\n let pos = match state.parts().pos.get_opt(id) {\n\n Some(pos) => pos.0,\n\n None => continue,\n\n };\n\n if pos != target_pos {\n", "file_path": "src/core/battle/execute.rs", "rank": 97, "score": 181193.39311236847 }, { "content": "pub fn try_receive<Message>(opt_rx: &Option<Receiver<Message>>) -> Option<Message> {\n\n opt_rx.as_ref().and_then(|rx| rx.try_recv().ok())\n\n}\n", "file_path": "src/utils.rs", "rank": 98, "score": 180924.84491106178 }, { "content": "fn tick_planned_abilities(state: &mut State) {\n\n let phase = Phase::from_player_id(state.player_id());\n\n let ids = state.parts().schedule.ids_collected();\n\n for obj_id in ids {\n\n let schedule = state.parts_mut().schedule.get_mut(obj_id);\n\n for planned in &mut schedule.planned {\n\n if planned.phase == phase {\n\n planned.rounds.decrease();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/core/battle/state/apply.rs", "rank": 99, "score": 180720.33210395428 } ]
Rust
git-packetline/src/read/async_io.rs
mellowagain/gitoxide
dc58eca510e5a067acdeaad4b595a34b4598a0cd
use std::io; use bstr::ByteSlice; use futures_io::AsyncRead; use futures_lite::AsyncReadExt; use crate::{ decode, read::{ExhaustiveOutcome, WithSidebands}, PacketLine, StreamingPeekableIter, MAX_LINE_LEN, U16_HEX_BYTES, }; impl<T> StreamingPeekableIter<T> where T: AsyncRead + Unpin, { #[allow(clippy::needless_lifetimes)] async fn read_line_inner<'a>( reader: &mut T, buf: &'a mut Vec<u8>, ) -> io::Result<Result<PacketLine<'a>, decode::Error>> { let (hex_bytes, data_bytes) = buf.split_at_mut(4); reader.read_exact(hex_bytes).await?; let num_data_bytes = match decode::hex_prefix(hex_bytes) { Ok(decode::PacketLineOrWantedSize::Line(line)) => return Ok(Ok(line)), Ok(decode::PacketLineOrWantedSize::Wanted(additional_bytes)) => additional_bytes as usize, Err(err) => return Ok(Err(err)), }; let (data_bytes, _) = data_bytes.split_at_mut(num_data_bytes); reader.read_exact(data_bytes).await?; match decode::to_data_line(data_bytes) { Ok(line) => Ok(Ok(line)), Err(err) => Ok(Err(err)), } } async fn read_line_inner_exhaustive<'a>( reader: &mut T, buf: &'a mut Vec<u8>, delimiters: &[PacketLine<'static>], fail_on_err_lines: bool, buf_resize: bool, ) -> ExhaustiveOutcome<'a> { ( false, None, Some(match Self::read_line_inner(reader, buf).await { Ok(Ok(line)) => { if delimiters.contains(&line) { let stopped_at = delimiters.iter().find(|l| **l == line).cloned(); buf.clear(); return (true, stopped_at, None); } else if fail_on_err_lines { if let Some(err) = line.check_error() { let err = err.0.as_bstr().to_string(); buf.clear(); return (true, None, Some(Err(io::Error::new(io::ErrorKind::Other, err)))); } } let len = line .as_slice() .map(|s| s.len() + U16_HEX_BYTES) .unwrap_or(U16_HEX_BYTES); if buf_resize { buf.resize(len, 0); } Ok(Ok(crate::decode(buf).expect("only valid data here"))) } Ok(Err(err)) => { buf.clear(); Ok(Err(err)) } Err(err) => { buf.clear(); Err(err) } }), ) } pub async fn read_line(&mut self) -> Option<io::Result<Result<PacketLine<'_>, decode::Error>>> { if 
self.is_done { return None; } if !self.peek_buf.is_empty() { std::mem::swap(&mut self.peek_buf, &mut self.buf); self.peek_buf.clear(); Some(Ok(Ok(crate::decode(&self.buf).expect("only valid data in peek buf")))) } else { if self.buf.len() != MAX_LINE_LEN { self.buf.resize(MAX_LINE_LEN, 0); } let (is_done, stopped_at, res) = Self::read_line_inner_exhaustive( &mut self.read, &mut self.buf, self.delimiters, self.fail_on_err_lines, false, ) .await; self.is_done = is_done; self.stopped_at = stopped_at; res } } pub async fn peek_line(&mut self) -> Option<io::Result<Result<PacketLine<'_>, decode::Error>>> { if self.is_done { return None; } if self.peek_buf.is_empty() { self.peek_buf.resize(MAX_LINE_LEN, 0); let (is_done, stopped_at, res) = Self::read_line_inner_exhaustive( &mut self.read, &mut self.peek_buf, self.delimiters, self.fail_on_err_lines, true, ) .await; self.is_done = is_done; self.stopped_at = stopped_at; res } else { Some(Ok(Ok(crate::decode(&self.peek_buf).expect("only valid data here")))) } } pub fn as_read(&mut self) -> WithSidebands<'_, T, fn(bool, &[u8])> { WithSidebands::new(self) } pub fn as_read_with_sidebands<F: FnMut(bool, &[u8]) + Unpin>( &mut self, handle_progress: F, ) -> WithSidebands<'_, T, F> { WithSidebands::with_progress_handler(self, handle_progress) } pub fn as_read_without_sidebands<F: FnMut(bool, &[u8]) + Unpin>(&mut self) -> WithSidebands<'_, T, F> { WithSidebands::without_progress_handler(self) } }
use std::io; use bstr::ByteSlice; use futures_io::AsyncRead; use futures_lite::AsyncReadExt; use crate::{ decode, read::{ExhaustiveOutcome, WithSidebands}, PacketLine, StreamingPeekableIter, MAX_LINE_LEN, U16_HEX_BYTES, }; impl<T> StreamingPeekableIter<T> where T: AsyncRead + Unpin, { #[allow(clippy::needless_lifetimes)] async fn read_line_inner<'a>( reader: &mut T, buf: &'a mut Vec<u8>, ) -> io::Result<Result<PacketLine<'a>, decode::Error>> { let (hex_bytes, data_bytes) = buf.split_at_mut(4); reader.read_exact(hex_bytes).await?; let num_data_bytes = match decode::hex_prefix(hex_bytes) { Ok(decode::PacketLineOrWantedSize::Line(line)) => return Ok(Ok(line)), Ok(decode::PacketLineOrWantedSize::Wanted(additional_bytes)) => additional_bytes as usize, Err(err) => return Ok(Err(err)), }; let (data_bytes, _) = data_bytes.split_at_mut(num_data_bytes); reader.read_exact(data_bytes).await?; match decode::to_data_line(data_bytes) { Ok(line) => Ok(Ok(line)), Err(err) => Ok(Err(err)), } } async fn read_line_inner_exhaustive<'a>( reader: &mut T, buf: &'a mut Vec<u8>, delimiters: &[PacketLine<'static>], fail_on_err_lines: bool, buf_resize: bool, ) -> ExhaustiveOutcome<'a> { ( false, None, Some(match Self::read_line_inner(reader, buf).await { Ok(Ok(line)) => { if delimiters.contains(&line) { let stopped_at = delimiters.iter().find(|l| **l == line).cloned(); buf.clear(); return (true, stopped_at, None); } else if fail_on_err_lines { if let Some(err) = line.check_error() { let err = err.0.as_bstr().to_string(); buf.clear(); return (true, None, Some(Err(io::Error::new(io::ErrorKind::Other, err)))); } }
if buf_resize { buf.resize(len, 0); } Ok(Ok(crate::decode(buf).expect("only valid data here"))) } Ok(Err(err)) => { buf.clear(); Ok(Err(err)) } Err(err) => { buf.clear(); Err(err) } }), ) } pub async fn read_line(&mut self) -> Option<io::Result<Result<PacketLine<'_>, decode::Error>>> { if self.is_done { return None; } if !self.peek_buf.is_empty() { std::mem::swap(&mut self.peek_buf, &mut self.buf); self.peek_buf.clear(); Some(Ok(Ok(crate::decode(&self.buf).expect("only valid data in peek buf")))) } else { if self.buf.len() != MAX_LINE_LEN { self.buf.resize(MAX_LINE_LEN, 0); } let (is_done, stopped_at, res) = Self::read_line_inner_exhaustive( &mut self.read, &mut self.buf, self.delimiters, self.fail_on_err_lines, false, ) .await; self.is_done = is_done; self.stopped_at = stopped_at; res } } pub async fn peek_line(&mut self) -> Option<io::Result<Result<PacketLine<'_>, decode::Error>>> { if self.is_done { return None; } if self.peek_buf.is_empty() { self.peek_buf.resize(MAX_LINE_LEN, 0); let (is_done, stopped_at, res) = Self::read_line_inner_exhaustive( &mut self.read, &mut self.peek_buf, self.delimiters, self.fail_on_err_lines, true, ) .await; self.is_done = is_done; self.stopped_at = stopped_at; res } else { Some(Ok(Ok(crate::decode(&self.peek_buf).expect("only valid data here")))) } } pub fn as_read(&mut self) -> WithSidebands<'_, T, fn(bool, &[u8])> { WithSidebands::new(self) } pub fn as_read_with_sidebands<F: FnMut(bool, &[u8]) + Unpin>( &mut self, handle_progress: F, ) -> WithSidebands<'_, T, F> { WithSidebands::with_progress_handler(self, handle_progress) } pub fn as_read_without_sidebands<F: FnMut(bool, &[u8]) + Unpin>(&mut self) -> WithSidebands<'_, T, F> { WithSidebands::without_progress_handler(self) } }
let len = line .as_slice() .map(|s| s.len() + U16_HEX_BYTES) .unwrap_or(U16_HEX_BYTES);
assignment_statement
[ { "content": "fn into_io_err(err: Error) -> io::Error {\n\n io::Error::new(io::ErrorKind::Other, err)\n\n}\n\n\n\nimpl<W: AsyncWrite + Unpin> AsyncWrite for LineWriter<'_, W> {\n\n fn poll_write(self: Pin<&mut Self>, cx: &mut Context<'_>, data: &[u8]) -> Poll<io::Result<usize>> {\n\n use futures_lite::ready;\n\n let mut this = self.project();\n\n loop {\n\n match &mut this.state {\n\n State::Idle => {\n\n let data_len = this.prefix.len() + data.len() + this.suffix.len();\n\n if data_len > MAX_DATA_LEN {\n\n return Poll::Ready(Err(into_io_err(Error::DataLengthLimitExceeded(data_len))));\n\n }\n\n if data.is_empty() {\n\n return Poll::Ready(Err(into_io_err(Error::DataIsEmpty)));\n\n }\n\n let data_len = data_len + 4;\n\n let len_buf = u16_to_hex(data_len as u16);\n", "file_path": "git-packetline/src/encode/async_io.rs", "rank": 0, "score": 247027.51892694595 }, { "content": "/// Read bytes from `rd` and decompress them using `state` into a pre-allocated fitting buffer `dst`, returning the amount of bytes written.\n\npub fn read(rd: &mut impl BufRead, state: &mut Decompress, mut dst: &mut [u8]) -> io::Result<usize> {\n\n let mut total_written = 0;\n\n loop {\n\n let (written, consumed, ret, eof);\n\n {\n\n let input = rd.fill_buf()?;\n\n eof = input.is_empty();\n\n let before_out = state.total_out();\n\n let before_in = state.total_in();\n\n let flush = if eof {\n\n FlushDecompress::Finish\n\n } else {\n\n FlushDecompress::None\n\n };\n\n ret = state.decompress(input, dst, flush);\n\n written = (state.total_out() - before_out) as usize;\n\n total_written += written;\n\n dst = &mut dst[written..];\n\n consumed = (state.total_in() - before_in) as usize;\n\n }\n", "file_path": "git-features/src/zlib/stream/inflate.rs", "rank": 1, "score": 226715.25336563998 }, { "content": "fn div_decode_result(lhs: &mut data::decode_entry::Outcome, div: usize) {\n\n if div != 0 {\n\n lhs.num_deltas = (lhs.num_deltas as f32 / div as f32) as u32;\n\n lhs.decompressed_size /= div as 
u64;\n\n lhs.compressed_size /= div;\n\n lhs.object_size /= div as u64;\n\n }\n\n}\n\n\n\npub struct Reducer<'a, P, E> {\n\n progress: &'a parking_lot::Mutex<P>,\n\n check: traverse::SafetyCheck,\n\n then: Instant,\n\n entries_seen: usize,\n\n stats: traverse::Outcome,\n\n should_interrupt: &'a AtomicBool,\n\n _error: std::marker::PhantomData<E>,\n\n}\n\n\n\nimpl<'a, P, E> Reducer<'a, P, E>\n", "file_path": "git-pack/src/index/traverse/reduce.rs", "rank": 2, "score": 222946.7535086895 }, { "content": "/// An iterator over entries of the `log` file in reverse, using `buf` as sliding window.\n\n///\n\n/// Note that `buf` must be big enough to capture typical line length or else partial lines will be parsed and probably fail\n\n/// in the process.\n\n///\n\n/// This iterator is very expensive in terms of I/O operations and shouldn't be used to read more than the last few entries of the log.\n\n/// Use a forward iterator instead for these cases.\n\n///\n\n/// It will continue parsing even if individual log entries failed to parse, leaving it to the driver to decide whether to\n\n/// abort or continue.\n\npub fn reverse<F>(mut log: F, buf: &mut [u8]) -> std::io::Result<Reverse<'_, F>>\n\nwhere\n\n F: std::io::Read + std::io::Seek,\n\n{\n\n let pos = log.seek(std::io::SeekFrom::End(0))?;\n\n Ok(Reverse {\n\n buf,\n\n count: 0,\n\n read_and_pos: Some((log, pos)),\n\n last_nl_pos: None,\n\n })\n\n}\n\n\n\nimpl<'a, F> Iterator for Reverse<'a, F>\n\nwhere\n\n F: std::io::Read + std::io::Seek,\n\n{\n\n type Item = std::io::Result<Result<log::mutable::Line, decode::Error>>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "git-ref/src/store/file/log/iter.rs", "rank": 3, "score": 221433.96325970447 }, { "content": "/// Write a flush message to `out`.\n\npub fn flush_to_write(mut out: impl io::Write) -> io::Result<usize> {\n\n out.write_all(FLUSH_LINE).map(|_| 4)\n\n}\n\n\n", "file_path": "git-packetline/src/encode/blocking_io.rs", "rank": 4, "score": 
208244.71636165868 }, { "content": "/// Write a delim message to `out`.\n\npub fn delim_to_write(mut out: impl io::Write) -> io::Result<usize> {\n\n out.write_all(DELIMITER_LINE).map(|_| 4)\n\n}\n\n\n", "file_path": "git-packetline/src/encode/blocking_io.rs", "rank": 5, "score": 208244.71636165868 }, { "content": "/// Write a response-end message to `out`.\n\npub fn response_end_to_write(mut out: impl io::Write) -> io::Result<usize> {\n\n out.write_all(RESPONSE_END_LINE).map(|_| 4)\n\n}\n\n\n", "file_path": "git-packetline/src/encode/blocking_io.rs", "rank": 6, "score": 205166.51804288191 }, { "content": "#[inline]\n\nfn streaming_leb64decode(mut r: impl io::Read) -> Result<(u64, usize), io::Error> {\n\n let mut b = [0u8; 1];\n\n let mut i = 0;\n\n r.read_exact(&mut b)?;\n\n i += 1;\n\n let mut value = b[0] as u64 & 0x7f;\n\n while b[0] & 0x80 != 0 {\n\n r.read_exact(&mut b)?;\n\n i += 1;\n\n value += 1;\n\n value = (value << 7) + (b[0] as u64 & 0x7f)\n\n }\n\n Ok((value, i))\n\n}\n\n\n", "file_path": "git-pack/src/data/entry/decode.rs", "rank": 7, "score": 203447.6027716106 }, { "content": "pub fn locate_oid(id: git_hash::ObjectId, buf: &mut Vec<u8>) -> git_pack::data::Object<'_> {\n\n ldb().find(id, buf).expect(\"read success\").expect(\"id present\")\n\n}\n\n\n\nmod write {\n\n use git_odb::{loose, Write};\n\n\n\n use crate::store::loose::backend::{locate_oid, object_ids};\n\n\n\n #[test]\n\n fn read_and_write() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = tempfile::tempdir()?;\n\n let db = loose::Store::at(dir.path());\n\n let mut buf = Vec::new();\n\n let mut buf2 = Vec::new();\n\n\n\n for oid in object_ids() {\n\n let obj = locate_oid(oid, &mut buf);\n\n let actual = db.write(&obj.decode()?.into(), git_hash::Kind::Sha1)?;\n\n assert_eq!(actual, oid);\n", "file_path": "git-odb/tests/odb/store/loose/backend.rs", "rank": 8, "score": 199243.20721007476 }, { "content": "#[test]\n\nfn encode_decode_roundtrip() -> crate::Result {\n\n let buf = 
std::fs::read(fixture_path(\n\n \"objects/pack/pack-11fdfa9e156ab73caae3b6da867192221f2089c2.pack\",\n\n ))?;\n\n let expected_encoded_header = &buf[..12];\n\n let (version, num_objects) = git_pack::data::header::decode(expected_encoded_header.try_into()?)?;\n\n let actual_encoded_header = git_pack::data::header::encode(version, num_objects);\n\n assert_eq!(actual_encoded_header, expected_encoded_header);\n\n Ok(())\n\n}\n", "file_path": "git-pack/tests/pack/data/header.rs", "rank": 9, "score": 198820.44451626012 }, { "content": "pub fn assert_err_display<T: std::fmt::Debug, E: std::error::Error>(\n\n res: std::result::Result<T, E>,\n\n expected: impl AsRef<str>,\n\n) {\n\n match res {\n\n Ok(v) => panic!(\"Expected error '{}', got value {:?}\", expected.as_ref(), v),\n\n Err(err) => assert_eq!(err.to_string(), expected.as_ref()),\n\n }\n\n}\n\n\n\n#[cfg(all(feature = \"async-io\", not(feature = \"blocking-io\")))]\n\nmod decode;\n\n#[cfg(all(feature = \"async-io\", not(feature = \"blocking-io\")))]\n\nmod encode;\n\n#[cfg(all(feature = \"async-io\", not(feature = \"blocking-io\")))]\n\nmod read;\n\n#[cfg(all(feature = \"async-io\", not(feature = \"blocking-io\")))]\n\nmod write;\n", "file_path": "git-packetline/tests/async-packetline.rs", "rank": 10, "score": 190672.19473966942 }, { "content": "#[inline]\n\nfn streaming_parse_header_info(mut read: impl io::Read) -> Result<(u8, u64, usize), io::Error> {\n\n let mut byte = [0u8; 1];\n\n read.read_exact(&mut byte)?;\n\n let mut c = byte[0];\n\n let mut i = 1;\n\n let type_id = (c >> 4) & 0b0000_0111;\n\n let mut size = c as u64 & 0b0000_1111;\n\n let mut s = 4;\n\n while c & 0b1000_0000 != 0 {\n\n read.read_exact(&mut byte)?;\n\n c = byte[0];\n\n i += 1;\n\n size += ((c & 0b0111_1111) as u64) << s;\n\n s += 7\n\n }\n\n Ok((type_id, size, i))\n\n}\n\n\n\n/// Parses the header of a pack-entry, yielding object type id, decompressed object size, and consumed bytes\n", "file_path": "git-pack/src/data/entry/decode.rs", 
"rank": 11, "score": 190530.76458068882 }, { "content": "/// Decode `data` as packet line while reporting whether the data is complete or not using a [`Stream`].\n\npub fn streaming(data: &[u8]) -> Result<Stream<'_>, Error> {\n\n let data_len = data.len();\n\n if data_len < U16_HEX_BYTES {\n\n return Ok(Stream::Incomplete {\n\n bytes_needed: U16_HEX_BYTES - data_len,\n\n });\n\n }\n\n let wanted_bytes = match hex_prefix(&data[..U16_HEX_BYTES])? {\n\n PacketLineOrWantedSize::Wanted(s) => s as usize,\n\n PacketLineOrWantedSize::Line(line) => {\n\n return Ok(Stream::Complete {\n\n line,\n\n bytes_consumed: 4,\n\n })\n\n }\n\n } + U16_HEX_BYTES;\n\n if wanted_bytes > MAX_LINE_LEN {\n\n return Err(Error::DataLengthLimitExceeded(wanted_bytes));\n\n }\n\n if data_len < wanted_bytes {\n", "file_path": "git-packetline/src/decode.rs", "rank": 12, "score": 185455.41554970015 }, { "content": "/// Decode an entire packet line from data or fail.\n\n///\n\n/// Note that failure also happens if there is not enough data to parse a complete packet line, as opposed to [`streaming()`] decoding\n\n/// succeeds in that case, stating how much more bytes are required.\n\npub fn all_at_once(data: &[u8]) -> Result<PacketLine<'_>, Error> {\n\n match streaming(data)? {\n\n Stream::Complete { line, .. 
} => Ok(line),\n\n Stream::Incomplete { bytes_needed } => Err(Error::NotEnoughData(bytes_needed)),\n\n }\n\n}\n", "file_path": "git-packetline/src/decode.rs", "rank": 13, "score": 185450.6081983465 }, { "content": "#[cfg(feature = \"async-client\")]\n\npub fn transport<W: futures_io::AsyncWrite + Unpin>(\n\n out: W,\n\n path: &str,\n\n desired_version: git_transport::Protocol,\n\n mode: git_transport::client::git::ConnectMode,\n\n) -> git_transport::client::git::Connection<Cursor, W> {\n\n let response = fixture_bytes(path);\n\n git_transport::client::git::Connection::new(\n\n Cursor::new(response),\n\n out,\n\n desired_version,\n\n b\"does/not/matter\".as_bstr().to_owned(),\n\n None::<(&str, _)>,\n\n mode,\n\n )\n\n}\n\n\n", "file_path": "git-protocol/tests/fetch/mod.rs", "rank": 14, "score": 185205.2184734735 }, { "content": "fn sha1_path(id: &git_hash::oid, mut root: PathBuf) -> PathBuf {\n\n match id.kind() {\n\n git_hash::Kind::Sha1 => {\n\n let hex = id.to_sha1_hex();\n\n let buf = std::str::from_utf8(&hex).expect(\"ascii only in hex\");\n\n root.push(&buf[..2]);\n\n root.push(&buf[2..]);\n\n root\n\n }\n\n }\n\n}\n\n\n\n///\n\npub mod find;\n\n///\n\npub mod iter;\n\n#[doc(inline)]\n\npub use iter::Iter;\n\n///\n\npub mod write;\n", "file_path": "git-odb/src/store/loose/mod.rs", "rank": 15, "score": 181619.63914013858 }, { "content": "#[test]\n\nfn reference_with_explicit_value_must_match_the_value_on_update() -> crate::Result {\n\n let (_keep, store) = store_writable(\"make_repo_for_reflog.sh\")?;\n\n let head = store.loose_find(\"HEAD\")?.expect(\"head exists already\");\n\n let target = head.target;\n\n\n\n let res = store.transaction().prepare(\n\n Some(RefEdit {\n\n change: Change::Update {\n\n log: LogChange::default(),\n\n new: Target::Peeled(ObjectId::null_sha1()),\n\n mode: Create::OrUpdate {\n\n previous: Some(Target::Peeled(hex_to_id(\"28ce6a8b26aa170e1de65536fe8abe1832bd3242\"))),\n\n },\n\n },\n\n name: \"HEAD\".try_into()?,\n\n deref: 
false,\n\n }),\n\n Fail::Immediately,\n\n );\n\n match res {\n\n Err(transaction::prepare::Error::ReferenceOutOfDate { full_name, actual, .. }) => {\n\n assert_eq!(full_name, \"HEAD\");\n\n assert_eq!(actual, target);\n\n }\n\n _ => unreachable!(\"unexpected result\"),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/file/transaction/prepare_and_commit/create_or_update.rs", "rank": 16, "score": 181375.52229976637 }, { "content": "fn add_decode_result(lhs: &mut data::decode_entry::Outcome, rhs: data::decode_entry::Outcome) {\n\n lhs.num_deltas += rhs.num_deltas;\n\n lhs.decompressed_size += rhs.decompressed_size;\n\n lhs.compressed_size += rhs.compressed_size;\n\n lhs.object_size += rhs.object_size;\n\n}\n\n\n", "file_path": "git-pack/src/index/traverse/reduce.rs", "rank": 17, "score": 180449.17522322567 }, { "content": "/// Obtain a `PacketLine` from `data` after assuring `data` is small enough to fit.\n\npub fn to_data_line(data: &[u8]) -> Result<PacketLine<'_>, Error> {\n\n if data.len() > MAX_LINE_LEN {\n\n return Err(Error::DataLengthLimitExceeded(data.len()));\n\n }\n\n\n\n Ok(PacketLine::Data(data))\n\n}\n\n\n", "file_path": "git-packetline/src/decode.rs", "rank": 18, "score": 179906.73639282875 }, { "content": "#[test]\n\nfn reference_with_create_only_must_not_exist_already_when_creating_it_if_the_value_does_not_match() -> crate::Result {\n\n let (_keep, store) = store_writable(\"make_repo_for_reflog.sh\")?;\n\n let head = store.loose_find(\"HEAD\")?.expect(\"head exists already\");\n\n let target = head.target;\n\n\n\n let res = store.transaction().prepare(\n\n Some(RefEdit {\n\n change: Change::Update {\n\n log: LogChange::default(),\n\n new: Target::Peeled(ObjectId::null_sha1()),\n\n mode: Create::Only,\n\n },\n\n name: \"HEAD\".try_into()?,\n\n deref: false,\n\n }),\n\n Fail::Immediately,\n\n );\n\n match res {\n\n Err(transaction::prepare::Error::MustNotExist { full_name, actual, .. 
}) => {\n\n assert_eq!(full_name, \"HEAD\");\n\n assert_eq!(actual, target);\n\n }\n\n _ => unreachable!(\"unexpected result\"),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/file/transaction/prepare_and_commit/create_or_update.rs", "rank": 19, "score": 179239.6539696699 }, { "content": "#[test]\n\nfn reference_with_create_only_must_not_exist_already_when_creating_it_unless_the_value_matches() -> crate::Result {\n\n let (_keep, store) = store_writable(\"make_repo_for_reflog.sh\")?;\n\n let head = store.loose_find(\"HEAD\")?.expect(\"head exists already\");\n\n let target = head.target;\n\n let previous_reflog_count = reflog_lines(&store, \"HEAD\")?.len();\n\n\n\n let edits = store\n\n .transaction()\n\n .prepare(\n\n Some(RefEdit {\n\n change: Change::Update {\n\n log: LogChange::default(),\n\n new: target.clone(),\n\n mode: Create::Only,\n\n },\n\n name: \"HEAD\".try_into()?,\n\n deref: false,\n\n }),\n\n Fail::Immediately,\n\n )?\n", "file_path": "git-ref/tests/file/transaction/prepare_and_commit/create_or_update.rs", "rank": 20, "score": 177181.86550501164 }, { "content": "pub fn is_workspace_member(meta: &Metadata, crate_name: &str) -> bool {\n\n workspace_package_by_name(meta, crate_name).is_some()\n\n}\n\n\n", "file_path": "cargo-smart-release/src/command/release/utils.rs", "rank": 21, "score": 174978.99997087402 }, { "content": "/// Decode the `four_bytes` packet line prefix provided in hexadecimal form and check it for validity.\n\npub fn hex_prefix(four_bytes: &[u8]) -> Result<PacketLineOrWantedSize<'_>, Error> {\n\n debug_assert_eq!(four_bytes.len(), 4, \"need four hex bytes\");\n\n for (line_bytes, line_type) in &[\n\n (FLUSH_LINE, PacketLine::Flush),\n\n (DELIMITER_LINE, PacketLine::Delimiter),\n\n (RESPONSE_END_LINE, PacketLine::ResponseEnd),\n\n ] {\n\n if four_bytes == *line_bytes {\n\n return Ok(PacketLineOrWantedSize::Line(*line_type));\n\n }\n\n }\n\n\n\n let mut buf = [0u8; U16_HEX_BYTES / 2];\n\n hex::decode_to_slice(four_bytes, &mut 
buf).map_err(|err| Error::HexDecode(err.to_string()))?;\n\n let wanted_bytes = u16::from_be_bytes(buf);\n\n\n\n if wanted_bytes == 3 {\n\n return Err(Error::InvalidLineLength);\n\n }\n\n if wanted_bytes == 4 {\n\n return Err(Error::DataIsEmpty);\n\n }\n\n debug_assert!(\n\n wanted_bytes as usize > U16_HEX_BYTES,\n\n \"by now there should be more wanted bytes than prefix bytes\"\n\n );\n\n Ok(PacketLineOrWantedSize::Wanted(wanted_bytes - U16_HEX_BYTES as u16))\n\n}\n\n\n", "file_path": "git-packetline/src/decode.rs", "rank": 22, "score": 172424.6244480433 }, { "content": "pub fn write_packed_refs_with(input: &[u8]) -> crate::Result<(tempfile::TempDir, PathBuf)> {\n\n let dir = tempfile::tempdir()?;\n\n let packed_refs_path = dir.path().join(\"packed-refs\");\n\n std::fs::write(&packed_refs_path, input)?;\n\n Ok((dir, packed_refs_path))\n\n}\n\n\n\nmod find;\n\npub mod iter;\n\nmod open;\n", "file_path": "git-ref/tests/packed/mod.rs", "rank": 23, "score": 165189.96923999186 }, { "content": "fn reflog_lines(store: &file::Store, name: &str, buf: &mut Vec<u8>) -> Result<Vec<log::mutable::Line>> {\n\n store\n\n .reflog_iter(name, buf)?\n\n .expect(\"existing reflog\")\n\n .map(|l| l.map(log::mutable::Line::from))\n\n .collect::<std::result::Result<Vec<_>, _>>()\n\n .map_err(Into::into)\n\n}\n\n\n\nconst WRITE_MODES: &[WriteReflog] = &[WriteReflog::Normal, WriteReflog::Disable];\n\n\n", "file_path": "git-ref/src/store/file/loose/reflog/create_or_update/tests.rs", "rank": 24, "score": 164398.49490580277 }, { "content": "/// Returns true if an interrupt is requested.\n\npub fn is_triggered() -> bool {\n\n IS_INTERRUPTED.load(Ordering::Relaxed)\n\n}\n\n\n", "file_path": "git-repository/src/interrupt.rs", "rank": 25, "score": 164077.46120094642 }, { "content": "/// Decode all lines in `input` as key-value pairs produced by a `git credentials` helper program.\n\npub fn decode_message(mut input: impl io::Read) -> io::Result<Vec<(String, String)>> {\n\n let mut buf = 
String::new();\n\n input.read_to_string(&mut buf)?;\n\n buf.lines()\n\n .take_while(|l| !l.is_empty())\n\n .map(|l| {\n\n let mut iter = l.splitn(2, '=').map(|s| s.to_owned());\n\n match (iter.next(), iter.next()) {\n\n (Some(key), Some(value)) => validate(&key).and_then(|_| validate(&value)).map(|_| (key, value)),\n\n _ => Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Invalid format, expecting key=value\",\n\n )),\n\n }\n\n })\n\n .collect::<io::Result<Vec<_>>>()\n\n}\n", "file_path": "git-protocol/src/credentials.rs", "rank": 26, "score": 162487.66847995794 }, { "content": "/// A function for use in [`loose::Reference::peel_to_id_in_place()`] to indicate no peeling should happen.\n\npub fn none(\n\n _id: git_hash::ObjectId,\n\n _buf: &mut Vec<u8>,\n\n) -> Result<Option<(git_object::Kind, &[u8])>, std::convert::Infallible> {\n\n Ok(Some((git_object::Kind::Commit, &[])))\n\n}\n\n\n\nimpl loose::Reference {\n\n /// Follow this symbolic reference one level and return the ref it refers to, possibly providing access to `packed` references for lookup.\n\n ///\n\n /// Returns `None` if this is not a symbolic reference, hence the leaf of the chain.\n\n pub fn follow_symbolic<'p>(\n\n &self,\n\n store: &file::Store,\n\n packed: Option<&'p packed::Buffer>,\n\n ) -> Option<Result<file::Reference<'p>, Error>> {\n\n match &self.target {\n\n Target::Peeled(_) => None,\n\n Target::Symbolic(full_name) => {\n\n let path = full_name.to_path();\n", "file_path": "git-ref/src/store/file/loose/reference/peel.rs", "rank": 27, "score": 161544.18324507083 }, { "content": "fn section<'a, 'b>(i: &'a [u8], node: &'b mut ParserNode) -> IResult<&'a [u8], (ParsedSection<'a>, usize)> {\n\n let (mut i, section_header) = section_header(i)?;\n\n\n\n let mut newlines = 0;\n\n let mut items = vec![];\n\n\n\n // This would usually be a many0(alt(...)), the manual loop allows us to\n\n // optimize vec insertions\n\n loop {\n\n let old_i = i;\n\n\n\n if let Ok((new_i, v)) = take_spaces(i) {\n\n if 
old_i != new_i {\n\n i = new_i;\n\n items.push(Event::Whitespace(Cow::Borrowed(v)));\n\n }\n\n }\n\n\n\n if let Ok((new_i, (v, new_newlines))) = take_newline(i) {\n\n if old_i != new_i {\n", "file_path": "git-config/src/parser.rs", "rank": 28, "score": 161522.7789512212 }, { "content": "#[inline]\n\nfn leb64decode(d: &[u8]) -> (u64, usize) {\n\n let mut i = 0;\n\n let mut c = d[i];\n\n i += 1;\n\n let mut value = c as u64 & 0x7f;\n\n while c & 0x80 != 0 {\n\n c = d[i];\n\n i += 1;\n\n value += 1;\n\n value = (value << 7) + (c as u64 & 0x7f)\n\n }\n\n (value, i)\n\n}\n\n\n", "file_path": "git-pack/src/data/entry/decode.rs", "rank": 29, "score": 161455.69840151747 }, { "content": "#[test]\n\nfn performance() -> crate::Result {\n\n let store = store_at(\"make_repository_with_lots_of_packed_refs.sh\")?;\n\n let start = std::time::Instant::now();\n\n let actual = store.packed_buffer()?.expect(\"packed-refs present\").iter()?.count();\n\n assert_eq!(actual, 150003);\n\n let elapsed = start.elapsed().as_secs_f32();\n\n eprintln!(\n\n \"Enumerated {} refs in {}s ({} refs/s)\",\n\n actual,\n\n elapsed,\n\n actual as f32 / elapsed\n\n );\n\n Ok(())\n\n}\n", "file_path": "git-ref/tests/packed/iter.rs", "rank": 30, "score": 160645.03416790217 }, { "content": "#[test]\n\nfn empty() -> crate::Result {\n\n assert_eq!(\n\n packed::Iter::new(&[])?.count(),\n\n 0,\n\n \"empty buffers are fine and lead to no line returned\"\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/packed/iter.rs", "rank": 31, "score": 160645.03416790217 }, { "content": "#[test]\n\nfn from_bytes() -> crate::Result {\n\n let (caps, delim_pos) = Capabilities::from_bytes(&b\"7814e8a05a59c0cf5fb186661d1551c75d1299b5 HEAD\\0multi_ack thin-pack side-band side-band-64k ofs-delta shallow deepen-since deepen-not deepen-relative no-progress include-tag multi_ack_detailed symref=HEAD:refs/heads/master object-format=sha1 agent=git/2.28.0\"[..])?;\n\n assert_eq!(delim_pos, 45);\n\n assert_eq!(\n\n 
caps.iter().map(|c| c.name().to_owned()).collect::<Vec<_>>(),\n\n vec![\n\n \"multi_ack\",\n\n \"thin-pack\",\n\n \"side-band\",\n\n \"side-band-64k\",\n\n \"ofs-delta\",\n\n \"shallow\",\n\n \"deepen-since\",\n\n \"deepen-not\",\n\n \"deepen-relative\",\n\n \"no-progress\",\n\n \"include-tag\",\n\n \"multi_ack_detailed\",\n\n \"symref\",\n\n \"object-format\",\n", "file_path": "git-transport/tests/client/capabilities.rs", "rank": 32, "score": 160645.03416790217 }, { "content": "pub fn to_bstr_err(err: nom::Err<VerboseError<&[u8]>>) -> VerboseError<&BStr> {\n\n let err = match err {\n\n nom::Err::Error(err) | nom::Err::Failure(err) => err,\n\n nom::Err::Incomplete(_) => unreachable!(\"not a streaming parser\"),\n\n };\n\n VerboseError {\n\n errors: err.errors.into_iter().map(|(i, v)| (i.as_bstr(), v)).collect(),\n\n }\n\n}\n", "file_path": "tests/tools/src/lib.rs", "rank": 33, "score": 160468.44186074715 }, { "content": "#[test]\n\nfn basic_nesting() -> crate::Result<()> {\n\n let db = db()?;\n\n let mut buf = Vec::new();\n\n let mut buf2 = Vec::new();\n\n let mut commit = db.find_existing_commit_iter(\n\n hex_to_id(\"85df34aa34848b8138b2b3dcff5fb5c2b734e0ce\"),\n\n &mut buf,\n\n &mut pack::cache::Never,\n\n )?;\n\n let mut recorder = tree::Recorder::default();\n\n git_traverse::tree::breadthfirst(\n\n db.find_existing_tree_iter(\n\n commit.tree_id().expect(\"a tree is available in a commit\"),\n\n &mut buf2,\n\n &mut pack::cache::Never,\n\n )?,\n\n tree::breadthfirst::State::default(),\n\n |oid, buf| db.find_existing_tree_iter(oid, buf, &mut pack::cache::Never).ok(),\n\n &mut recorder,\n\n )?;\n", "file_path": "git-traverse/tests/tree/mod.rs", "rank": 34, "score": 158494.7938972636 }, { "content": "#[test]\n\nfn single_commit() -> crate::Result {\n\n let repo_dir = make_readonly_repo(\"single_commit.sh\");\n\n let refs = inspect_refs(&repo_dir, &[\"commit\"]);\n\n let cg = Graph::from_info_dir(repo_dir.join(\".git\").join(\"objects\").join(\"info\"))?;\n\n 
check_common(&cg, &refs);\n\n\n\n assert_eq!(cg.commit_at(refs[\"commit\"].pos()).generation(), 1);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "git-commitgraph/tests/access/mod.rs", "rank": 35, "score": 158494.7938972636 }, { "content": "#[test]\n\nfn two_parents() -> crate::Result {\n\n let repo_dir = make_readonly_repo(\"two_parents.sh\");\n\n let refs = inspect_refs(&repo_dir, &[\"parent1\", \"parent2\", \"child\"]);\n\n let cg = Graph::from_info_dir(repo_dir.join(\".git\").join(\"objects\").join(\"info\"))?;\n\n check_common(&cg, &refs);\n\n\n\n assert_eq!(cg.commit_at(refs[\"parent1\"].pos()).generation(), 1);\n\n assert_eq!(cg.commit_at(refs[\"parent2\"].pos()).generation(), 1);\n\n assert_eq!(cg.commit_at(refs[\"child\"].pos()).generation(), 2);\n\n\n\n Ok(())\n\n}\n", "file_path": "git-commitgraph/tests/access/mod.rs", "rank": 36, "score": 158494.7938972636 }, { "content": "#[test]\n\nfn with_username() -> crate::Result {\n\n let (user, resolved_path) = expand_path::parse(b\"/~byron/hello/git\".as_bstr())?;\n\n let resolved_path = expand_path::with(user.as_ref(), resolved_path.as_ref(), |user: &ForUser| match user {\n\n ForUser::Current => unreachable!(\"we have a name\"),\n\n ForUser::Name(name) => Some(user_home(name.to_str_lossy().as_ref())),\n\n })?;\n\n assert_eq!(resolved_path, expected_path());\n\n Ok(())\n\n}\n", "file_path": "git-url/tests/expand_user/mod.rs", "rank": 37, "score": 158494.7938972636 }, { "content": "#[test]\n\nfn find_speed() -> crate::Result {\n\n let store = store_at(\"make_repository_with_lots_of_packed_refs.sh\")?;\n\n let packed = store.packed_buffer()?.expect(\"packed-refs present\");\n\n let start = std::time::Instant::now();\n\n let mut num_refs = 0;\n\n for r in packed.iter()?.take(10_000) {\n\n num_refs += 1;\n\n let r = r?;\n\n assert_eq!(packed.find(r.name)?.expect(\"ref was found\"), r, \"the refs are the same\");\n\n }\n\n let elapsed = start.elapsed().as_secs_f32();\n\n eprintln!(\n\n \"Found {} refs in {}s ({} 
refs/s)\",\n\n num_refs,\n\n elapsed,\n\n num_refs as f32 / elapsed\n\n );\n\n Ok(())\n\n}\n", "file_path": "git-ref/tests/packed/find.rs", "rank": 38, "score": 158494.7938972636 }, { "content": "#[test]\n\nfn iter_prefix() -> crate::Result {\n\n let packed = store_with_packed_refs()?.packed_buffer()?.expect(\"packed-refs\");\n\n assert_eq!(\n\n packed\n\n .iter_prefixed(\"refs/heads/\")?\n\n .map(|r| r.map(|r| r.name.as_bstr()))\n\n .collect::<Result<Vec<_>, _>>()?,\n\n vec![\n\n \"refs/heads/d1\".as_bytes().as_bstr(),\n\n \"refs/heads/dt1\".into(),\n\n \"refs/heads/main\".into()\n\n ]\n\n );\n\n\n\n assert_eq!(\n\n packed\n\n .iter_prefixed(\"refs/remotes/\")?\n\n .map(|r| r.map(|r| r.name.as_bstr()))\n\n .collect::<Result<Vec<_>, _>>()?,\n\n vec![\n", "file_path": "git-ref/tests/packed/iter.rs", "rank": 39, "score": 158494.7938972636 }, { "content": "#[test]\n\nfn single_parent() -> crate::Result {\n\n let repo_dir = make_readonly_repo(\"single_parent.sh\");\n\n let refs = inspect_refs(&repo_dir, &[\"parent\", \"child\"]);\n\n let cg = Graph::from_info_dir(repo_dir.join(\".git\").join(\"objects\").join(\"info\"))?;\n\n check_common(&cg, &refs);\n\n\n\n assert_eq!(cg.commit_at(refs[\"parent\"].pos()).generation(), 1);\n\n assert_eq!(cg.commit_at(refs[\"child\"].pos()).generation(), 2);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "git-commitgraph/tests/access/mod.rs", "rank": 40, "score": 158494.7938972636 }, { "content": "#[test]\n\nfn octupus_merges() -> crate::Result {\n\n let repo_dir = make_readonly_repo(\"octopus_merges.sh\");\n\n let refs = inspect_refs(\n\n &repo_dir,\n\n &[\n\n \"root\",\n\n \"parent1\",\n\n \"parent2\",\n\n \"parent3\",\n\n \"parent4\",\n\n \"three_parents\",\n\n \"four_parents\",\n\n ],\n\n );\n\n let cg = Graph::from_info_dir(repo_dir.join(\".git\").join(\"objects\").join(\"info\"))?;\n\n check_common(&cg, &refs);\n\n\n\n assert_eq!(cg.commit_at(refs[\"root\"].pos()).generation(), 1);\n\n 
assert_eq!(cg.commit_at(refs[\"parent1\"].pos()).generation(), 2);\n\n assert_eq!(cg.commit_at(refs[\"parent2\"].pos()).generation(), 2);\n\n assert_eq!(cg.commit_at(refs[\"parent3\"].pos()).generation(), 2);\n\n assert_eq!(cg.commit_at(refs[\"parent4\"].pos()).generation(), 2);\n\n assert_eq!(cg.commit_at(refs[\"three_parents\"].pos()).generation(), 3);\n\n assert_eq!(cg.commit_at(refs[\"four_parents\"].pos()).generation(), 3);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "git-commitgraph/tests/access/mod.rs", "rank": 41, "score": 158494.7938972636 }, { "content": "#[test]\n\nfn host_is_ipv4() -> crate::Result {\n\n assert_url_roundtrip(\n\n \"ssh://127.69.0.1/hello\",\n\n url(Scheme::Ssh, None, \"127.69.0.1\", None, b\"/hello\"),\n\n )\n\n}\n\n\n", "file_path": "git-url/tests/parse/ssh.rs", "rank": 42, "score": 158494.7938972636 }, { "content": "#[cfg(feature = \"env_logger\")]\n\n#[allow(unused)] // Squelch warning because it's used in porcelain as well and we can't know that at compile time\n\npub fn init_env_logger(verbose: bool) {\n\n if verbose {\n\n env_logger::Builder::from_env(env_logger::Env::default().default_filter_or(\"info\"))\n\n .format_module_path(false)\n\n .init();\n\n } else {\n\n env_logger::init();\n\n }\n\n}\n\n\n", "file_path": "src/shared.rs", "rank": 43, "score": 158392.38706849926 }, { "content": "pub fn reference<'a, E: ParseError<&'a [u8]> + FromExternalError<&'a [u8], crate::name::Error>>(\n\n input: &'a [u8],\n\n) -> IResult<&'a [u8], packed::Reference<'a>, E> {\n\n let (input, (target, name)) = tuple((\n\n terminated(hex_hash, tag(b\" \")),\n\n map_res(until_newline, crate::FullName::try_from),\n\n ))(input)?;\n\n let (rest, object) = opt(delimited(tag(b\"^\"), hex_hash, newline))(input)?;\n\n Ok((rest, packed::Reference { name, target, object }))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "git-ref/src/store/packed/decode.rs", "rank": 44, "score": 158101.2515677913 }, { "content": "#[test]\n\nfn without_user_and_with_port() 
-> crate::Result {\n\n assert_url_roundtrip(\"ssh://host.xz:21/\", url(Scheme::Ssh, None, \"host.xz\", 21, b\"/\"))\n\n}\n\n\n", "file_path": "git-url/tests/parse/ssh.rs", "rank": 45, "score": 156440.74007892614 }, { "content": "#[test]\n\nfn packed_refs_with_header() -> crate::Result {\n\n let dir = git_testtools::scripted_fixture_repo_read_only(\"make_packed_ref_repository.sh\")?;\n\n let buf = std::fs::read(dir.join(\".git\").join(\"packed-refs\"))?;\n\n let iter = packed::Iter::new(&buf)?;\n\n assert_eq!(iter.count(), 8, \"it finds the right amount of items\");\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/packed/iter.rs", "rank": 46, "score": 156440.74007892614 }, { "content": "#[test]\n\nfn with_user_and_without_port() -> crate::Result {\n\n assert_url_roundtrip(\n\n \"ssh://user@host.xz/.git\",\n\n url(Scheme::Ssh, \"user\", \"host.xz\", None, b\"/.git\"),\n\n )\n\n}\n\n\n", "file_path": "git-url/tests/parse/ssh.rs", "rank": 47, "score": 156440.74007892614 }, { "content": "#[test]\n\nfn without_username() -> crate::Result {\n\n let (user, resolved_path) = expand_path::parse(b\"/~/hello/git\".as_bstr())?;\n\n let resolved_path = expand_path::with(user.as_ref(), resolved_path.as_ref(), |user: &ForUser| match user {\n\n ForUser::Current => Some(user_home(\"byron\")),\n\n ForUser::Name(name) => Some(format!(\"/home/{}\", name).into()),\n\n })?;\n\n assert_eq!(resolved_path, expected_path());\n\n Ok(())\n\n}\n\n\n", "file_path": "git-url/tests/expand_user/mod.rs", "rank": 48, "score": 156440.74007892614 }, { "content": "#[test]\n\nfn file_path_with_protocol() -> crate::Result {\n\n assert_url_roundtrip(\n\n \"file:///path/to/git\",\n\n url(Scheme::File, None, None, None, b\"/path/to/git\"),\n\n )\n\n}\n\n\n", "file_path": "git-url/tests/parse/file.rs", "rank": 49, "score": 156440.74007892614 }, { "content": "#[test]\n\nfn overlay_iter() -> crate::Result {\n\n use git_ref::mutable::Target::*;\n\n\n\n let store = 
store_at(\"make_packed_ref_repository_for_overlay.sh\")?;\n\n let ref_names = store\n\n .iter(store.packed_buffer()?.as_ref())?\n\n .map(|r| r.map(|r| (r.name().as_bstr().to_owned(), r.target(), r.is_packed())))\n\n .collect::<Result<Vec<_>, _>>()?;\n\n let c1 = hex_to_id(\"134385f6d781b7e97062102c6a483440bfda2a03\");\n\n let c2 = hex_to_id(\"9902e3c3e8f0c569b4ab295ddf473e6de763e1e7\");\n\n assert_eq!(\n\n ref_names,\n\n vec![\n\n (b\"refs/heads/main\".as_bstr().to_owned(), Peeled(c1), true),\n\n (\"refs/heads/newer-as-loose\".into(), Peeled(c2), false),\n\n (\n\n \"refs/remotes/origin/HEAD\".into(),\n\n Symbolic(\"refs/remotes/origin/main\".try_into()?),\n\n false\n\n ),\n", "file_path": "git-ref/tests/file/store/iter.rs", "rank": 50, "score": 156440.74007892614 }, { "content": "#[test]\n\nfn username_expansion_with_username() -> crate::Result {\n\n assert_url_roundtrip(\n\n \"ssh://example.com/~byron/hello/git\",\n\n url(Scheme::Ssh, None, \"example.com\", None, b\"/~byron/hello/git\"),\n\n )\n\n}\n\n\n", "file_path": "git-url/tests/parse/ssh.rs", "rank": 51, "score": 156440.74007892614 }, { "content": "fn do_gitoxide_tree_diff<C, L>(commits: &[ObjectId], make_find: C, mode: Computation) -> anyhow::Result<usize>\n\nwhere\n\n C: Fn() -> L + Sync,\n\n L: for<'b> FnMut(&oid, &'b mut Vec<u8>) -> Option<odb::data::Object<'b>>,\n\n{\n\n let changes: usize = match mode {\n\n Computation::MultiThreaded => {\n\n let changes = std::sync::atomic::AtomicUsize::new(0);\n\n commits.par_windows(2).try_for_each_init::<_, _, _, anyhow::Result<_>>(\n\n || {\n\n (\n\n diff::tree::State::default(),\n\n Vec::<u8>::new(),\n\n Vec::<u8>::new(),\n\n make_find(),\n\n )\n\n },\n\n |(state, buf1, buf2, find), pair| {\n\n let (ca, cb) = (pair[0], pair[1]);\n\n let (ta, tb) = (\n", "file_path": "experiments/diffing/src/main.rs", "rank": 52, "score": 156438.98184888624 }, { "content": "fn has_no_explicit_protocol(url: &[u8]) -> bool {\n\n url.find(b\"://\").is_none()\n\n}\n\n\n", "file_path": 
"git-url/src/parse.rs", "rank": 53, "score": 156232.68338649033 }, { "content": "fn is_hex_digit_lc(b: u8) -> bool {\n\n matches!(b, b'0'..=b'9' | b'a'..=b'f')\n\n}\n\n\n", "file_path": "git-ref/src/parse.rs", "rank": 54, "score": 156232.68338649033 }, { "content": "fn usage(c: &mut Criterion) {\n\n c.benchmark_group(\"Owned::cloned\")\n\n .throughput(Throughput::Elements(1))\n\n .bench_function(\"Sha1\", |b| {\n\n let source = hash::Owned::sha1();\n\n b.iter(|| {\n\n black_box(source.clone());\n\n });\n\n })\n\n .bench_function(\"Sha256\", |b| {\n\n let source = hash::Owned::sha256();\n\n b.iter(|| {\n\n black_box(source.clone());\n\n });\n\n });\n\n c.benchmark_group(\"Owned::by_ref\")\n\n .throughput(Throughput::Elements(1))\n\n .bench_function(\"Sha1\", |b| {\n\n let source = hash::Owned::sha1();\n\n b.iter(|| {\n", "file_path": "experiments/hash-owned-borrowed/benches/usage.rs", "rank": 55, "score": 155938.08243322207 }, { "content": "fn parser(c: &mut Criterion) {\n\n c.bench_function(\"Parser large config file\", |b| {\n\n b.iter(|| Parser::try_from(black_box(CONFIG_FILE)).unwrap())\n\n });\n\n}\n\n\n\ncriterion_group!(benches, git_config, parser);\n\ncriterion_main!(benches);\n\n\n\n// Found from https://gist.github.com/pksunkara/988716\n\nconst CONFIG_FILE: &str = r#\"[user]\n\nname = Pavan Kumar Sunkara\n\nemail = pavan.sss1991@gmail.com\n\nusername = pksunkara\n\n[core]\n\neditor = vim\n\nwhitespace = fix,-indent-with-non-tab,trailing-space,cr-at-eol\n\npager = delta\n\n[sendemail]\n\nsmtpencryption = tls\n", "file_path": "git-config/benches/large_config_file.rs", "rank": 56, "score": 155938.08243322207 }, { "content": "fn db() -> crate::Result<Store> {\n\n let dir = git_testtools::scripted_fixture_repo_read_only(\"make_traversal_repo_for_trees.sh\")?;\n\n let db = Store::at(dir.join(\".git\").join(\"objects\"))?;\n\n Ok(db)\n\n}\n\n\n", "file_path": "git-traverse/tests/tree/mod.rs", "rank": 57, "score": 155123.45408374345 }, { "content": "#[test]\n\nfn 
packed_refs_without_header() -> crate::Result {\n\n let packed_refs = b\"916840c0e2f67d370291042cb5274a597f4fa9bc refs/tags/TEST-0.0.1\n\nc4cebba92af964f2d126be90b8a6298c4cf84d45 refs/tags/git-actor-v0.1.0\n\n^13da90b54699a6b500ec5cd7d175f2cd5a1bed06\n\n0b92c8a256ae06c189e3b9c30b646d62ac8f7d10 refs/tags/git-actor-v0.1.1\\n\";\n\n assert_eq!(\n\n packed::Iter::new(packed_refs)?.collect::<Result<Vec<_>, _>>()?,\n\n vec![\n\n packed::Reference {\n\n name: \"refs/tags/TEST-0.0.1\".try_into()?,\n\n target: \"916840c0e2f67d370291042cb5274a597f4fa9bc\".into(),\n\n object: None\n\n },\n\n packed::Reference {\n\n name: \"refs/tags/git-actor-v0.1.0\".try_into()?,\n\n target: \"c4cebba92af964f2d126be90b8a6298c4cf84d45\".into(),\n\n object: Some(\"13da90b54699a6b500ec5cd7d175f2cd5a1bed06\".into())\n\n },\n\n packed::Reference {\n\n name: \"refs/tags/git-actor-v0.1.1\".try_into()?,\n\n target: \"0b92c8a256ae06c189e3b9c30b646d62ac8f7d10\".into(),\n\n object: None\n\n }\n\n ]\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/packed/iter.rs", "rank": 58, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn without_user_and_without_port() -> crate::Result {\n\n assert_url_roundtrip(\n\n \"ssh://host.xz/path/to/repo.git/\",\n\n url(Scheme::Ssh, None, \"host.xz\", None, b\"/path/to/repo.git/\"),\n\n )\n\n}\n\n\n", "file_path": "git-url/tests/parse/ssh.rs", "rank": 59, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn loose_iter_with_prefix() -> crate::Result {\n\n let store = store()?;\n\n\n\n let actual = store\n\n .loose_iter_prefixed(\"refs/heads/\")?\n\n .collect::<Result<Vec<_>, _>>()\n\n .expect(\"no broken ref in this subset\")\n\n .into_iter()\n\n .map(|e| e.name.into_inner())\n\n .collect::<Vec<_>>();\n\n\n\n assert_eq!(\n\n actual,\n\n vec![\n\n \"refs/heads/d1\",\n\n \"refs/heads/dt1\",\n\n \"refs/heads/main\",\n\n \"refs/heads/multi-link-target1\",\n\n ]\n\n .into_iter()\n\n .map(String::from)\n\n .collect::<Vec<_>>(),\n\n \"all paths are as 
expected\"\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/file/store/iter.rs", "rank": 60, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn scp_like_without_user() -> crate::Result {\n\n let url = assert_url_and(\n\n \"host.xz:path/to/git\",\n\n url(Scheme::Ssh, None, \"host.xz\", None, b\"/path/to/git\"),\n\n )?\n\n .to_string();\n\n assert_eq!(url, \"ssh://host.xz/path/to/git\");\n\n Ok(())\n\n}\n\n\n", "file_path": "git-url/tests/parse/ssh.rs", "rank": 61, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn username_expansion_without_username() -> crate::Result {\n\n assert_url_roundtrip(\n\n \"ssh://example.com/~/hello/git\",\n\n url(Scheme::Ssh, None, \"example.com\", None, b\"/~/hello/git\"),\n\n )\n\n}\n\n\n", "file_path": "git-url/tests/parse/ssh.rs", "rank": 62, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn file_path_without_protocol() -> crate::Result {\n\n let url = assert_url_and(\"/path/to/git\", url(Scheme::File, None, None, None, b\"/path/to/git\"))?.to_string();\n\n assert_eq!(url, \"file:///path/to/git\");\n\n Ok(())\n\n}\n\n\n", "file_path": "git-url/tests/parse/file.rs", "rank": 63, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn all_iterable_refs_can_be_found() -> crate::Result {\n\n let store = store_with_packed_refs()?;\n\n let packed_refs = store.packed_buffer()?.expect(\"packed-refs exist\");\n\n\n\n for reference in packed_refs.iter()? 
{\n\n let reference = reference?;\n\n let found = packed_refs.find(reference.name)?.expect(\"reference exists\");\n\n assert_eq!(reference, found, \"both refs are exactly the same\");\n\n let found = packed_refs.find_existing(reference.name)?;\n\n assert_eq!(reference, found);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/packed/find.rs", "rank": 64, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn bogus_content_triggers_an_error() -> crate::Result {\n\n let packed_refs_data = b\"starts with a bogus record, not a header anyway\";\n\n let (_keep, path) = write_packed_refs_with(packed_refs_data)?;\n\n\n\n match git_ref::packed::Buffer::open(path, 32) {\n\n Ok(_) => unreachable!(\"unsorted buffers can't be opened\"),\n\n Err(err) => assert_eq!(\n\n err.to_string(),\n\n \"The packed-refs file did not have a header or wasn't sorted and could not be iterated\"\n\n ),\n\n }\n\n Ok(())\n\n}\n", "file_path": "git-ref/tests/packed/open.rs", "rank": 65, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn overlay_prefixed_iter() -> crate::Result {\n\n use git_ref::mutable::Target::*;\n\n\n\n let store = store_at(\"make_packed_ref_repository_for_overlay.sh\")?;\n\n let ref_names = store\n\n .iter_prefixed(store.packed_buffer()?.as_ref(), \"refs/heads\")?\n\n .map(|r| r.map(|r| (r.name().as_bstr().to_owned(), r.target(), r.is_packed())))\n\n .collect::<Result<Vec<_>, _>>()?;\n\n let c1 = hex_to_id(\"134385f6d781b7e97062102c6a483440bfda2a03\");\n\n let c2 = hex_to_id(\"9902e3c3e8f0c569b4ab295ddf473e6de763e1e7\");\n\n assert_eq!(\n\n ref_names,\n\n vec![\n\n (b\"refs/heads/main\".as_bstr().to_owned(), Peeled(c1), true),\n\n (\"refs/heads/newer-as-loose\".into(), Peeled(c2), false),\n\n ]\n\n );\n\n Ok(())\n\n}\n", "file_path": "git-ref/tests/file/store/iter.rs", "rank": 66, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn traversals() -> crate::Result {\n\n #[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]\n\n struct Count {\n\n trees: 
usize,\n\n commits: usize,\n\n blobs: usize,\n\n tags: usize,\n\n delta_ref: usize,\n\n delta_oid: usize,\n\n }\n\n #[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]\n\n struct ObjectCount {\n\n trees: usize,\n\n commits: usize,\n\n blobs: usize,\n\n tags: usize,\n\n }\n\n impl ObjectCount {\n\n fn total(&self) -> usize {\n\n self.tags + self.trees + self.commits + self.blobs\n", "file_path": "git-pack/tests/pack/data/output/count_and_entries.rs", "rank": 67, "score": 154476.5598625286 }, { "content": "#[test]\n\nfn packed_file_iter() -> crate::Result {\n\n let store = store_with_packed_refs()?;\n\n assert_eq!(store.packed_buffer()?.expect(\"pack available\").iter()?.count(), 8);\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/file/store/iter.rs", "rank": 68, "score": 154476.5598625286 }, { "content": "/// Given the decompressed pack delta `d`, decode a size in bytes (either the base object size or the result object size)\n\n/// Equivalent to [this canonical git function](https://github.com/git/git/blob/311531c9de557d25ac087c1637818bd2aad6eb3a/delta.h#L89)\n\npub fn decode_header_size(d: &[u8]) -> (u64, usize) {\n\n let mut i = 0;\n\n let mut size = 0u64;\n\n let mut consumed = 0;\n\n for cmd in d.iter() {\n\n consumed += 1;\n\n size |= (*cmd as u64 & 0x7f) << i;\n\n i += 7;\n\n if *cmd & 0x80 == 0 {\n\n break;\n\n }\n\n }\n\n (size, consumed)\n\n}\n\n\n", "file_path": "git-pack/src/data/delta.rs", "rank": 69, "score": 154465.92333638982 }, { "content": "pub fn apply(base: &[u8], mut target: &mut [u8], data: &[u8]) {\n\n let mut i = 0;\n\n while let Some(cmd) = data.get(i) {\n\n i += 1;\n\n match cmd {\n\n cmd if cmd & 0b1000_0000 != 0 => {\n\n let (mut ofs, mut size): (u32, u32) = (0, 0);\n\n if cmd & 0b0000_0001 != 0 {\n\n ofs = data[i] as u32;\n\n i += 1;\n\n }\n\n if cmd & 0b0000_0010 != 0 {\n\n ofs |= (data[i] as u32) << 8;\n\n i += 1;\n\n }\n\n if cmd & 0b0000_0100 != 0 {\n\n ofs |= (data[i] as u32) << 16;\n\n i += 1;\n\n }\n\n if cmd & 0b0000_1000 != 
0 {\n", "file_path": "git-pack/src/data/delta.rs", "rank": 70, "score": 154183.6533927742 }, { "content": "fn is_hex_digit_lc(b: u8) -> bool {\n\n matches!(b, b'0'..=b'9' | b'a'..=b'f')\n\n}\n\n\n", "file_path": "git-object/src/immutable/parse.rs", "rank": 71, "score": 154174.47582105818 }, { "content": "fn git_config(c: &mut Criterion) {\n\n c.bench_function(\"GitConfig large config file\", |b| {\n\n b.iter(|| GitConfig::try_from(black_box(CONFIG_FILE)).unwrap())\n\n });\n\n}\n\n\n", "file_path": "git-config/benches/large_config_file.rs", "rank": 72, "score": 153880.9217316591 }, { "content": "#[test]\n\nfn empty_buffers_should_not_exist_but_are_fine_to_open() -> crate::Result {\n\n let (_keep, path) = write_packed_refs_with(&[])?;\n\n assert_eq!(git_ref::packed::Buffer::open(path, 512)?.iter()?.count(), 0);\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/packed/open.rs", "rank": 73, "score": 152596.48099907802 }, { "content": "#[test]\n\nfn loose_iter_with_broken_refs() -> crate::Result {\n\n let store = store()?;\n\n\n\n let mut actual: Vec<_> = store.loose_iter()?.collect();\n\n assert_eq!(actual.len(), 15);\n\n actual.sort_by_key(|r| r.is_err());\n\n let first_error = actual\n\n .iter()\n\n .enumerate()\n\n .find_map(|(idx, r)| if r.is_err() { Some(idx) } else { None })\n\n .expect(\"there is an error\");\n\n\n\n assert_eq!(\n\n first_error, 14,\n\n \"there is exactly one invalid item, and it didn't abort the iterator most importantly\"\n\n );\n\n #[cfg(not(windows))]\n\n let msg = \"The reference at 'refs/broken' could not be instantiated\";\n\n #[cfg(windows)]\n\n let msg = \"The reference at 'refs\\\\broken' could not be instantiated\";\n", "file_path": "git-ref/tests/file/store/iter.rs", "rank": 74, "score": 152596.48099907802 }, { "content": "#[test]\n\nfn no_username_expansion_for_file_paths_with_protocol() -> crate::Result {\n\n assert_url_roundtrip(\n\n \"file://~username/path/to/git\",\n\n url(Scheme::File, None, None, None, 
b\"~username/path/to/git\"),\n\n )\n\n}\n\n\n", "file_path": "git-url/tests/parse/file.rs", "rank": 75, "score": 152596.48099907802 }, { "content": "#[test]\n\nfn no_alternate_in_first_objects_dir() -> crate::Result {\n\n let tmp = git_testtools::tempfile::TempDir::new()?;\n\n assert!(alternate::resolve(tmp.path())?.is_empty());\n\n Ok(())\n\n}\n", "file_path": "git-odb/tests/odb/alternate/mod.rs", "rank": 76, "score": 152596.48099907802 }, { "content": "#[test]\n\nfn clone_v1() -> crate::Result {\n\n let (server, mut c) = mock::serve_and_connect(\n\n \"v1/http-handshake.response\",\n\n \"path/not/important/due/to/mock\",\n\n Protocol::V1,\n\n )?;\n\n let SetServiceResponse { refs, .. } =\n\n c.handshake(Service::UploadPack, &[(\"key\", Some(\"value\")), (\"value-only\", None)])?;\n\n io::copy(&mut refs.expect(\"refs in protocol V1\"), &mut io::sink())?;\n\n assert_eq!(\n\n server.received_as_string().lines().nth(4).expect(\"git-protocol header\"),\n\n \"Git-Protocol: key=value:value-only\",\n\n \"it writes extra-parameters without the version\"\n\n );\n\n\n\n server.next_read_and_respond_with(fixture_bytes(\"v1/http-clone.response\"));\n\n let mut writer = c.request(\n\n client::WriteMode::OneLfTerminatedLinePerWriteCall,\n\n client::MessageKind::Text(b\"done\"),\n\n )?;\n", "file_path": "git-transport/tests/client/blocking_io/http/mod.rs", "rank": 77, "score": 152596.48099907802 }, { "content": "#[test]\n\nfn relative_file_path_without_protocol() -> crate::Result {\n\n let parsed = assert_url_and(\n\n \"../../path/to/git\",\n\n url(Scheme::File, None, None, None, b\"../../path/to/git\"),\n\n )?\n\n .to_string();\n\n assert_eq!(parsed, \"file://../../path/to/git\");\n\n let url = assert_url_and(\"path/to/git\", url(Scheme::File, None, None, None, b\"path/to/git\"))?.to_string();\n\n assert_eq!(url, \"file://path/to/git\");\n\n Ok(())\n\n}\n\n\n", "file_path": "git-url/tests/parse/file.rs", "rank": 78, "score": 152596.48099907802 }, { "content": "#[test]\n\nfn 
handshake_v1() -> crate::Result {\n\n let (server, mut c) = mock::serve_and_connect(\n\n \"v1/http-handshake.response\",\n\n \"path/not/important/due/to/mock\",\n\n Protocol::V1,\n\n )?;\n\n assert!(\n\n !c.connection_persists_across_multiple_requests(),\n\n \"http connections are never stateful\"\n\n );\n\n let SetServiceResponse {\n\n actual_protocol,\n\n capabilities,\n\n refs,\n\n } = c.handshake(Service::UploadPack, &[])?;\n\n assert_eq!(actual_protocol, Protocol::V1);\n\n assert_eq!(\n\n capabilities\n\n .iter()\n\n .map(|c| (c.name().to_owned(), c.value().map(ToOwned::to_owned)))\n", "file_path": "git-transport/tests/client/blocking_io/http/mod.rs", "rank": 79, "score": 152596.48099907802 }, { "content": "#[test]\n\nfn broken_ref_doesnt_end_the_iteration() -> crate::Result {\n\n let packed_refs = b\"916840c0e2f67d370291042cb5274a597f4fa9bc refs/tags/TEST-0.0.1\n\nbuggy-hash refs/wrong\n\n^buggy-hash-too\n\n0b92c8a256ae06c189e3b9c30b646d62ac8f7d10 refs/tags/git-actor-v0.1.1\\n\";\n\n let mut iter = packed::Iter::new(packed_refs)?;\n\n\n\n assert!(iter.next().expect(\"first ref\").is_ok(), \"first line is valid\");\n\n assert_eq!(\n\n iter.next()\n\n .expect(\"second ref\")\n\n .expect_err(\"an error is produced\")\n\n .to_string(),\n\n \"Invalid reference in line 2: 'buggy-hash refs/wrong'\",\n\n \"second line is invalid\",\n\n );\n\n assert_eq!(\n\n iter.next()\n\n .expect(\"third ref\")\n\n .expect_err(\"an error is produced\")\n\n .to_string(),\n\n \"Invalid reference in line 3: '^buggy-hash-too'\",\n\n \"third line is invalid\",\n\n );\n\n assert!(iter.next().expect(\"last ref\").is_ok(), \"last line is valid\");\n\n assert!(iter.next().is_none(), \"exhausted\");\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/packed/iter.rs", "rank": 80, "score": 152596.48099907802 }, { "content": "pub fn assert_err_display<T: std::fmt::Debug, E: std::error::Error>(\n\n res: std::result::Result<T, E>,\n\n expected: impl AsRef<str>,\n\n) {\n\n match res {\n\n Ok(v) => 
panic!(\"Expected error '{}', got value {:?}\", expected.as_ref(), v),\n\n Err(err) => assert_eq!(err.to_string(), expected.as_ref()),\n\n }\n\n}\n\n\n\n#[cfg(feature = \"blocking-io\")]\n\nmod decode;\n\n#[cfg(feature = \"blocking-io\")]\n\nmod encode;\n\n#[cfg(feature = \"blocking-io\")]\n\nmod read;\n\n#[cfg(feature = \"blocking-io\")]\n\nmod write;\n", "file_path": "git-packetline/tests/blocking-packetline.rs", "rank": 81, "score": 151482.89542357353 }, { "content": "#[test]\n\nfn no_username_expansion_for_file_paths_without_protocol() -> crate::Result {\n\n let url = assert_url_and(\"~/path/to/git\", url(Scheme::File, None, None, None, b\"~/path/to/git\"))?.to_string();\n\n assert_eq!(url, \"file://~/path/to/git\");\n\n Ok(())\n\n}\n", "file_path": "git-url/tests/parse/file.rs", "rank": 82, "score": 150795.21518582266 }, { "content": "#[test]\n\nfn no_packed_available_thus_no_iteration_possible() -> crate::Result {\n\n let store_without_packed = store()?;\n\n assert!(\n\n store_without_packed.packed_buffer()?.is_none(),\n\n \"there is no packed refs in this store\"\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/file/store/iter.rs", "rank": 83, "score": 150795.21518582266 }, { "content": "#[test]\n\nfn circular_alternates_are_detected_with_relative_paths() -> crate::Result {\n\n let tmp = git_testtools::tempfile::TempDir::new()?;\n\n let (from, _) = alternate(tmp.path().join(\"a\"), tmp.path().join(\"b\"))?;\n\n alternate(tmp.path().join(\"b\"), Path::new(\"..\").join(\"a\"))?;\n\n\n\n match alternate::resolve(&from) {\n\n Err(alternate::Error::Cycle(chain)) => {\n\n assert_eq!(\n\n chain\n\n .into_iter()\n\n .map(|p| p.file_name().expect(\"non-root\").to_str().expect(\"utf8\").to_owned())\n\n .collect::<Vec<_>>(),\n\n vec![\"a\", \"b\"]\n\n );\n\n }\n\n res => unreachable!(\"should be a specific kind of error: {:?}\", res),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "git-odb/tests/odb/alternate/mod.rs", "rank": 84, "score": 150795.21518582266 }, { 
"content": "#[test]\n\nfn interior_relative_file_path_without_protocol() -> crate::Result {\n\n let url = assert_url_and(\n\n \"/abs/path/../../path/to/git\",\n\n url(Scheme::File, None, None, None, b\"/abs/path/../../path/to/git\"),\n\n )?\n\n .to_string();\n\n assert_eq!(url, \"file:///abs/path/../../path/to/git\");\n\n Ok(())\n\n}\n\n\n\nmod windows {\n\n use git_url::Scheme;\n\n\n\n use crate::parse::{assert_url_and, assert_url_roundtrip, url};\n\n\n\n #[test]\n\n fn file_path_without_protocol() -> crate::Result {\n\n let url = assert_url_and(\"x:/path/to/git\", url(Scheme::File, None, None, None, b\"x:/path/to/git\"))?.to_string();\n\n assert_eq!(url, \"file://x:/path/to/git\");\n\n Ok(())\n", "file_path": "git-url/tests/parse/file.rs", "rank": 85, "score": 150795.21518582266 }, { "content": "#[test]\n\nfn non_utf8_file_path_without_protocol() -> crate::Result {\n\n let parsed = git_url::parse(b\"/path/to\\xff/git\")?;\n\n assert_eq!(parsed, url(Scheme::File, None, None, None, b\"/path/to\\xff/git\",));\n\n assert_eq!(\n\n parsed.to_string(),\n\n \"file:///path/to�/git\",\n\n \"non-unicode is made unicode safe\"\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "git-url/tests/parse/file.rs", "rank": 86, "score": 150795.21518582266 }, { "content": "fn store() -> crate::Result<file::Store> {\n\n store_at(\"make_ref_repository.sh\")\n\n}\n\n\n", "file_path": "git-ref/tests/file/mod.rs", "rank": 87, "score": 150022.66353603257 }, { "content": "#[inline]\n\nfn id_to_count<Find: crate::Find>(\n\n db: &Find,\n\n buf: &mut Vec<u8>,\n\n id: &oid,\n\n progress: &mut impl Progress,\n\n statistics: &mut Outcome,\n\n allow_pack_lookups: bool,\n\n) -> output::Count {\n\n progress.inc();\n\n statistics.expanded_objects += 1;\n\n output::Count {\n\n id: id.to_owned(),\n\n entry_pack_location: if allow_pack_lookups {\n\n PackLocation::LookedUp(db.location_by_oid(id, buf))\n\n } else {\n\n PackLocation::NotLookedUp\n\n },\n\n }\n\n}\n\n\n\nmod util {\n", "file_path": 
"git-pack/src/data/output/count/objects.rs", "rank": 88, "score": 149225.14118555788 }, { "content": "#[inline]\n\nfn parse_header_info(data: &[u8]) -> (u8, u64, usize) {\n\n let mut c = data[0];\n\n let mut i = 1;\n\n let type_id = (c >> 4) & 0b0000_0111;\n\n let mut size = c as u64 & 0b0000_1111;\n\n let mut s = 4;\n\n while c & 0b1000_0000 != 0 {\n\n c = data[i];\n\n i += 1;\n\n size += ((c & 0b0111_1111) as u64) << s;\n\n s += 7\n\n }\n\n (type_id, size, i)\n\n}\n", "file_path": "git-pack/src/data/entry/decode.rs", "rank": 89, "score": 149144.88759353827 }, { "content": "#[test]\n\nfn delete_reflog_only_of_symbolic_with_deref() -> crate::Result {\n\n let (_keep, store) = store_writable(\"make_repo_for_reflog.sh\")?;\n\n let head = store.loose_find_existing(\"HEAD\")?;\n\n assert!(head.log_exists(&store));\n\n\n\n let edits = store\n\n .transaction()\n\n .prepare(\n\n Some(RefEdit {\n\n change: Change::Delete {\n\n previous: Some(Target::must_exist()),\n\n log: RefLog::Only,\n\n },\n\n name: head.name,\n\n deref: true,\n\n }),\n\n Fail::Immediately,\n\n )?\n\n .commit(&committer())?;\n\n\n", "file_path": "git-ref/tests/file/transaction/prepare_and_commit/delete.rs", "rank": 90, "score": 149067.90838981332 }, { "content": "#[test]\n\nfn delete_a_ref_which_is_gone_succeeds() -> crate::Result {\n\n let (_keep, store) = empty_store()?;\n\n let edits = store\n\n .transaction()\n\n .prepare(\n\n Some(RefEdit {\n\n change: Change::Delete {\n\n previous: None,\n\n log: RefLog::AndReference,\n\n },\n\n name: \"DOES_NOT_EXIST\".try_into()?,\n\n deref: false,\n\n }),\n\n Fail::Immediately,\n\n )?\n\n .commit(&committer())?;\n\n assert_eq!(edits.len(), 1);\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/file/transaction/prepare_and_commit/delete.rs", "rank": 91, "score": 149067.90838981332 }, { "content": "#[test]\n\nfn handshake_and_lsrefs_and_fetch_v2() -> crate::Result {\n\n let (server, mut c) = mock::serve_and_connect(\n\n \"v2/http-handshake.response\",\n\n 
\"path/not/important/due/to/mock\",\n\n Protocol::V2,\n\n )?;\n\n assert!(\n\n !c.connection_persists_across_multiple_requests(),\n\n \"http connections are never stateful\"\n\n );\n\n let SetServiceResponse {\n\n actual_protocol,\n\n capabilities,\n\n refs,\n\n } = c.handshake(Service::UploadPack, &[(\"value-only\", None), (\"key\", Some(\"value\"))])?;\n\n assert_eq!(actual_protocol, Protocol::V2);\n\n assert!(\n\n refs.is_none(),\n\n \"refs are only returned in V1, as V2 favors a separate command (with more options)\"\n\n );\n", "file_path": "git-transport/tests/client/blocking_io/http/mod.rs", "rank": 92, "score": 149067.90838981332 }, { "content": "#[test]\n\nfn delete_reflog_only_of_symbolic_no_deref() -> crate::Result {\n\n let (_keep, store) = store_writable(\"make_repo_for_reflog.sh\")?;\n\n let head = store.loose_find_existing(\"HEAD\")?;\n\n assert!(head.log_exists(&store));\n\n\n\n let edits = store\n\n .transaction()\n\n .prepare(\n\n Some(RefEdit {\n\n change: Change::Delete {\n\n previous: Some(Target::Symbolic(\"refs/heads/main\".try_into()?)),\n\n log: RefLog::Only,\n\n },\n\n name: head.name,\n\n deref: false,\n\n }),\n\n Fail::Immediately,\n\n )?\n\n .commit(&committer())?;\n\n\n", "file_path": "git-ref/tests/file/transaction/prepare_and_commit/delete.rs", "rank": 93, "score": 149067.90838981332 }, { "content": "#[test]\n\nfn partial_name_to_full_name_conversion_rules_are_applied() -> crate::Result {\n\n let store = store_at(\"make_packed_refs_for_lookup_rules.sh\")?;\n\n let packed = store.packed_buffer()?.expect(\"packed-refs exists\");\n\n\n\n assert_eq!(\n\n store.loose_find_existing(\"origin\")?.name.as_bstr(),\n\n \"refs/remotes/origin/HEAD\",\n\n \"a special that only applies to loose refs\"\n\n );\n\n assert!(\n\n packed.find(\"origin\")?.is_none(),\n\n \"packed refs don't have this special case as they don't store HEADs or symrefs\"\n\n );\n\n assert_eq!(\n\n store.loose_find_existing(\"HEAD\")?.name.as_bstr(),\n\n \"HEAD\",\n\n \"HEAD 
can be found in loose stores\"\n\n );\n\n assert!(\n\n packed.find(\"HEAD\")?.is_none(),\n", "file_path": "git-ref/tests/packed/find.rs", "rank": 94, "score": 149067.90838981332 }, { "content": "#[test]\n\nfn invalid_refs_within_a_file_do_not_lead_to_incorrect_results() -> crate::Result {\n\n let broken_packed_refs = b\"# pack-refs with: peeled fully-peeled sorted\n\n916840c0e2f67d370291042cb5274a597f4fa9bc refs/tags/TEST-0.0.1\n\nbogus refs/tags/git-actor-v0.1.0\n\n^13da90b54699a6b500ec5cd7d175f2cd5a1bed06\n\n0b92c8a256ae06c189e3b9c30b646d62ac8f7d10 refs/tags/git-actor-v0.1.1\\n\";\n\n let (_keep, path) = write_packed_refs_with(broken_packed_refs)?;\n\n\n\n let buf = packed::Buffer::open(path, 1024)?;\n\n\n\n let name = \"refs/tags/git-actor-v0.1.1\";\n\n assert_eq!(\n\n buf.find(name)?.expect(\"reference exists\"),\n\n packed::Reference {\n\n name: name.try_into()?,\n\n target: \"0b92c8a256ae06c189e3b9c30b646d62ac8f7d10\".into(),\n\n object: None\n\n }\n\n );\n\n\n", "file_path": "git-ref/tests/packed/find.rs", "rank": 95, "score": 149067.90838981332 }, { "content": "#[test]\n\nfn scp_like_without_user_and_username_expansion_with_username() -> crate::Result {\n\n let url = assert_url_and(\n\n \"host.xz:~byron/to/git\",\n\n url(Scheme::Ssh, None, \"host.xz\", None, b\"/~byron/to/git\"),\n\n )?\n\n .to_string();\n\n assert_eq!(url, \"ssh://host.xz/~byron/to/git\");\n\n Ok(())\n\n}\n\n\n", "file_path": "git-url/tests/parse/ssh.rs", "rank": 96, "score": 149067.90838981332 }, { "content": "#[test]\n\nfn single_link_with_comment_before_path_and_ansi_c_escape() -> crate::Result {\n\n let tmp = git_testtools::tempfile::TempDir::new()?;\n\n let non_alternate = tmp.path().join(\"actual\");\n\n\n\n // let (from, to) = alternate_with(tmp.path().join(\"a\"), non_alternate, Some(\"# comment\\n\\\"../a\\\"\\n\"))?;\n\n let (from, to) = alternate_with(tmp.path().join(\"a\"), non_alternate, Some(\"# comment\\n\"))?;\n\n let alternates = alternate::resolve(from)?;\n\n 
assert_eq!(alternates.len(), 1);\n\n assert_eq!(alternates[0], to);\n\n Ok(())\n\n}\n\n\n", "file_path": "git-odb/tests/odb/alternate/mod.rs", "rank": 97, "score": 149067.90838981332 }, { "content": "fn repo() -> crate::Result<git_repository::Easy> {\n\n crate::repo(\"make_references_repo.sh\").map(Into::into)\n\n}\n\n\n\nmod find {\n\n use std::convert::TryInto;\n\n\n\n use git_repository::{prelude::*, refs};\n\n use git_testtools::hex_to_id;\n\n\n\n use crate::reference::repo;\n\n\n\n #[test]\n\n fn find_and_peel() {\n\n let repo = repo().unwrap();\n\n let mut packed_tag_ref = repo.try_find_reference(\"dt1\").unwrap().expect(\"tag to exist\");\n\n assert_eq!(packed_tag_ref.name(), \"refs/tags/dt1\".try_into().unwrap());\n\n\n\n assert_eq!(\n\n packed_tag_ref.target(),\n", "file_path": "git-repository/tests/reference/mod.rs", "rank": 98, "score": 148058.483319635 }, { "content": "#[test]\n\nfn loose_iter_with_prefix_wont_allow_absolute_paths() -> crate::Result {\n\n let store = store()?;\n\n #[cfg(not(windows))]\n\n let abs_path = \"/hello\";\n\n #[cfg(windows)]\n\n let abs_path = \"c:\\\\hello\";\n\n\n\n match store.loose_iter_prefixed(abs_path) {\n\n Ok(_) => unreachable!(\"absolute paths aren't allowed\"),\n\n Err(err) => assert_eq!(err.to_string(), \"prefix must be a relative path, like 'refs/heads'\"),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "git-ref/tests/file/store/iter.rs", "rank": 99, "score": 147410.0971674333 } ]
Rust
src/dnsmx.rs
oxidizers/drdns
98c1153a09642c2a5d8d2ed77ef7d9429d94995a
use buffer::{Buffer, STDOUT_BUFFER}; use byte; use dns; use libc; use stralloc::StrAlloc; use strerr::{StrErr, STRERR_SYS}; use uint16; use ulong; #[no_mangle] pub unsafe extern "C" fn nomem() { StrErr::die( 111i32, (*b"dnsmx: fatal: \0").as_ptr(), (*b"out of memory\0").as_ptr(), 0i32 as (*const u8), 0i32 as (*const u8), 0i32 as (*const u8), 0i32 as (*const u8), 0i32 as (*const StrErr), ); } static mut seed: [u8; 128] = [0u8; 128]; static mut fqdn: StrAlloc = StrAlloc { s: 0 as (*mut u8), len: 0u32, a: 0u32, }; static mut q: *mut u8 = 0 as (*mut u8); static mut out: StrAlloc = StrAlloc { s: 0 as (*mut u8), len: 0u32, a: 0u32, }; #[no_mangle] pub static mut strnum: [u8; 40] = [0u8; 40]; fn main() { use std::os::unix::ffi::OsStringExt; let mut argv_storage = ::std::env::args_os() .map(|str| { let mut vec = str.into_vec(); vec.push(b'\0'); vec }) .collect::<Vec<_>>(); let mut argv = argv_storage .iter_mut() .map(|vec| vec.as_mut_ptr()) .chain(Some(::std::ptr::null_mut())) .collect::<Vec<_>>(); let ret = unsafe { _c_main(argv_storage.len() as (i32), argv.as_mut_ptr()) }; ::std::process::exit(ret); } #[no_mangle] pub unsafe extern "C" fn _c_main(mut argc: i32, mut argv: *mut *mut u8) -> i32 { let mut i: i32; let mut j: i32; let mut pref: u16; dns::random::init(seed.as_mut_ptr() as (*const u8)); if !(*argv).is_null() { argv = argv.offset(1isize); } 'loop2: loop { if (*argv).is_null() { break; } if StrAlloc::copys(&mut fqdn as (*mut StrAlloc), *argv as (*const u8)) == 0 { nomem(); } if dns::mx::mx( &mut out as (*mut StrAlloc), &mut fqdn as (*mut StrAlloc) as (*const StrAlloc), ) == -1i32 { StrErr::die( 111i32, (*b"dnsmx: fatal: \0").as_ptr(), (*b"unable to find MX records for \0").as_ptr(), *argv as (*const u8), (*b": \0").as_ptr(), 0i32 as (*const u8), 0i32 as (*const u8), &mut STRERR_SYS as (*mut StrErr) as (*const StrErr), ); } if out.len == 0 { if dns::domain::fromdot( &mut q as (*mut *mut u8), *argv as (*const u8), libc::strlen(*argv as *const i8) as u32, ) == 0 { 
nomem(); } if StrAlloc::copys(&mut out as (*mut StrAlloc), (*b"0 \0").as_ptr()) == 0 { nomem(); } if dns::domain::todot_cat(&mut out as (*mut StrAlloc), q as (*const u8)) == 0 { nomem(); } if StrAlloc::cats(&mut out as (*mut StrAlloc), (*b"\n\0").as_ptr()) == 0 { nomem(); } Buffer::put(STDOUT_BUFFER.as_mut_ptr(), out.s as (*const u8), out.len); } else { i = 0i32; 'loop10: loop { if !((i + 2i32) as (u32) < out.len) { break; } j = byte::chr( out.s.offset(i as (isize)).offset(2isize), out.len.wrapping_sub(i as (u32)).wrapping_sub(2u32), 0i32, ) as (i32); uint16::unpack_big( out.s.offset(i as (isize)) as (*const u8), &mut pref as (*mut u16), ); Buffer::put( STDOUT_BUFFER.as_mut_ptr(), strnum.as_mut_ptr() as (*const u8), ulong::fmt(strnum.as_mut_ptr(), pref as (usize)), ); Buffer::puts(STDOUT_BUFFER.as_mut_ptr(), (*b" \0").as_ptr()); Buffer::put( STDOUT_BUFFER.as_mut_ptr(), out.s.offset(i as (isize)).offset(2isize) as (*const u8), j as (u32), ); Buffer::puts(STDOUT_BUFFER.as_mut_ptr(), (*b"\n\0").as_ptr()); i = i + (j + 3i32); } } argv = argv.offset(1isize); } Buffer::flush(STDOUT_BUFFER.as_mut_ptr()); libc::_exit(0i32); }
use buffer::{Buffer, STDOUT_BUFFER}; use byte; use dns; use libc; use stralloc::StrAlloc; use strerr::{StrErr, STRERR_SYS}; use uint16; use ulong; #[no_mangle]
static mut seed: [u8; 128] = [0u8; 128]; static mut fqdn: StrAlloc = StrAlloc { s: 0 as (*mut u8), len: 0u32, a: 0u32, }; static mut q: *mut u8 = 0 as (*mut u8); static mut out: StrAlloc = StrAlloc { s: 0 as (*mut u8), len: 0u32, a: 0u32, }; #[no_mangle] pub static mut strnum: [u8; 40] = [0u8; 40]; fn main() { use std::os::unix::ffi::OsStringExt; let mut argv_storage = ::std::env::args_os() .map(|str| { let mut vec = str.into_vec(); vec.push(b'\0'); vec }) .collect::<Vec<_>>(); let mut argv = argv_storage .iter_mut() .map(|vec| vec.as_mut_ptr()) .chain(Some(::std::ptr::null_mut())) .collect::<Vec<_>>(); let ret = unsafe { _c_main(argv_storage.len() as (i32), argv.as_mut_ptr()) }; ::std::process::exit(ret); } #[no_mangle] pub unsafe extern "C" fn _c_main(mut argc: i32, mut argv: *mut *mut u8) -> i32 { let mut i: i32; let mut j: i32; let mut pref: u16; dns::random::init(seed.as_mut_ptr() as (*const u8)); if !(*argv).is_null() { argv = argv.offset(1isize); } 'loop2: loop { if (*argv).is_null() { break; } if StrAlloc::copys(&mut fqdn as (*mut StrAlloc), *argv as (*const u8)) == 0 { nomem(); } if dns::mx::mx( &mut out as (*mut StrAlloc), &mut fqdn as (*mut StrAlloc) as (*const StrAlloc), ) == -1i32 { StrErr::die( 111i32, (*b"dnsmx: fatal: \0").as_ptr(), (*b"unable to find MX records for \0").as_ptr(), *argv as (*const u8), (*b": \0").as_ptr(), 0i32 as (*const u8), 0i32 as (*const u8), &mut STRERR_SYS as (*mut StrErr) as (*const StrErr), ); } if out.len == 0 { if dns::domain::fromdot( &mut q as (*mut *mut u8), *argv as (*const u8), libc::strlen(*argv as *const i8) as u32, ) == 0 { nomem(); } if StrAlloc::copys(&mut out as (*mut StrAlloc), (*b"0 \0").as_ptr()) == 0 { nomem(); } if dns::domain::todot_cat(&mut out as (*mut StrAlloc), q as (*const u8)) == 0 { nomem(); } if StrAlloc::cats(&mut out as (*mut StrAlloc), (*b"\n\0").as_ptr()) == 0 { nomem(); } Buffer::put(STDOUT_BUFFER.as_mut_ptr(), out.s as (*const u8), out.len); } else { i = 0i32; 'loop10: loop { if !((i + 2i32) 
as (u32) < out.len) { break; } j = byte::chr( out.s.offset(i as (isize)).offset(2isize), out.len.wrapping_sub(i as (u32)).wrapping_sub(2u32), 0i32, ) as (i32); uint16::unpack_big( out.s.offset(i as (isize)) as (*const u8), &mut pref as (*mut u16), ); Buffer::put( STDOUT_BUFFER.as_mut_ptr(), strnum.as_mut_ptr() as (*const u8), ulong::fmt(strnum.as_mut_ptr(), pref as (usize)), ); Buffer::puts(STDOUT_BUFFER.as_mut_ptr(), (*b" \0").as_ptr()); Buffer::put( STDOUT_BUFFER.as_mut_ptr(), out.s.offset(i as (isize)).offset(2isize) as (*const u8), j as (u32), ); Buffer::puts(STDOUT_BUFFER.as_mut_ptr(), (*b"\n\0").as_ptr()); i = i + (j + 3i32); } } argv = argv.offset(1isize); } Buffer::flush(STDOUT_BUFFER.as_mut_ptr()); libc::_exit(0i32); }
pub unsafe extern "C" fn nomem() { StrErr::die( 111i32, (*b"dnsmx: fatal: \0").as_ptr(), (*b"out of memory\0").as_ptr(), 0i32 as (*const u8), 0i32 as (*const u8), 0i32 as (*const u8), 0i32 as (*const u8), 0i32 as (*const StrErr), ); }
function_block-full_function
[ { "content": "//! `uint16.rs`: network byte order (i.e. big endian) conversions\n\n//!\n\n//! This should probably be replaced by the byteorder crate\n\n\n\npub unsafe fn pack(s: *mut u8, u: u16) {\n\n *s.offset(0isize) = (u as (i32) & 255i32) as (u8);\n\n *s.offset(1isize) = (u as (i32) >> 8i32) as (u8);\n\n}\n\n\n\npub unsafe fn pack_big(s: *mut u8, u: u16) {\n\n *s.offset(1isize) = (u as (i32) & 255i32) as (u8);\n\n *s.offset(0isize) = (u as (i32) >> 8i32) as (u8);\n\n}\n\n\n\npub unsafe fn unpack(s: *const u8, u: *mut u16) {\n\n let mut result: u16;\n\n result = *s.offset(1isize) as (u16);\n\n result = (result as (i32) << 8i32) as (u16);\n\n result = (result as (i32) + *s.offset(0isize) as (i32)) as (u16);\n\n *u = result;\n", "file_path": "src/uint16.rs", "rank": 0, "score": 43433.91174787486 }, { "content": "}\n\n\n\npub unsafe fn unpack_big(s: *const u8, u: *mut u16) {\n\n let mut result: u16;\n\n result = *s.offset(0isize) as (u16);\n\n result = (result as (i32) << 8i32) as (u16);\n\n result = (result as (i32) + *s.offset(1isize) as (i32)) as (u16);\n\n *u = result;\n\n}\n", "file_path": "src/uint16.rs", "rank": 1, "score": 43428.44735578065 }, { "content": "//! `ulong.rs`: Functions that act on ulongs\n\n//!\n\n//! 
Not sure exactly what these do, but we can probably get rid of them\n\n\n\npub unsafe fn fmt(mut s: *mut u8, mut u: usize) -> u32 {\n\n let mut len: u32;\n\n let mut q: usize;\n\n len = 1u32;\n\n q = u;\n\n 'loop1: loop {\n\n if !(q > 9usize) {\n\n break;\n\n }\n\n len = len.wrapping_add(1u32);\n\n q = q.wrapping_div(10usize);\n\n }\n\n if !s.is_null() {\n\n s = s.offset(len as (isize));\n\n 'loop4: loop {\n\n *{\n", "file_path": "src/ulong.rs", "rank": 2, "score": 43427.46636388842 }, { "content": " } < 10usize)\n\n {\n\n break;\n\n }\n\n result = result.wrapping_mul(10usize).wrapping_add(c);\n\n pos = pos.wrapping_add(1u32);\n\n }\n\n *u = result;\n\n pos\n\n}\n", "file_path": "src/ulong.rs", "rank": 3, "score": 43422.918672273925 }, { "content": " s = s.offset(-1isize);\n\n s\n\n } = (b'0' as (usize)).wrapping_add(u.wrapping_rem(10usize)) as (u8);\n\n u = u.wrapping_div(10usize);\n\n if u == 0 {\n\n break;\n\n }\n\n }\n\n }\n\n len\n\n}\n\n\n\npub unsafe fn scan(s: *const u8, u: *mut usize) -> u32 {\n\n let mut pos: u32 = 0u32;\n\n let mut result: usize = 0usize;\n\n let mut c: usize;\n\n 'loop1: loop {\n\n if !({\n\n c = (*s.offset(pos as (isize)) as (i32) - b'0' as (i32)) as (u8) as (usize);\n\n c\n", "file_path": "src/ulong.rs", "rank": 4, "score": 43422.918672273925 }, { "content": "//! `byte.rs`: Byte-related functionality which should probably be replaced by\n\n//! 
calls to the standard library\n\n\n\npub unsafe fn chr(s: *mut u8, mut n: u32, c: i32) -> u32 {\n\n let ch: u8;\n\n let mut t: *mut u8;\n\n ch = c as (u8);\n\n t = s;\n\n 'loop1: loop {\n\n if n == 0 {\n\n break;\n\n }\n\n if *t as (i32) == ch as (i32) {\n\n break;\n\n }\n\n t = t.offset(1isize);\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n break;\n\n }\n", "file_path": "src/byte.rs", "rank": 5, "score": 43369.102677312694 }, { "content": " from = from.offset(-1isize);\n\n from\n\n };\n\n n = n.wrapping_sub(1u32);\n\n }\n\n if current_block == 6 {\n\n } else if current_block == 7 {\n\n } else if current_block == 8 {\n\n }\n\n}\n\n\n\npub unsafe fn diff(mut s: *mut u8, mut n: u32, mut t: *mut u8) -> i32 {\n\n let current_block;\n\n 'loop0: loop {\n\n if n == 0 {\n\n current_block = 13;\n\n break;\n\n }\n\n if *s as (i32) != *t as (i32) {\n\n current_block = 12;\n", "file_path": "src/byte.rs", "rank": 6, "score": 43364.52086600733 }, { "content": " _old\n\n } = 0u8;\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n break;\n\n }\n\n *{\n\n let _old = s;\n\n s = s.offset(1isize);\n\n _old\n\n } = 0u8;\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n break;\n\n }\n\n *{\n\n let _old = s;\n\n s = s.offset(1isize);\n\n _old\n\n } = 0u8;\n", "file_path": "src/byte.rs", "rank": 7, "score": 43364.52086600733 }, { "content": " 0i32\n\n } else if current_block == 10 {\n\n 0i32\n\n } else if current_block == 11 {\n\n 0i32\n\n } else if current_block == 12 {\n\n *s as (u32) as (i32) - *t as (u32) as (i32)\n\n } else {\n\n 0i32\n\n }\n\n}\n\n\n\npub unsafe fn zero(mut s: *mut u8, mut n: u32) {\n\n 'loop0: loop {\n\n if n == 0 {\n\n break;\n\n }\n\n *{\n\n let _old = s;\n\n s = s.offset(1isize);\n", "file_path": "src/byte.rs", "rank": 8, "score": 43364.52086600733 }, { "content": " break;\n\n }\n\n s = s.offset(1isize);\n\n t = t.offset(1isize);\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n current_block = 11;\n\n break;\n\n }\n\n if *s as (i32) != *t as (i32) {\n\n 
current_block = 12;\n\n break;\n\n }\n\n s = s.offset(1isize);\n\n t = t.offset(1isize);\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n current_block = 10;\n\n break;\n\n }\n", "file_path": "src/byte.rs", "rank": 9, "score": 43364.52086600733 }, { "content": " };\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n current_block = 7;\n\n break;\n\n }\n\n *{\n\n let _old = to;\n\n to = to.offset(1isize);\n\n _old\n\n } = *{\n\n let _old = from;\n\n from = from.offset(1isize);\n\n _old\n\n };\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n current_block = 6;\n\n break;\n\n }\n", "file_path": "src/byte.rs", "rank": 10, "score": 43364.52086600733 }, { "content": " if *t as (i32) == ch as (i32) {\n\n break;\n\n }\n\n t = t.offset(1isize);\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n break;\n\n }\n\n if *t as (i32) == ch as (i32) {\n\n break;\n\n }\n\n t = t.offset(1isize);\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n break;\n\n }\n\n if *t as (i32) == ch as (i32) {\n\n break;\n\n }\n\n t = t.offset(1isize);\n", "file_path": "src/byte.rs", "rank": 11, "score": 43364.52086600733 }, { "content": " n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n break;\n\n }\n\n *{\n\n let _old = s;\n\n s = s.offset(1isize);\n\n _old\n\n } = 0u8;\n\n n = n.wrapping_sub(1u32);\n\n }\n\n}\n", "file_path": "src/byte.rs", "rank": 12, "score": 43364.52086600733 }, { "content": " if *s as (i32) != *t as (i32) {\n\n current_block = 12;\n\n break;\n\n }\n\n s = s.offset(1isize);\n\n t = t.offset(1isize);\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n current_block = 9;\n\n break;\n\n }\n\n if *s as (i32) != *t as (i32) {\n\n current_block = 12;\n\n break;\n\n }\n\n s = s.offset(1isize);\n\n t = t.offset(1isize);\n\n n = n.wrapping_sub(1u32);\n\n }\n\n if current_block == 9 {\n", "file_path": "src/byte.rs", "rank": 13, "score": 43364.52086600733 }, { "content": " if n == 0 {\n\n current_block = 7;\n\n break;\n\n }\n\n *{\n\n to = to.offset(-1isize);\n\n to\n\n } = *{\n\n from = 
from.offset(-1isize);\n\n from\n\n };\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n current_block = 6;\n\n break;\n\n }\n\n *{\n\n to = to.offset(-1isize);\n\n to\n\n } = *{\n", "file_path": "src/byte.rs", "rank": 14, "score": 43364.52086600733 }, { "content": " *{\n\n to = to.offset(-1isize);\n\n to\n\n } = *{\n\n from = from.offset(-1isize);\n\n from\n\n };\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n current_block = 8;\n\n break;\n\n }\n\n *{\n\n to = to.offset(-1isize);\n\n to\n\n } = *{\n\n from = from.offset(-1isize);\n\n from\n\n };\n\n n = n.wrapping_sub(1u32);\n", "file_path": "src/byte.rs", "rank": 15, "score": 43364.52086600733 }, { "content": " from = from.offset(1isize);\n\n _old\n\n };\n\n n = n.wrapping_sub(1u32);\n\n }\n\n if current_block == 5 {\n\n } else if current_block == 6 {\n\n } else if current_block == 7 {\n\n }\n\n}\n\n\n\npub unsafe fn copyr(mut to: *mut u8, mut n: u32, mut from: *mut u8) {\n\n let current_block;\n\n to = to.offset(n as (isize));\n\n from = from.offset(n as (isize));\n\n 'loop1: loop {\n\n if n == 0 {\n\n current_block = 9;\n\n break;\n\n }\n", "file_path": "src/byte.rs", "rank": 16, "score": 43364.52086600733 }, { "content": " n = n.wrapping_sub(1u32);\n\n }\n\n ((t as (isize)).wrapping_sub(s as (isize)) / ::std::mem::size_of::<u8>() as (isize)) as (u32)\n\n}\n\n\n\npub unsafe fn copy(mut to: *mut u8, mut n: u32, mut from: *mut u8) {\n\n let current_block;\n\n 'loop0: loop {\n\n if n == 0 {\n\n current_block = 8;\n\n break;\n\n }\n\n *{\n\n let _old = to;\n\n to = to.offset(1isize);\n\n _old\n\n } = *{\n\n let _old = from;\n\n from = from.offset(1isize);\n\n _old\n", "file_path": "src/byte.rs", "rank": 17, "score": 43364.52086600733 }, { "content": " *{\n\n let _old = to;\n\n to = to.offset(1isize);\n\n _old\n\n } = *{\n\n let _old = from;\n\n from = from.offset(1isize);\n\n _old\n\n };\n\n n = n.wrapping_sub(1u32);\n\n if n == 0 {\n\n current_block = 5;\n\n break;\n\n }\n\n *{\n\n let _old = to;\n\n to = 
to.offset(1isize);\n\n _old\n\n } = *{\n\n let _old = from;\n", "file_path": "src/byte.rs", "rank": 18, "score": 43364.52086600733 }, { "content": "use buffer::{self, Buffer};\n\nuse byte;\n\nuse case;\n\nuse cdb::Cdb;\n\nuse dns;\n\nuse droproot::droproot;\n\nuse ip4;\n\nuse libc;\n\nuse open;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse tai::Tai;\n\nuse uint16;\n\nuse uint32;\n\nuse ulong;\n\n\n\nextern \"C\" {\n\n fn qlog(\n\n arg1: *const u8,\n\n arg2: u16,\n", "file_path": "src/axfrdns.rs", "rank": 19, "score": 19.57142320912045 }, { "content": "use alloc;\n\nuse byte;\n\nuse cache;\n\nuse dns::{self, DnsTransmit};\n\nuse droproot::droproot;\n\nuse errno::{self, Errno};\n\nuse ip4;\n\nuse iopause::iopause;\n\nuse libc;\n\nuse ndelay;\n\nuse socket;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\nuse uint16;\n\nuse ulong;\n\n\n\nextern \"C\" {\n\n fn log_query(\n\n arg1: *mut usize,\n", "file_path": "src/dnscache.rs", "rank": 20, "score": 19.267090583362698 }, { "content": "//! 
`dns/domain.rs`: DNS domain functions\n\n\n\nuse alloc;\n\nuse byte;\n\nuse case;\n\nuse errno::{self, Errno};\n\nuse libc;\n\nuse stralloc::StrAlloc;\n\n\n\npub unsafe fn length(dn: *const u8) -> u32 {\n\n let mut x: *const u8;\n\n let mut c: u8;\n\n x = dn;\n\n 'loop1: loop {\n\n if {\n\n c = *{\n\n let _old = x;\n\n x = x.offset(1isize);\n\n _old\n\n };\n", "file_path": "src/dns/domain.rs", "rank": 22, "score": 17.750934717929006 }, { "content": "use byte;\n\nuse case;\n\nuse uint16;\n\nuse ulong;\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn parsetype(mut s: *mut u8, mut type_: *mut u8) -> i32 {\n\n let mut u: usize;\n\n if *s.offset(ulong::scan(s as (*const u8), &mut u as (*mut usize)) as\n\n (isize)) == 0\n\n {\n\n uint16::pack_big(type_, u as (u16));\n\n } else if case::diffs(s as (*const u8), (*b\"any\\0\").as_ptr()) == 0 {\n\n byte::copy(type_, 2u32, (*b\"\\0\\xFF\\0\").as_ptr() as (*mut u8));\n\n } else if case::diffs(s as (*const u8), (*b\"a\\0\").as_ptr()) == 0 {\n\n byte::copy(type_, 2u32, (*b\"\\0\\x01\\0\").as_ptr() as (*mut u8));\n\n } else if case::diffs(s as (*const u8), (*b\"ns\\0\").as_ptr()) == 0 {\n\n byte::copy(type_, 2u32, (*b\"\\0\\x02\\0\").as_ptr() as (*mut u8));\n\n } else if case::diffs(s as (*const u8), (*b\"mx\\0\").as_ptr()) == 0 {\n\n byte::copy(type_, 2u32, (*b\"\\0\\x0F\\0\").as_ptr() as (*mut u8));\n", "file_path": "src/parsetype.rs", "rank": 23, "score": 17.466093988591695 }, { "content": "use buffer::{self, Buffer};\n\nuse byte;\n\nuse case;\n\nuse cdb::CdbMake;\n\nuse dns;\n\nuse ip4;\n\nuse libc;\n\nuse open;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse uint16;\n\nuse uint32;\n\nuse ulong;\n\n\n\nextern \"C\" {\n\n fn __swbuf(arg1: i32, arg2: *mut __sFILE) -> i32;\n\n fn fstat(arg1: i32, arg2: *mut stat) -> i32;\n\n fn fsync(arg1: i32) -> i32;\n\n fn getln(arg1: *mut Buffer, arg2: *mut StrAlloc, arg3: *mut i32, arg4: i32) -> i32;\n\n fn rename(__old: *const u8, __new: *const u8) -> i32;\n\n fn 
umask(arg1: u16) -> u16;\n\n}\n\n\n", "file_path": "src/tinydns-data.rs", "rank": 24, "score": 16.552961882899957 }, { "content": "//! `socket.rs`: Socket-related functionality\n\n//!\n\n//! This should probably be replaced by `std::io`\n\n\n\nuse byte;\n\nuse libc;\n\nuse ndelay;\n\nuse uint16;\n\n\n\npub unsafe fn accept4(s: i32, ip: *mut u8, port: *mut u16) -> i32 {\n\n let mut sa: libc::sockaddr_in = ::std::mem::zeroed();\n\n let mut dummy: i32 = ::std::mem::size_of::<libc::sockaddr_in>() as (i32);\n\n let fd = libc::accept(\n\n s,\n\n &mut sa as (*mut libc::sockaddr_in) as (*mut libc::sockaddr),\n\n &mut dummy as (*mut i32) as (*mut u32),\n\n );\n\n if fd == -1i32 {\n\n -1i32\n\n } else {\n", "file_path": "src/socket.rs", "rank": 25, "score": 16.439465279874142 }, { "content": "use byte;\n\nuse dns;\n\nuse errno::{self, Errno};\n\nuse libc;\n\nuse stralloc::StrAlloc;\n\nuse uint16;\n\n\n\nextern \"C\" {\n\n fn printrecord_cat(\n\n arg1: *mut StrAlloc,\n\n arg2: *const u8,\n\n arg3: u32,\n\n arg4: u32,\n\n arg5: *const u8,\n\n arg6: *const u8,\n\n ) -> u32;\n\n}\n\n\n\nstatic mut d: *mut u8 = 0 as (*mut u8);\n\n\n", "file_path": "src/printpacket.rs", "rank": 26, "score": 16.343673234213263 }, { "content": "use byte;\n\nuse buffer::{self, Buffer};\n\nuse dns;\n\nuse errno::{self, Errno};\n\nuse ip4;\n\nuse libc;\n\nuse open;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse uint16;\n\nuse uint32;\n\nuse ulong;\n\n\n\nextern \"C\" {\n\n fn __swbuf(arg1: i32, arg2: *mut __sFILE) -> i32;\n\n fn fsync(arg1: i32) -> i32;\n\n fn getln(arg1: *mut Buffer, arg2: *mut StrAlloc, arg3: *mut i32, arg4: i32) -> i32;\n\n fn rename(__old: *const u8, __new: *const u8) -> i32;\n\n fn timeoutread(t: i32, fd: i32, buf: *mut u8, len: i32) -> i32;\n\n fn timeoutwrite(t: i32, fd: i32, buf: *mut u8, len: i32) -> i32;\n\n}\n\n\n", "file_path": "src/axfr-get.rs", "rank": 27, "score": 16.061650869584753 }, { "content": "use buffer::{Buffer, STDERR_BUFFER};\n\nuse 
byte;\n\nuse cache;\n\nuse errno::{self, Errno};\n\nuse libc;\n\nuse uint16;\n\nuse uint32;\n\n\n\nextern \"C\" {\n\n static mut numqueries: usize;\n\n static mut tactive: i32;\n\n static mut uactive: i32;\n\n}\n\n\n\nstatic mut u64: usize = 0usize;\n\n\n\nunsafe extern \"C\" fn string(mut s: *const u8) {\n\n Buffer::puts(STDERR_BUFFER.as_mut_ptr(), s);\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 28, "score": 15.601826445103818 }, { "content": "use byte;\n\nuse dns;\n\nuse errno::{self, Errno};\n\nuse libc;\n\nuse uint16;\n\nuse uint32;\n\n\n\nstatic mut d: *mut u8 = 0 as (*mut u8);\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn printrecord_cat(\n\n mut out: *mut StrAlloc,\n\n mut buf: *const u8,\n\n mut len: u32,\n\n mut pos: u32,\n\n mut q: *const u8,\n\n mut qtype: *const u8,\n\n) -> u32 {\n\n let mut _currentBlock;\n\n let mut x: *const u8;\n", "file_path": "src/printrecord.rs", "rank": 29, "score": 15.374661662779774 }, { "content": "use alloc;\n\nuse byte;\n\nuse errno::{self, Errno};\n\nuse libc;\n\nuse socket;\n\nuse super::{domain, packet, random};\n\nuse taia::TaiA;\n\nuse uint16;\n\n\n\n#[derive(Copy)]\n\n#[repr(C)]\n\npub struct DnsTransmit {\n\n pub query: *mut u8,\n\n pub querylen: u32,\n\n pub packet: *mut u8,\n\n pub packetlen: u32,\n\n pub s1: i32,\n\n pub tcpstate: i32,\n\n pub udploop: u32,\n\n pub curserver: u32,\n", "file_path": "src/dns/transmit.rs", "rank": 30, "score": 15.280618739439852 }, { "content": "use alloc;\n\nuse buffer::{Buffer, STDOUT_BUFFER};\n\nuse byte;\n\nuse dns::{self, DnsTransmit};\n\nuse errno::errno;\n\nuse ip4;\n\nuse iopause::iopause;\n\nuse libc;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\nuse ulong;\n\n\n\nextern \"C\" {\n\n fn sgetoptmine(arg1: i32, arg2: *mut *mut u8, arg3: *const u8) -> i32;\n\n static mut subgetoptarg: *mut u8;\n\n static mut subgetoptdone: i32;\n\n}\n\n\n", "file_path": "src/dnsfilter.rs", "rank": 31, "score": 15.01614188831561 }, 
{ "content": "use byte;\n\nuse case;\n\nuse cdb::Cdb;\n\nuse dns;\n\nuse libc;\n\nuse open;\n\nuse tai::Tai;\n\nuse uint16;\n\nuse uint32;\n\n\n\nextern \"C\" {\n\n static mut response: *mut u8;\n\n fn response_addbytes(arg1: *const u8, arg2: u32) -> i32;\n\n fn response_addname(arg1: *const u8) -> i32;\n\n static mut response_len: u32;\n\n fn response_nxdomain();\n\n fn response_rfinish(arg1: i32);\n\n fn response_rstart(arg1: *const u8, arg2: *const u8, arg3: u32) -> i32;\n\n}\n\n\n", "file_path": "src/tdlookup.rs", "rank": 32, "score": 14.976450018566501 }, { "content": "use buffer::{self, Buffer};\n\nuse byte;\n\nuse cdb::CdbMake;\n\nuse ip4;\n\nuse libc;\n\nuse open;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse ulong;\n\n\n\nextern \"C\" {\n\n fn __swbuf(arg1: i32, arg2: *mut __sFILE) -> i32;\n\n fn fsync(arg1: i32) -> i32;\n\n fn getln(arg1: *mut Buffer, arg2: *mut StrAlloc, arg3: *mut i32, arg4: i32) -> i32;\n\n fn rename(__old: *const u8, __new: *const u8) -> i32;\n\n fn umask(arg1: u16) -> u16;\n\n}\n\n\n", "file_path": "src/rbldns-data.rs", "rank": 33, "score": 14.616219152780818 }, { "content": "//! 
`dns/name.rs`: DNS name facilities\n\n\n\nuse byte;\n\nuse stralloc::StrAlloc;\n\nuse dns;\n\nuse super::DnsTransmit;\n\nuse uint16;\n\nuse ulong;\n\n\n\nstatic mut Q: *mut u8 = 0i32 as (*mut u8);\n\n\n\npub unsafe fn packet(out: *mut StrAlloc, buf: *const u8, len: u32) -> i32 {\n\n let current_block;\n\n let mut pos: u32;\n\n let mut header: [u8; 12] = [0u8; 12];\n\n let mut numanswers: u16 = 0;\n\n let mut datalen: u16 = 0;\n\n if StrAlloc::copys(out, (*b\"\\0\").as_ptr()) == 0 {\n\n -1i32\n\n } else {\n", "file_path": "src/dns/name.rs", "rank": 34, "score": 14.516326858114407 }, { "content": "use byte;\n\nuse buffer::{Buffer, STDOUT_BUFFER};\n\nuse dns::{self, DnsTransmit};\n\nuse errno::errno;\n\nuse iopause::iopause;\n\nuse libc;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\nuse uint16;\n\n\n\nextern \"C\" {\n\n fn parsetype(arg1: *mut u8, arg2: *mut u8) -> i32;\n\n fn printpacket_cat(arg1: *mut StrAlloc, arg2: *mut u8, arg3: u32) -> u32;\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn usage() {\n\n StrErr::die(\n", "file_path": "src/dnsq.rs", "rank": 35, "score": 14.48497007586864 }, { "content": "use alloc;\n\nuse buffer::{self, Buffer};\n\nuse byte;\n\nuse case;\n\nuse cdb::CdbMake;\n\nuse dns;\n\nuse ip4;\n\nuse libc;\n\nuse open;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse ulong;\n\n\n\nextern \"C\" {\n\n fn __swbuf(arg1: i32, arg2: *mut __sFILE) -> i32;\n\n fn fsync(arg1: i32) -> i32;\n\n fn getln(arg1: *mut Buffer, arg2: *mut StrAlloc, arg3: *mut i32, arg4: i32) -> i32;\n\n fn rename(__old: *const u8, __new: *const u8) -> i32;\n\n fn umask(arg1: u16) -> u16;\n\n}\n\n\n", "file_path": "src/pickdns-data.rs", "rank": 36, "score": 14.423874992637284 }, { "content": "use alloc;\n\nuse buffer::{Buffer, STDOUT_BUFFER};\n\nuse byte;\n\nuse dns::{self, DnsTransmit};\n\nuse errno::{self, Errno};\n\nuse ip4;\n\nuse iopause::iopause;\n\nuse libc;\n\nuse 
stralloc::StrAlloc;\n\nuse strerr::StrErr;\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\nuse uint16;\n\n\n\nextern \"C\" {\n\n fn dd(arg1: *const u8, arg2: *const u8, arg3: *mut u8) -> i32;\n\n fn ip4_fmt(arg1: *mut u8, arg2: *const u8) -> u32;\n\n fn parsetype(arg1: *mut u8, arg2: *mut u8) -> i32;\n\n fn printrecord(\n\n arg1: *mut StrAlloc,\n", "file_path": "src/dnstrace.rs", "rank": 37, "score": 14.405112497614322 }, { "content": "use alloc;\n\nuse byte;\n\nuse cache;\n\nuse case;\n\nuse dns::{self, DnsTransmit};\n\nuse errno::{self, Errno};\n\nuse libc;\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\nuse uint16;\n\nuse uint32;\n\n\n\nextern \"C\" {\n\n fn dd(arg1: *const u8, arg2: *const u8, arg3: *mut u8) -> i32;\n\n fn log_cachedanswer(arg1: *const u8, arg2: *const u8);\n\n fn log_cachedcname(arg1: *const u8, arg2: *const u8);\n\n fn log_cachedns(arg1: *const u8, arg2: *const u8);\n\n fn log_cachednxdomain(arg1: *const u8);\n\n fn log_lame(arg1: *const u8, arg2: *const u8, arg3: *const u8);\n\n fn log_nodata(arg1: *const u8, arg2: *const u8, arg3: *const u8, arg4: u32);\n", "file_path": "src/query.rs", "rank": 38, "score": 13.857976280654295 }, { "content": "use buffer::{self, Buffer};\n\nuse byte;\n\nuse dns;\n\nuse ip4;\n\nuse libc;\n\nuse open;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse string;\n\nuse ulong;\n\n\n\nextern \"C\" {\n\n fn __swbuf(arg1: i32, arg2: *mut __sFILE) -> i32;\n\n fn fchmod(arg1: i32, arg2: u16) -> i32;\n\n fn fstat(arg1: i32, arg2: *mut stat) -> i32;\n\n fn fsync(arg1: i32) -> i32;\n\n fn getln(arg1: *mut Buffer, arg2: *mut StrAlloc, arg3: *mut i32, arg4: i32) -> i32;\n\n fn rename(__old: *const u8, __new: *const u8) -> i32;\n\n fn umask(arg1: u16) -> u16;\n\n}\n\n\n", "file_path": "src/tinydns-edit.rs", "rank": 39, "score": 13.848630325909674 }, { "content": "use libc;\n\nuse prot;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse ulong;\n\n\n\npub unsafe fn droproot(fatal: *const u8) {\n\n let mut id: usize = 0;\n\n let mut 
x = libc::getenv((*b\"ROOT\\0\").as_ptr() as *const libc::c_char) as *mut u8;\n\n if x.is_null() {\n\n StrErr::die(\n\n 111i32,\n\n fatal,\n\n (*b\"$ROOT not set\\0\").as_ptr(),\n\n 0i32 as (*const u8),\n\n 0i32 as (*const u8),\n\n 0i32 as (*const u8),\n\n 0i32 as (*const u8),\n\n 0i32 as (*const StrErr),\n\n );\n\n }\n", "file_path": "src/droproot.rs", "rank": 40, "score": 13.629043098855169 }, { "content": "use byte;\n\nuse buffer::{Buffer, STDOUT_BUFFER};\n\nuse case;\n\nuse dns;\n\nuse ip4;\n\nuse libc;\n\nuse stralloc::StrAlloc;\n\nuse uint16;\n\n\n\nextern \"C\" {\n\n fn parsetype(arg1: *mut u8, arg2: *mut u8) -> i32;\n\n fn printpacket_cat(arg1: *mut StrAlloc, arg2: *mut u8, arg3: u32) -> u32;\n\n fn respond(arg1: *mut u8, arg2: *mut u8, arg3: *mut u8) -> i32;\n\n static mut response: *mut u8;\n\n static mut response_len: u32;\n\n fn response_query(arg1: *const u8, arg2: *const u8, arg3: *const u8) -> i32;\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn usage() {\n", "file_path": "src/tinydns-get.rs", "rank": 41, "score": 13.516454390420936 }, { "content": "//! `alloc.rs`: Legacy allocator functionality\n\n//!\n\n//! This should eventually be replaced with the Rust global allocator, and in\n\n//! a perfect world safe Rust that uses `Box` and `Heap`.\n\n\n\nuse byte;\n\nuse errno::{self, Errno};\n\nuse libc;\n\n\n\npub unsafe fn alloc(n: u32) -> *mut u8 {\n\n let x: *mut u8;\n\n x = libc::malloc(n as (usize)) as (*mut u8);\n\n if x.is_null() {\n\n errno::set_errno(Errno(libc::ENOMEM));\n\n }\n\n x\n\n}\n\n\n\npub unsafe fn alloc_re(x: *mut *mut u8, m: u32, n: u32) -> i32 {\n\n let y: *mut u8;\n", "file_path": "src/alloc.rs", "rank": 42, "score": 13.110734279729975 }, { "content": "//! `buffer.rs`: Buffered I/O\n\n//!\n\n//! This should probably be replaced eventually with e.g. 
the bytes crate\n\n\n\nuse byte;\n\nuse errno::{errno, Errno};\n\nuse libc;\n\n\n\npub type Op = unsafe fn(i32, *const u8, u32) -> i32;\n\n\n\n#[derive(Copy)]\n\n#[repr(C)]\n\npub struct Buffer {\n\n pub x: *mut u8,\n\n pub p: u32,\n\n pub n: u32,\n\n pub fd: i32,\n\n pub op: Option<Op>,\n\n}\n\n\n", "file_path": "src/buffer.rs", "rank": 43, "score": 12.924460828857876 }, { "content": "use buffer::{Buffer, STDERR_BUFFER};\n\nuse byte;\n\nuse case;\n\nuse dns;\n\nuse droproot::droproot;\n\nuse ip4;\n\nuse libc;\n\nuse ndelay;\n\nuse socket;\n\nuse strerr::{StrErr, STRERR_SYS};\n\n\n\nextern \"C\" {\n\n static mut fatal: *mut u8;\n\n fn initialize();\n\n fn qlog(\n\n arg1: *const u8,\n\n arg2: u16,\n\n arg3: *const u8,\n\n arg4: *const u8,\n\n arg5: *const u8,\n", "file_path": "src/server.rs", "rank": 44, "score": 12.735276794849717 }, { "content": "use byte;\n\nuse dns;\n\nuse errno::{self, Errno};\n\nuse ip4;\n\nuse libc;\n\nuse open;\n\nuse openreadclose::openreadclose;\n\nuse stralloc::StrAlloc;\n\nuse string;\n\n\n\nextern \"C\" {\n\n fn chdir(arg1: *const u8) -> i32;\n\n fn closedir(arg1: *mut Struct1) -> i32;\n\n fn fchdir(arg1: i32) -> i32;\n\n fn opendir(arg1: *const u8) -> *mut Struct1;\n\n fn readdir(arg1: *mut Struct1) -> *mut dirent;\n\n}\n\n\n", "file_path": "src/roots.rs", "rank": 45, "score": 12.603795673956927 }, { "content": "//! `dns/rcrw.rs`: rewrite resolv.conf\n\n\n\nuse byte;\n\nuse libc;\n\nuse openreadclose::openreadclose;\n\nuse stralloc::StrAlloc;\n\nuse string;\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\n\n\nstatic mut DATA: StrAlloc = StrAlloc {\n\n s: 0i32 as (*mut u8),\n\n len: 0u32,\n\n a: 0u32,\n\n};\n\n\n\nstatic mut OK: i32 = 0i32;\n\n\n\nstatic mut USES: u32 = 0u32;\n\n\n", "file_path": "src/dns/rcrw.rs", "rank": 46, "score": 12.519149692044984 }, { "content": "//! 
`dns/resolvconf.rs`: Functions for interacting with resolv.conf\n\n\n\nuse byte;\n\nuse ip4;\n\nuse libc;\n\nuse openreadclose::openreadclose;\n\nuse stralloc::StrAlloc;\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\n\n\nstatic mut DATA: StrAlloc = StrAlloc {\n\n s: 0i32 as (*mut u8),\n\n len: 0u32,\n\n a: 0u32,\n\n};\n\n\n\nstatic mut OK: i32 = 0i32;\n\n\n\nstatic mut USES: u32 = 0u32;\n\n\n", "file_path": "src/dns/rcip.rs", "rank": 47, "score": 12.456148644352911 }, { "content": "//! `stralloc.rs`: Heap-backed string type\n\n//!\n\n//! This should probably be replaced by Rust's `String` type\n\n\n\nuse alloc;\n\nuse byte;\n\nuse libc;\n\n\n\n#[derive(Copy)]\n\n#[repr(C)]\n\npub struct StrAlloc {\n\n pub s: *mut u8,\n\n pub len: u32,\n\n pub a: u32,\n\n}\n\n\n\nimpl Clone for StrAlloc {\n\n fn clone(&self) -> Self {\n\n *self\n\n }\n", "file_path": "src/stralloc.rs", "rank": 48, "score": 12.436936947433063 }, { "content": " byte::copy(\n\n ip,\n\n 4u32,\n\n &mut sa.sin_addr as (*mut libc::in_addr) as (*mut u8),\n\n );\n\n uint16::unpack_big(\n\n &mut sa.sin_port as (*mut u16) as (*mut u8) as (*const u8),\n\n port,\n\n );\n\n fd\n\n }\n\n}\n\n\n\npub unsafe fn bind4(s: i32, ip: *mut u8, port: u16) -> i32 {\n\n let mut sa: libc::sockaddr_in = ::std::mem::zeroed();\n\n sa.sin_family = 2;\n\n uint16::pack_big(&mut sa.sin_port as (*mut u16) as (*mut u8), port);\n\n byte::copy(\n\n &mut sa.sin_addr as (*mut libc::in_addr) as (*mut u8),\n\n 4u32,\n", "file_path": "src/socket.rs", "rank": 49, "score": 12.368419156655724 }, { "content": "use byte;\n\nuse dns;\n\nuse uint16;\n\nuse uint32;\n\n\n\n#[no_mangle]\n\npub static mut response: [u8; 65535] = [0u8; 65535];\n\n\n\n#[no_mangle]\n\npub static mut response_len: u32 = 0u32;\n\n\n\nstatic mut tctarget: u32 = 0u32;\n\n\n\nstatic mut name: [[u8; 128]; 100] = [[0u8; 128]; 100];\n\n\n\nstatic mut name_ptr: [u32; 100] = [0u32; 100];\n\n\n\nstatic mut name_num: u32 = 0u32;\n\n\n\n#[no_mangle]\n", "file_path": "src/response.rs", "rank": 
50, "score": 12.04991621167307 }, { "content": "use byte;\n\nuse cdb::Cdb;\n\nuse dns;\n\nuse ip4;\n\nuse libc;\n\nuse open;\n\nuse uint32;\n\nuse strerr::StrErr;\n\n\n\nextern \"C\" {\n\n fn dd(arg1: *const u8, arg2: *const u8, arg3: *mut u8) -> i32;\n\n static mut response: *mut u8;\n\n fn response_addbytes(arg1: *const u8, arg2: u32) -> i32;\n\n fn response_nxdomain();\n\n fn response_rfinish(arg1: i32);\n\n fn response_rstart(arg1: *const u8, arg2: *const u8, arg3: u32) -> i32;\n\n}\n\n\n\nstatic mut base: *mut u8 = 0 as (*mut u8);\n\n\n", "file_path": "src/rbldns.rs", "rank": 51, "score": 11.604748086815004 }, { "content": "use buffer::{Buffer, STDOUT_BUFFER};\n\nuse dns;\n\nuse libc;\n\nuse ulong;\n\n\n\n#[no_mangle]\n\npub static mut ip: [u8; 4] = [0u8; 4];\n\n\n\n#[no_mangle]\n\npub static mut ipfixed: i32 = 0i32;\n\n\n\n#[no_mangle]\n\npub static mut loops: usize = 10000usize;\n\n\n\n#[no_mangle]\n\npub static mut tab: [u8; 256] = [0u8; 256];\n\n\n\n#[no_mangle]\n\npub static mut strnum: [u8; 40] = [0u8; 40];\n\n\n\n#[no_mangle]\n\npub static mut seed: [u8; 128] = [0u8; 128];\n\n\n", "file_path": "src/random-ip.rs", "rank": 52, "score": 11.594192355141521 }, { "content": " nomem();\n\n }\n\n ulong::scan(f[1usize].s as (*const u8), &mut u as (*mut usize));\n\n uint16::pack_big(type_.as_mut_ptr(), u as (u16));\n\n if byte::diff(\n\n type_.as_mut_ptr(),\n\n 2u32,\n\n (*b\"\\0\\xFC\\0\").as_ptr() as (*mut u8),\n\n ) == 0\n\n {\n\n syntaxerror((*b\": type AXFR prohibited\\0\").as_ptr());\n\n }\n\n if byte::diff(type_.as_mut_ptr(), 2u32, (*b\"\\0\\0\\0\").as_ptr() as (*mut u8)) == 0 {\n\n syntaxerror((*b\": type 0 prohibited\\0\").as_ptr());\n\n }\n\n if byte::diff(\n\n type_.as_mut_ptr(),\n\n 2u32,\n\n (*b\"\\0\\x06\\0\").as_ptr() as (*mut u8),\n\n ) == 0\n", "file_path": "src/tinydns-data.rs", "rank": 53, "score": 11.590516832500473 }, { "content": "use buffer::{Buffer, STDOUT_BUFFER};\n\nuse dns::{self, DnsTransmit};\n\nuse errno::errno;\n\nuse libc;\n\nuse 
stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\nuse uint16;\n\n\n\nextern \"C\" {\n\n fn parsetype(arg1: *mut u8, arg2: *mut u8) -> i32;\n\n fn printpacket_cat(arg1: *mut StrAlloc, arg2: *mut u8, arg3: u32) -> u32;\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn usage() {\n\n StrErr::die(\n\n 100i32,\n\n (*b\"dnsqr: usage: dnsqr type name\\0\").as_ptr(),\n", "file_path": "src/dnsqr.rs", "rank": 54, "score": 11.523674270956057 }, { "content": " errno::set_errno(Errno(libc::EPERM));\n\n -1i32\n\n } else {\n\n cleanup(z);\n\n (*z).level = 0u32;\n\n (*z).loopvar = 0u32;\n\n (if dns::domain::copy(&mut (*z).name[0usize] as (*mut *mut u8), dn as (*const u8)) == 0 {\n\n -1i32\n\n } else {\n\n byte::copy((*z).type_.as_mut_ptr(), 2u32, type_);\n\n byte::copy((*z).class.as_mut_ptr(), 2u32, class);\n\n byte::copy((*z).localip.as_mut_ptr(), 4u32, localip);\n\n doit(z, 0i32)\n\n })\n\n }\n\n}\n\n\n\n#[derive(Copy)]\n\n#[repr(C)]\n\npub struct pollfd {\n", "file_path": "src/query.rs", "rank": 55, "score": 11.480184976764585 }, { "content": " let mut dummy: i32 = ::std::mem::size_of::<libc::sockaddr_in>() as (i32);\n\n let r = libc::recvfrom(\n\n s,\n\n buf as (*mut ::libc::c_void),\n\n len as (usize),\n\n 0i32,\n\n &mut sa as (*mut libc::sockaddr_in) as (*mut libc::sockaddr),\n\n &mut dummy as (*mut i32) as (*mut u32),\n\n ) as (i32);\n\n if r == -1i32 {\n\n -1i32\n\n } else {\n\n byte::copy(\n\n ip,\n\n 4u32,\n\n &mut sa.sin_addr as (*mut libc::in_addr) as (*mut u8),\n\n );\n\n uint16::unpack_big(\n\n &mut sa.sin_port as (*mut u16) as (*mut u8) as (*const u8),\n\n port,\n", "file_path": "src/socket.rs", "rank": 56, "score": 11.454879647650358 }, { "content": "//! 
`cdb/cdb.rs`: C DataBase (CDB) file reader\n\n\n\nuse byte;\n\nuse errno::{self, Errno};\n\nuse libc;\n\nuse uint32;\n\nuse super::hash as cdb_hash;\n\n\n\n/// C DataBase file reader\n\n#[derive(Copy)]\n\n#[repr(C)]\n\npub struct Cdb {\n\n pub map: *mut u8,\n\n pub fd: i32,\n\n pub size: u32,\n\n pub loopvar: u32,\n\n pub khash: u32,\n\n pub kpos: u32,\n\n pub hpos: u32,\n\n pub hslots: u32,\n", "file_path": "src/cdb/cdb.rs", "rank": 57, "score": 11.404588615102163 }, { "content": "use byte;\n\nuse case;\n\nuse stralloc::StrAlloc;\n\nuse string;\n\nuse super::{DnsTransmit, domain, packet, rcrw, resolve, sortip};\n\nuse uint16;\n\n\n\npub unsafe fn packet(out: *mut StrAlloc, buf: *const u8, len: u32) -> i32 {\n\n let current_block;\n\n let mut pos: u32;\n\n let mut header: [u8; 12] = [0u8; 12];\n\n let mut numanswers: u16 = 0;\n\n let mut datalen: u16 = 0;\n\n if StrAlloc::copys(out, (*b\"\\0\").as_ptr()) == 0 {\n\n -1i32\n\n } else {\n\n pos = packet::copy(buf, len, 0u32, header.as_mut_ptr(), 12u32);\n\n (if pos == 0 {\n\n -1i32\n\n } else {\n", "file_path": "src/dns/ip4.rs", "rank": 58, "score": 11.374939152593317 }, { "content": " }\n\n if current_block == 1 {}\n\n}\n\n\n\npub unsafe fn connect4(s: i32, ip: *const u8, port: u16) -> i32 {\n\n let mut sa: libc::sockaddr_in = ::std::mem::zeroed();\n\n sa.sin_family = 2;\n\n uint16::pack_big(&mut sa.sin_port as (*mut u16) as (*mut u8), port);\n\n byte::copy(\n\n &mut sa.sin_addr as (*mut libc::in_addr) as (*mut u8),\n\n 4u32,\n\n ip as (*mut u8),\n\n );\n\n libc::connect(\n\n s,\n\n &mut sa as (*mut libc::sockaddr_in) as (*mut libc::sockaddr) as (*const libc::sockaddr),\n\n ::std::mem::size_of::<libc::sockaddr_in>() as (u32),\n\n )\n\n}\n\n\n", "file_path": "src/socket.rs", "rank": 59, "score": 11.339866260915041 }, { "content": " );\n\n r\n\n }\n\n}\n\n\n\npub unsafe fn send4(s: i32, buf: *const u8, len: i32, ip: *const u8, port: u16) -> i32 {\n\n let mut sa: libc::sockaddr_in = ::std::mem::zeroed();\n\n 
sa.sin_family = 2;\n\n uint16::pack_big(&mut sa.sin_port as (*mut u16) as (*mut u8), port);\n\n byte::copy(\n\n &mut sa.sin_addr as (*mut libc::in_addr) as (*mut u8),\n\n 4u32,\n\n ip as (*mut u8),\n\n );\n\n libc::sendto(\n\n s,\n\n buf as (*const ::libc::c_void),\n\n len as (usize),\n\n 0i32,\n\n &mut sa as (*mut libc::sockaddr_in) as (*mut libc::sockaddr) as (*const libc::sockaddr),\n", "file_path": "src/socket.rs", "rank": 60, "score": 11.238031628600616 }, { "content": "//! `dns/txt.rs`: TXT record packet functionality\n\n\n\nuse byte;\n\nuse stralloc::StrAlloc;\n\nuse super::{domain, packet, resolve};\n\nuse super::DnsTransmit;\n\nuse uint16;\n\n\n\npub unsafe fn packet(out: *mut StrAlloc, buf: *const u8, len: u32) -> i32 {\n\n let current_block;\n\n let mut pos: u32;\n\n let mut header: [u8; 12] = [0u8; 12];\n\n let mut numanswers: u16 = 0;\n\n let mut datalen: u16 = 0;\n\n let mut ch: u8;\n\n let mut txtlen: u32;\n\n let mut i: i32;\n\n if StrAlloc::copys(out, (*b\"\\0\").as_ptr()) == 0 {\n\n -1i32\n\n } else {\n", "file_path": "src/dns/txt.rs", "rank": 61, "score": 11.225458458221206 }, { "content": "use alloc;\n\nuse byte;\n\nuse libc;\n\nuse tai::Tai;\n\nuse uint32;\n\n\n\npub static mut MOTION: usize = 0usize;\n\n\n\nstatic mut X: *mut u8 = 0i32 as (*mut u8);\n\nstatic mut SIZE: u32 = 0u32;\n\nstatic mut HSIZE: u32 = 0u32;\n\nstatic mut WRITER: u32 = 0u32;\n\nstatic mut OLDEST: u32 = 0u32;\n\nstatic mut UNUSED: u32 = 0u32;\n\n\n\npub unsafe fn init(mut cachesize: u32) -> i32 {\n\n if !X.is_null() {\n\n alloc::free(X);\n\n X = 0i32 as (*mut u8);\n\n }\n", "file_path": "src/cache.rs", "rank": 62, "score": 11.010097835610628 }, { "content": "//! 
`dns/mx.rs`: MX record packet functionality\n\n\n\nuse byte;\n\nuse stralloc::StrAlloc;\n\nuse super::{domain, packet, resolve};\n\nuse super::DnsTransmit;\n\nuse uint16;\n\n\n\nstatic mut Q: *mut u8 = 0i32 as (*mut u8);\n\n\n\npub unsafe fn packet(out: *mut StrAlloc, buf: *const u8, len: u32) -> i32 {\n\n let current_block;\n\n let mut pos: u32;\n\n let mut header: [u8; 12] = [0u8; 12];\n\n let mut pref: [u8; 2] = [0u8; 2];\n\n let mut numanswers: u16 = 0;\n\n let mut datalen: u16 = 0;\n\n if StrAlloc::copys(out, (*b\"\\0\").as_ptr()) == 0 {\n\n -1i32\n\n } else {\n", "file_path": "src/dns/mx.rs", "rank": 63, "score": 11.005352206069446 }, { "content": "use buffer::{Buffer, STDOUT_BUFFER};\n\nuse cache;\n\nuse libc;\n\nuse string;\n\n\n", "file_path": "src/cachetest.rs", "rank": 64, "score": 10.792620956163523 }, { "content": "//! `openreadclose.rs`: Open a file, read it, and then close it\n\n//!\n\n//! This should probably be replaced by `std::io`\n\n\n\nuse errno::{errno, Errno};\n\nuse libc;\n\nuse open;\n\nuse readclose::readclose;\n\nuse stralloc::StrAlloc;\n\n\n\npub unsafe fn openreadclose(filename: *const u8, sa: *mut StrAlloc, bufsize: u32) -> i32 {\n\n let fd = open::read(filename);\n\n\n\n if fd == -1 {\n\n if errno() == Errno(libc::ENOENT) {\n\n 0\n\n } else {\n\n -1\n\n }\n\n } else if readclose(fd, sa, bufsize) == -1 {\n\n -1\n\n } else {\n\n 1\n\n }\n\n}\n", "file_path": "src/openreadclose.rs", "rank": 65, "score": 9.977729859274564 }, { "content": "use alloc;\n\nuse buffer::{self, Buffer};\n\nuse errno::{self, Errno};\n\nuse libc;\n\nuse uint32;\n\nuse super::hash as cdb_hash;\n\n\n\n#[derive(Copy)]\n\n#[repr(C)]\n\npub struct CdbHp {\n\n pub h: u32,\n\n pub p: u32,\n\n}\n\n\n\nimpl Clone for CdbHp {\n\n fn clone(&self) -> Self {\n\n *self\n\n }\n\n}\n\n\n", "file_path": "src/cdb/make.rs", "rank": 66, "score": 9.692375270739776 }, { "content": " y = alloc(n);\n\n if y.is_null() {\n\n 0i32\n\n } else {\n\n byte::copy(y, m, *x);\n\n free(*x);\n\n *x 
= y;\n\n 1i32\n\n }\n\n}\n\n\n\npub unsafe fn free(x: *mut u8) {\n\n libc::free(x as (*mut libc::c_void));\n\n}\n", "file_path": "src/alloc.rs", "rank": 67, "score": 9.556588341084582 }, { "content": "use errno::{self, Errno};\n\nuse iopause::iopause;\n\nuse libc;\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\n\n\n#[derive(Copy)]\n\n#[repr(C)]\n\npub struct pollfd {\n\n pub fd: i32,\n\n pub events: i16,\n\n pub revents: i16,\n\n}\n\n\n\nimpl Clone for pollfd {\n\n fn clone(&self) -> Self {\n\n *self\n\n }\n\n}\n\n\n", "file_path": "src/timeoutread.rs", "rank": 68, "score": 9.540615920248793 }, { "content": "use errno::{self, Errno};\n\nuse iopause::iopause;\n\nuse libc;\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\n\n\n#[derive(Copy)]\n\n#[repr(C)]\n\npub struct pollfd {\n\n pub fd: i32,\n\n pub events: i16,\n\n pub revents: i16,\n\n}\n\n\n\nimpl Clone for pollfd {\n\n fn clone(&self) -> Self {\n\n *self\n\n }\n\n}\n\n\n", "file_path": "src/timeoutwrite.rs", "rank": 69, "score": 9.540615920248793 }, { "content": "use byte;\n\nuse super::random;\n\n\n\npub unsafe fn sortip(s: *mut u8, mut n: u32) {\n\n let mut i: u32;\n\n let mut tmp: [u8; 4] = [0u8; 4];\n\n n = n >> 2i32;\n\n 'loop1: loop {\n\n if !(n > 1u32) {\n\n break;\n\n }\n\n i = random::random(n);\n\n n = n.wrapping_sub(1u32);\n\n byte::copy(tmp.as_mut_ptr(), 4u32, s.offset((i << 2i32) as (isize)));\n\n byte::copy(\n\n s.offset((i << 2i32) as (isize)),\n\n 4u32,\n\n s.offset((n << 2i32) as (isize)),\n\n );\n\n byte::copy(s.offset((n << 2i32) as (isize)), 4u32, tmp.as_mut_ptr());\n\n }\n\n}\n", "file_path": "src/dns/sortip.rs", "rank": 70, "score": 9.47342524803835 }, { "content": " (if pos == 0 {\n\n 1i32\n\n } else if domain::equal(\n\n dn as (*const u8),\n\n (*d).query.offset(14isize) as (*const u8),\n\n ) == 0\n\n {\n\n alloc::free(dn);\n\n 1i32\n\n } else {\n\n alloc::free(dn);\n\n pos = packet::copy(buf, len, pos, out.as_mut_ptr(), 4u32);\n\n (if pos == 0 {\n\n 1i32\n\n } else if byte::diff(out.as_mut_ptr(), 2u32, 
(*d).qtype.as_mut_ptr()) != 0 {\n\n 1i32\n\n } else if byte::diff(\n\n out.as_mut_ptr().offset(2isize),\n\n 2u32,\n\n (*b\"\\0\\x01\\0\").as_ptr() as (*mut u8),\n", "file_path": "src/dns/transmit.rs", "rank": 71, "score": 9.469107948718207 }, { "content": "/* `ndelay.rs`: Enable or disable O_NDELAY (i.e. Nagle's algorithm) */\n\n\n\nuse libc;\n\n\n\npub unsafe fn off(fd: i32) -> i32 {\n\n libc::fcntl(\n\n fd,\n\n libc::F_SETFL,\n\n libc::fcntl(fd, libc::F_GETFL, 0) & !libc::O_NONBLOCK,\n\n )\n\n}\n\n\n\npub unsafe fn on(fd: i32) -> i32 {\n\n libc::fcntl(\n\n fd,\n\n libc::F_SETFL,\n\n libc::fcntl(fd, libc::F_GETFL, 0) | libc::O_NONBLOCK,\n\n )\n\n}\n", "file_path": "src/ndelay.rs", "rank": 72, "score": 9.413547895680269 }, { "content": "//! `readclose.rs`: Read a fle into the buffer and then close it\n\n//!\n\n//! This is used exclusively by the `openreadclose` module, and can be\n\n//! replaced with `std::io`\n\n\n\nuse errno::{errno, Errno};\n\nuse libc;\n\nuse stralloc::StrAlloc;\n\n\n\npub unsafe extern \"C\" fn readclose(fd: i32, sa: *mut StrAlloc, bufsize: u32) -> i32 {\n\n if StrAlloc::copys(sa, (*b\"\\0\").as_ptr()) == 0 {\n\n libc::close(fd);\n\n return -1;\n\n }\n\n\n\n let current_block;\n\n let mut r: i32 = 0;\n\n 'loop1: loop {\n\n if StrAlloc::readyplus(sa, bufsize) == 0 {\n\n current_block = 7;\n", "file_path": "src/readclose.rs", "rank": 73, "score": 9.403446893894222 }, { "content": "//! `open.rs`: Helpers for opening files\n\n//!\n\n//! 
These should get replaced with `std::io`\n\n\n\nuse libc;\n\n\n\npub unsafe fn read(filename: *const u8) -> i32 {\n\n libc::open(filename as *const i8, libc::O_RDONLY | libc::O_NDELAY)\n\n}\n\n\n\npub unsafe fn trunc(filename: *const u8) -> i32 {\n\n libc::open(\n\n filename as *const i8,\n\n libc::O_WRONLY | libc::O_NDELAY | libc::O_TRUNC | libc::O_CREAT,\n\n 0o644,\n\n )\n\n}\n", "file_path": "src/open.rs", "rank": 74, "score": 9.285031704490075 }, { "content": "use buffer::{Buffer, STDOUT_BUFFER};\n\nuse dns;\n\nuse ip4;\n\nuse libc;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\n\n\nstatic mut seed: [u8; 128] = [0u8; 128];\n\n\n\n#[no_mangle]\n\npub static mut ip: [u8; 4] = [0u8; 4];\n\n\n\nstatic mut out: StrAlloc = StrAlloc {\n\n s: 0 as (*mut u8),\n\n len: 0u32,\n\n a: 0u32,\n\n};\n\n\n", "file_path": "src/dnsname.rs", "rank": 75, "score": 9.241094855955616 }, { "content": "//! `prot.rs`: UID/GID protection\n\n//!\n\n//! Replace this with Rust standard library functionality\n\n\n\nuse libc;\n\n\n\npub unsafe fn gid(mut g: i32) -> i32 {\n\n if libc::setgroups(1, &mut g as (*mut i32) as (*const u32)) == -1i32 {\n\n -1i32\n\n } else {\n\n libc::setgid(g as (u32))\n\n }\n\n}\n\n\n\npub unsafe fn uid(u: i32) -> i32 {\n\n libc::setuid(u as (u32))\n\n}\n", "file_path": "src/prot.rs", "rank": 76, "score": 9.139668215277217 }, { "content": "use buffer::{Buffer, STDOUT_BUFFER};\n\nuse dns;\n\nuse ip4;\n\nuse libc;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\n\n\nstatic mut seed: [u8; 128] = [0u8; 128];\n\n\n\nstatic mut in_: StrAlloc = StrAlloc {\n\n s: 0 as (*mut u8),\n\n len: 0u32,\n\n a: 0u32,\n\n};\n\n\n\nstatic mut fqdn: StrAlloc = StrAlloc {\n\n s: 0 as (*mut u8),\n\n len: 0u32,\n\n a: 0u32,\n\n};\n", "file_path": "src/dnsipq.rs", "rank": 77, "score": 9.036393046576745 }, { "content": "use buffer::{Buffer, STDOUT_BUFFER};\n\nuse libc;\n\nuse dns;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, 
STRERR_SYS};\n\n\n\nstatic mut seed: [u8; 128] = [0u8; 128];\n\n\n\nstatic mut fqdn: StrAlloc = StrAlloc {\n\n s: 0 as (*mut u8),\n\n len: 0u32,\n\n a: 0u32,\n\n};\n\n\n\nstatic mut out: StrAlloc = StrAlloc {\n\n s: 0 as (*mut u8),\n\n len: 0u32,\n\n a: 0u32,\n\n};\n\n\n", "file_path": "src/dnstxt.rs", "rank": 78, "score": 8.939370711874203 }, { "content": "use buffer::{self, Buffer};\n\nuse errno::{errno, Errno};\n\nuse libc;\n\nuse open;\n\nuse strerr::{StrErr, STRERR_SYS};\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\n\n\nextern \"C\" {\n\n static mut auto_home: *const u8;\n\n fn chdir(arg1: *const u8) -> i32;\n\n fn copyfrom(arg1: *mut Buffer);\n\n fn finish();\n\n fn getgid() -> u32;\n\n fn getpid() -> i32;\n\n fn getppid() -> i32;\n\n fn getpwnam(arg1: *const u8) -> *mut passwd;\n\n fn getuid() -> u32;\n\n fn init(arg1: *const u8, arg2: *const u8);\n\n fn makedir(arg1: *const u8);\n", "file_path": "src/dnscache-conf.rs", "rank": 79, "score": 8.77809772791793 }, { "content": "use buffer::{Buffer, STDOUT_BUFFER};\n\nuse dns;\n\nuse ip4;\n\nuse libc;\n\nuse stralloc::StrAlloc;\n\nuse strerr::{StrErr, STRERR_SYS};\n\n\n\nstatic mut seed: [u8; 128] = [0u8; 128];\n\n\n\nstatic mut fqdn: StrAlloc = StrAlloc {\n\n s: 0 as (*mut u8),\n\n len: 0u32,\n\n a: 0u32,\n\n};\n\n\n\nstatic mut out: StrAlloc = StrAlloc {\n\n s: 0 as (*mut u8),\n\n len: 0u32,\n\n a: 0u32,\n\n};\n\n\n\n#[no_mangle]\n\npub static mut str: [u8; 20] = [0u8; 20];\n\n\n", "file_path": "src/dnsip.rs", "rank": 80, "score": 8.769685560995722 }, { "content": "use super::{rcip, DnsTransmit};\n\nuse iopause::iopause;\n\nuse libc;\n\nuse tai::Tai;\n\nuse taia::TaiA;\n\n\n\npub static mut TX: DnsTransmit = DnsTransmit {\n\n query: 0i32 as (*mut u8),\n\n querylen: 0u32,\n\n packet: 0 as (*mut u8),\n\n packetlen: 0u32,\n\n s1: 0i32,\n\n tcpstate: 0i32,\n\n udploop: 0u32,\n\n curserver: 0u32,\n\n deadline: TaiA {\n\n sec: Tai { x: 0usize },\n\n nano: 0usize,\n\n atto: 0usize,\n\n },\n", "file_path": 
"src/dns/resolve.rs", "rank": 81, "score": 8.733164393712533 }, { "content": "use buffer::{self, Buffer};\n\nuse libc;\n\n\n\n#[no_mangle]\n\npub static mut bspace: [u8; 256] = [0u8; 256];\n\n\n\n#[no_mangle]\n\npub static mut b: Buffer = Buffer {\n\n x: bspace.as_mut_ptr(),\n\n p: 0u32,\n\n n: ::std::mem::size_of::<[u8; 256]>() as (u32),\n\n fd: 1i32,\n\n op: Some(buffer::unixwrite as buffer::Op),\n\n};\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn puts(mut s: *const u8) {\n\n if Buffer::puts(&mut b as (*mut Buffer), s) == -1i32 {\n\n libc::_exit(111i32);\n\n }\n\n}\n\n\n", "file_path": "src/auto-str.rs", "rank": 82, "score": 8.687972144060272 }, { "content": "//! `dns/packet.rs`: DNS packet utilities\n\n\n\nuse errno::{self, Errno};\n\nuse libc;\n\nuse super::domain;\n\n\n\npub unsafe fn copy(\n\n buf: *const u8,\n\n len: u32,\n\n mut pos: u32,\n\n mut out: *mut u8,\n\n mut outlen: u32,\n\n) -> u32 {\n\n let current_block;\n\n 'loop0: loop {\n\n if outlen == 0 {\n\n current_block = 1;\n\n break;\n\n }\n\n if pos >= len {\n", "file_path": "src/dns/packet.rs", "rank": 83, "score": 8.62332043096617 }, { "content": "use byte;\n\nuse case;\n\nuse cdb::Cdb;\n\nuse dns;\n\nuse open;\n\n\n\nextern \"C\" {\n\n static mut response: *mut u8;\n\n fn response_addbytes(arg1: *const u8, arg2: u32) -> i32;\n\n fn response_rfinish(arg1: i32);\n\n fn response_rstart(arg1: *const u8, arg2: *const u8, arg3: u32) -> i32;\n\n}\n\n\n\n#[no_mangle]\n\npub static mut fatal: *const u8 = (*b\"pickdns: fatal: \\0\").as_ptr();\n\n\n\n#[no_mangle]\n\npub static mut starting: *const u8 = (*b\"starting pickdns\\n\\0\").as_ptr();\n\n\n\nstatic mut seed: [u8; 128] = [0u8; 128];\n", "file_path": "src/pickdns.rs", "rank": 85, "score": 8.372822843226533 }, { "content": "//! 
`iopause.rs`: Pause while waiting for I/O\n\n\n\nuse libc;\n\nuse taia::TaiA;\n\n\n\npub unsafe fn iopause(x: *mut libc::pollfd, len: u32, deadline: *mut TaiA, stamp: *mut TaiA) {\n\n let mut t: TaiA;\n\n let millisecs: i32;\n\n let mut d: f64;\n\n let mut i: i32;\n\n if TaiA::less(deadline as (*const TaiA), stamp as (*const TaiA)) != 0 {\n\n millisecs = 0i32;\n\n } else {\n\n t = *stamp;\n\n TaiA::sub(\n\n &mut t as (*mut TaiA),\n\n deadline as (*const TaiA),\n\n &mut t as (*mut TaiA) as (*const TaiA),\n\n );\n\n d = TaiA::approx(&mut t as (*mut TaiA) as (*const TaiA));\n", "file_path": "src/iopause.rs", "rank": 86, "score": 8.370935723071453 }, { "content": " }\n\n if OK == 0 {\n\n if init(IP.as_mut_ptr()) == -1i32 {\n\n return -1i32;\n\n } else {\n\n TaiA::uint(&mut DEADLINE as (*mut TaiA), 600u32);\n\n TaiA::add(\n\n &mut DEADLINE as (*mut TaiA),\n\n &mut now as (*mut TaiA) as (*const TaiA),\n\n &mut DEADLINE as (*mut TaiA) as (*const TaiA),\n\n );\n\n USES = 10000u32;\n\n OK = 1i32;\n\n }\n\n }\n\n USES = USES.wrapping_sub(1u32);\n\n byte::copy(s, 64u32, IP.as_mut_ptr());\n\n 0i32\n\n}\n\n\n", "file_path": "src/dns/rcip.rs", "rank": 87, "score": 8.353276388276335 }, { "content": " droproot((*b\"dnscache: fatal: \\0\").as_ptr());\n\n socket::tryreservein(udp53, 131072i32);\n\n byte::zero(\n\n seed.as_mut_ptr(),\n\n ::std::mem::size_of::<[u8; 128]>() as (u32),\n\n );\n\n libc::read(\n\n 0i32,\n\n seed.as_mut_ptr() as (*mut libc::c_void),\n\n ::std::mem::size_of::<[u8; 128]>(),\n\n );\n\n dns::random::init(seed.as_mut_ptr() as (*const u8));\n\n libc::close(0i32);\n\n x = libc::getenv((*b\"IPSEND\\0\").as_ptr() as *const libc::c_char);\n\n if x.is_null() {\n\n StrErr::die(\n\n 111i32,\n\n (*b\"dnscache: fatal: \\0\").as_ptr(),\n\n (*b\"$IPSEND not set\\0\").as_ptr(),\n\n 0i32 as (*const u8),\n", "file_path": "src/dnscache.rs", "rank": 88, "score": 8.326258400874815 }, { "content": "//! `strerr.rs`: Error string functionality\n\n//!\n\n//! 
This should probably be replaced by panic!\n\n\n\nuse buffer::{Buffer, STDERR_BUFFER};\n\nuse errno::errno;\n\nuse libc;\n\n\n\n#[derive(Copy)]\n\n#[repr(C)]\n\npub struct StrErr {\n\n pub who: *mut StrErr,\n\n pub x: *const u8,\n\n pub y: *const u8,\n\n pub z: *const u8,\n\n}\n\n\n\npub static mut STRERR_SYS: StrErr = StrErr {\n\n who: 0 as (*mut StrErr),\n\n x: 0 as (*const u8),\n", "file_path": "src/strerr.rs", "rank": 89, "score": 8.278060946833547 }, { "content": "//! `dns/random.rs`: DNS randomization utilities\n\n\n\nuse taia::TaiA;\n\nuse libc;\n\nuse uint32;\n\n\n\nstatic mut SEED: [u32; 32] = [0u32; 32];\n\n\n\nstatic mut IN: [u32; 12] = [0u32; 12];\n\n\n\nstatic mut OUT: [u32; 8] = [0u32; 8];\n\n\n\nstatic mut OUTLEFT: i32 = 0i32;\n\n\n\npub unsafe fn init(data: *const u8) {\n\n let mut i: i32;\n\n let mut t: TaiA = ::std::mem::zeroed();\n\n let mut tpack: [u8; 16] = [0u8; 16];\n\n i = 0i32;\n\n 'loop1: loop {\n", "file_path": "src/dns/random.rs", "rank": 90, "score": 8.184986882392895 }, { "content": " name(control);\n\n space();\n\n name(referral);\n\n line();\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn log_servfail(mut dn: *const u8) {\n\n let mut x = libc::strerror(errno().0);\n\n string((*b\"servfail \\0\").as_ptr());\n\n name(dn);\n\n space();\n\n string(x);\n\n line();\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn log_rr(\n\n mut server: *const u8,\n\n mut q: *const u8,\n", "file_path": "src/log.rs", "rank": 91, "score": 8.1526087913815 }, { "content": "use buffer::Buffer;\n\nuse byte;\n\nuse stralloc::StrAlloc;\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn getln2(\n\n mut ss: *mut Buffer,\n\n mut sa: *mut StrAlloc,\n\n mut cont: *mut *mut u8,\n\n mut clen: *mut u32,\n\n mut sep: i32,\n\n) -> i32 {\n\n let mut _currentBlock;\n\n let mut x: *mut u8;\n\n let mut i: u32;\n\n let mut n: i32;\n\n if StrAlloc::ready(sa, 0u32) == 0 {\n\n -1i32\n\n } else {\n\n (*sa).len = 0u32;\n", "file_path": "src/getln2.rs", "rank": 92, 
"score": 7.9004497872626445 }, { "content": " if pos == 0 {\n\n current_block = 17;\n\n break;\n\n }\n\n pos = packet::copy(buf, len, pos, header.as_mut_ptr(), 10u32);\n\n if pos == 0 {\n\n current_block = 16;\n\n break;\n\n }\n\n uint16::unpack_big(\n\n header.as_mut_ptr().offset(8isize) as (*const u8),\n\n &mut datalen as (*mut u16),\n\n );\n\n if byte::diff(\n\n header.as_mut_ptr(),\n\n 2u32,\n\n (*b\"\\0\\x01\\0\").as_ptr() as (*mut u8),\n\n ) == 0\n\n {\n\n if byte::diff(\n", "file_path": "src/dns/ip4.rs", "rank": 93, "score": 7.8949903178047744 }, { "content": "//! `ip4.rs`: Common IPv4 functionality\n\n//!\n\n//! This can probably be replaced by the Rust standard library\n\n\n\nuse ulong;\n\n\n\npub unsafe fn fmt(mut s: *mut u8, ip: *const u8) -> u32 {\n\n let mut len: u32;\n\n let mut i: u32;\n\n len = 0u32;\n\n i = ulong::fmt(s, *ip.offset(0isize) as (usize));\n\n len = len.wrapping_add(i);\n\n if !s.is_null() {\n\n s = s.offset(i as (isize));\n\n }\n\n if !s.is_null() {\n\n *{\n\n let _old = s;\n\n s = s.offset(1isize);\n\n _old\n", "file_path": "src/ip4.rs", "rank": 94, "score": 7.79556598862497 }, { "content": "mod iopause;\n\nmod ndelay;\n\nmod open;\n\nmod openreadclose;\n\nmod prot;\n\nmod readclose;\n\nmod socket;\n\nmod stralloc;\n\nmod strerr;\n\nmod string;\n\nmod tai;\n\nmod taia;\n\nmod uint16;\n\nmod uint32;\n\nmod ulong;\n", "file_path": "src/lib.rs", "rank": 95, "score": 7.686401950231806 }, { "content": " &mut numqueries as (*mut u16),\n\n );\n\n uint16::unpack_big(\n\n data.as_mut_ptr().offset(6isize) as (*const u8),\n\n &mut numanswers as (*mut u16),\n\n );\n\n uint16::unpack_big(\n\n data.as_mut_ptr().offset(8isize) as (*const u8),\n\n &mut numauthority as (*mut u16),\n\n );\n\n uint16::unpack_big(\n\n data.as_mut_ptr().offset(10isize) as (*const u8),\n\n &mut numglue as (*mut u16),\n\n );\n\n (if StrAlloc::catulong0(out, len as (usize), 0u32) == 0 {\n\n 0u32\n\n } else if StrAlloc::cats(out, (*b\" bytes, \\0\").as_ptr()) == 0 {\n\n 
0u32\n\n } else if StrAlloc::catulong0(out, numqueries as (usize), 0u32) == 0 {\n\n 0u32\n", "file_path": "src/printpacket.rs", "rank": 96, "score": 7.624688452723255 }, { "content": " strnum.as_mut_ptr() as (*const u8),\n\n ulong::fmt(strnum.as_mut_ptr(), u),\n\n );\n\n Buffer::puts(STDOUT_BUFFER.as_mut_ptr(), (*b\".\\0\").as_ptr());\n\n u = ip[3usize] as (usize);\n\n Buffer::put(\n\n STDOUT_BUFFER.as_mut_ptr(),\n\n strnum.as_mut_ptr() as (*const u8),\n\n ulong::fmt(strnum.as_mut_ptr(), u),\n\n );\n\n Buffer::puts(STDOUT_BUFFER.as_mut_ptr(), (*b\"\\n\\0\").as_ptr());\n\n }\n\n Buffer::flush(STDOUT_BUFFER.as_mut_ptr());\n\n libc::_exit(0i32);\n\n}\n", "file_path": "src/random-ip.rs", "rank": 97, "score": 7.594868825878379 }, { "content": " 0i32 as (*const u8),\n\n &mut STRERR_SYS as (*mut StrErr) as (*const StrErr),\n\n );\n\n }\n\n x = libc::getenv((*b\"GID\\0\").as_ptr() as *const libc::c_char) as *mut u8;\n\n if x.is_null() {\n\n StrErr::die(\n\n 111i32,\n\n fatal,\n\n (*b\"$GID not set\\0\").as_ptr(),\n\n 0i32 as (*const u8),\n\n 0i32 as (*const u8),\n\n 0i32 as (*const u8),\n\n 0i32 as (*const u8),\n\n 0i32 as (*const StrErr),\n\n );\n\n }\n\n ulong::scan(x as (*const u8), &mut id as (*mut usize));\n\n if prot::gid(id as (i32)) == -1i32 {\n\n StrErr::die(\n", "file_path": "src/droproot.rs", "rank": 98, "score": 7.425750350801245 }, { "content": " (*d).curserver = (*d).curserver.wrapping_add(1u32);\n\n DnsTransmit::thistcp(d)\n\n }\n\n\n\n unsafe fn irrelevant(d: *mut DnsTransmit, buf: *const u8, len: u32) -> i32 {\n\n let mut out: [u8; 12] = [0u8; 12];\n\n let mut dn: *mut u8;\n\n let mut pos: u32;\n\n pos = packet::copy(buf, len, 0u32, out.as_mut_ptr(), 12u32);\n\n if pos == 0 {\n\n 1i32\n\n } else if byte::diff(out.as_mut_ptr(), 2u32, (*d).query.offset(2isize)) != 0 {\n\n 1i32\n\n } else if out[4usize] as (i32) != 0i32 {\n\n 1i32\n\n } else if out[5usize] as (i32) != 1i32 {\n\n 1i32\n\n } else {\n\n dn = 0i32 as (*mut u8);\n\n pos = packet::getname(buf, len, 
pos, &mut dn as (*mut *mut u8));\n", "file_path": "src/dns/transmit.rs", "rank": 99, "score": 7.366851636453488 } ]
Rust
src/lisp_subr.rs
tjshaffer21/rustl
31c786c32b35f364a2b2c282874c2bda81fe01ef
use std::rc::Rc; use lisp_types::{ LispResult, LispParam, sexpr::*, errors::* }; use environment::Env; pub fn atom<'a>(args_ptr: &LispParam<'a>, env: &'a Env<'a>) -> LispResult<'a> { let args = args_ptr.borrow(); let len = args.len(); if len == 0 { Err(LispError::TooFewArguments) } else if len > 1 { Err(LispError::TooManyArguments) } else { match args.front().unwrap() { SExpr::Atom(_) => Ok(env.get(&"t").unwrap()), _ => Ok(env.get(&"nil").unwrap()), } } } pub fn eq<'a>(args_ptr: &LispParam<'a>, env: &'a Env<'a>) -> LispResult<'a> { let mut args = args_ptr.borrow_mut(); if args.len() > 2 { return Err(LispError::TooManyArguments) } if let Some(x) = args.pop_front() { if let Some(y) = args.pop_front() { if x == y { Ok(env.get(&"t").unwrap()) } else { Ok(env.get(&"nil").unwrap()) } } else { Err(LispError::TooFewArguments) } } else { Err(LispError::TooFewArguments) } } pub fn car<'a>(args_ptr: &LispParam<'a>, _env: &Env) -> LispResult<'a> { let mut args = args_ptr.borrow_mut(); let len = args.len(); if len == 0 { Err(LispError::TooFewArguments) } else if len > 1 { Err(LispError::TooManyArguments) } else { if let Some(val) = args.pop_front() { match val { SExpr::Cons(mut v) => { if let Some(r) = v.pop_front() { Ok(Rc::new(r)) } else { Err(LispError::InvalidArgument) } }, _ => Err(LispError::InvalidArgument), } } else { Err(LispError::InvalidArgument) } } } pub fn cdr<'a>(args_ptr: &LispParam<'a>, _env: &Env) -> LispResult<'a> { let mut args = args_ptr.borrow_mut(); let len = args.len(); if len == 0 { Err(LispError::TooFewArguments) } else if len > 1 { Err(LispError::TooManyArguments) } else { let arg = args.pop_front(); if let Some(val) = arg { match val { SExpr::Cons(mut v) => { v.pop_front(); Ok(Rc::new(create_sexpr!(cons v))) }, _ => Err(LispError::InvalidArgument) } } else { Err(LispError::InvalidArgument) } } } pub fn cons<'a>(args_ptr: &LispParam<'a>, _env: &Env) -> LispResult<'a> { let mut args = args_ptr.borrow_mut(); let len = args.len(); if len <= 1 { 
Err(LispError::TooFewArguments) } else if len > 2 { Err(LispError::TooManyArguments) } else { if let Some(elov) = args.pop_front() { if let Some(eltv) = args.pop_front() { let mut new_list = std::collections::LinkedList::new(); new_list.push_front(eltv); new_list.push_front(elov); Ok(Rc::new(create_sexpr!(cons new_list))) } else { Err(LispError::InvalidArgument) } } else { Err(LispError::InvalidArgument) } } } pub fn add<'a>(args_ptr: &LispParam<'a>, _env: &Env) -> LispResult<'a> { let args = args_ptr.borrow_mut(); let mut sum: f64 = 0.0; let mut float_flag = false; for i in args.iter() { match i { SExpr::Atom(Atom::Number(Number::Integer(i))) => sum += *i as f64, SExpr::Atom(Atom::Number(Number::FloatingPoint(f))) => { float_flag = true; sum += *f }, _ => return Err(LispError::InvalidArgument), } } if float_flag { Ok(Rc::new(create_sexpr!(float sum))) } else { Ok(Rc::new(create_sexpr!(int sum as i64))) } }
use std::rc::Rc; use lisp_types::{ LispResult, LispParam, sexpr::*, errors::* }; use environment::Env; pub fn atom<'a>(args_ptr: &LispParam<'a>, env: &'a Env<'a>) -> LispResult<'a> { let args = args_ptr.borrow(); let len = args.len(); if len == 0 { Err(LispError::TooFewArguments) }
pub fn eq<'a>(args_ptr: &LispParam<'a>, env: &'a Env<'a>) -> LispResult<'a> { let mut args = args_ptr.borrow_mut(); if args.len() > 2 { return Err(LispError::TooManyArguments) } if let Some(x) = args.pop_front() { if let Some(y) = args.pop_front() { if x == y { Ok(env.get(&"t").unwrap()) } else { Ok(env.get(&"nil").unwrap()) } } else { Err(LispError::TooFewArguments) } } else { Err(LispError::TooFewArguments) } } pub fn car<'a>(args_ptr: &LispParam<'a>, _env: &Env) -> LispResult<'a> { let mut args = args_ptr.borrow_mut(); let len = args.len(); if len == 0 { Err(LispError::TooFewArguments) } else if len > 1 { Err(LispError::TooManyArguments) } else { if let Some(val) = args.pop_front() { match val { SExpr::Cons(mut v) => { if let Some(r) = v.pop_front() { Ok(Rc::new(r)) } else { Err(LispError::InvalidArgument) } }, _ => Err(LispError::InvalidArgument), } } else { Err(LispError::InvalidArgument) } } } pub fn cdr<'a>(args_ptr: &LispParam<'a>, _env: &Env) -> LispResult<'a> { let mut args = args_ptr.borrow_mut(); let len = args.len(); if len == 0 { Err(LispError::TooFewArguments) } else if len > 1 { Err(LispError::TooManyArguments) } else { let arg = args.pop_front(); if let Some(val) = arg { match val { SExpr::Cons(mut v) => { v.pop_front(); Ok(Rc::new(create_sexpr!(cons v))) }, _ => Err(LispError::InvalidArgument) } } else { Err(LispError::InvalidArgument) } } } pub fn cons<'a>(args_ptr: &LispParam<'a>, _env: &Env) -> LispResult<'a> { let mut args = args_ptr.borrow_mut(); let len = args.len(); if len <= 1 { Err(LispError::TooFewArguments) } else if len > 2 { Err(LispError::TooManyArguments) } else { if let Some(elov) = args.pop_front() { if let Some(eltv) = args.pop_front() { let mut new_list = std::collections::LinkedList::new(); new_list.push_front(eltv); new_list.push_front(elov); Ok(Rc::new(create_sexpr!(cons new_list))) } else { Err(LispError::InvalidArgument) } } else { Err(LispError::InvalidArgument) } } } pub fn add<'a>(args_ptr: &LispParam<'a>, _env: &Env) -> 
LispResult<'a> { let args = args_ptr.borrow_mut(); let mut sum: f64 = 0.0; let mut float_flag = false; for i in args.iter() { match i { SExpr::Atom(Atom::Number(Number::Integer(i))) => sum += *i as f64, SExpr::Atom(Atom::Number(Number::FloatingPoint(f))) => { float_flag = true; sum += *f }, _ => return Err(LispError::InvalidArgument), } } if float_flag { Ok(Rc::new(create_sexpr!(float sum))) } else { Ok(Rc::new(create_sexpr!(int sum as i64))) } }
else if len > 1 { Err(LispError::TooManyArguments) } else { match args.front().unwrap() { SExpr::Atom(_) => Ok(env.get(&"t").unwrap()), _ => Ok(env.get(&"nil").unwrap()), } } }
function_block-function_prefix_line
[]
Rust
src/parse/header.rs
MikuroXina/bms-rs
16f5301fe8847a6aa6791dd7e1a3308204172cf4
use std::{collections::HashMap, fmt::Debug, path::PathBuf}; use super::{ParseError, Result}; use crate::lex::{command::*, token::Token}; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum LnType { Rdm, Mgq, } impl Default for LnType { fn default() -> Self { Self::Rdm } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Bmp { pub file: PathBuf, pub transparent_color: Argb, } #[derive(Debug, Default, Clone, PartialEq)] pub struct Header { pub player: Option<PlayerMode>, pub genre: Option<String>, pub title: Option<String>, pub subtitle: Option<String>, pub artist: Option<String>, pub sub_artist: Option<String>, pub maker: Option<String>, pub comment: Option<Vec<String>>, pub email: Option<String>, pub url: Option<String>, pub options: Option<Vec<String>>, pub bpm: Option<f64>, pub play_level: Option<u8>, pub rank: Option<JudgeLevel>, pub difficulty: Option<u8>, pub total: Option<f64>, pub volume: Volume, pub ln_type: LnType, pub poor_bga_mode: PoorMode, pub back_bmp: Option<PathBuf>, pub stage_file: Option<PathBuf>, pub banner: Option<PathBuf>, pub is_octave: bool, pub midi_file: Option<PathBuf>, pub video_file: Option<PathBuf>, pub wav_path_root: Option<PathBuf>, pub wav_files: HashMap<ObjId, PathBuf>, pub poor_bmp: Option<PathBuf>, pub bmp_files: HashMap<ObjId, Bmp>, pub bpm_changes: HashMap<ObjId, f64>, pub texts: HashMap<ObjId, String>, pub change_options: HashMap<ObjId, String>, } impl Header { pub(crate) fn parse(&mut self, token: &Token) -> Result<()> { match *token { Token::Artist(artist) => self.artist = Some(artist.into()), Token::AtBga { .. } => todo!(), Token::Banner(file) => self.banner = Some(file.into()), Token::BackBmp(bmp) => self.back_bmp = Some(bmp.into()), Token::Bga { .. 
} => todo!(), Token::Bmp(id, path) => { if id.is_none() { self.poor_bmp = Some(path.into()); return Ok(()); } let id = id.unwrap(); if self .bmp_files .insert( id, Bmp { file: path.into(), transparent_color: Argb::default(), }, ) .is_some() { eprintln!( "duplicated bmp definition found: {:?} {:?}", id, path.display() ); } } Token::Bpm(bpm) => { if let Ok(parsed) = bpm.parse() { if 0.0 < parsed { self.bpm = Some(parsed); } else { eprintln!("not positive bpm found: {:?}", parsed); } } else { eprintln!("not number bpm found: {:?}", bpm); } } Token::BpmChange(id, bpm) => { let parsed: f64 = bpm .parse() .map_err(|_| ParseError::BpmParseError(bpm.into()))?; if parsed <= 0.0 || !parsed.is_finite() { return Err(ParseError::BpmParseError(bpm.into())); } if self.bpm_changes.insert(id, parsed).is_some() { eprintln!("duplicated bpm change definition found: {:?} {:?}", id, bpm); } } Token::ChangeOption(id, option) => { if self.change_options.insert(id, option.into()).is_some() { eprintln!( "duplicated change option definition found: {:?} {}", id, option ); } } Token::Comment(comment) => self .comment .get_or_insert_with(Vec::new) .push(comment.into()), Token::Difficulty(diff) => self.difficulty = Some(diff), Token::Email(email) => self.email = Some(email.into()), Token::ExBmp(id, transparent_color, path) => { if self .bmp_files .insert( id, Bmp { file: path.into(), transparent_color, }, ) .is_some() { eprintln!( "duplicated bmp definition found: {:?} {:?}", id, path.display() ); } } Token::ExRank(_, _) => todo!(), Token::ExWav(_, _, _) => todo!(), Token::Genre(genre) => self.genre = Some(genre.to_owned()), Token::LnTypeRdm => { self.ln_type = LnType::Rdm; } Token::LnTypeMgq => { self.ln_type = LnType::Mgq; } Token::Maker(maker) => self.maker = Some(maker.into()), Token::MidiFile(midi_file) => self.midi_file = Some(midi_file.into()), Token::OctFp => self.is_octave = true, Token::Option(option) => self .options .get_or_insert_with(Vec::new) .push(option.into()), 
Token::PathWav(wav_path_root) => self.wav_path_root = Some(wav_path_root.into()), Token::Player(player) => self.player = Some(player), Token::PlayLevel(play_level) => self.play_level = Some(play_level), Token::PoorBga(poor_bga_mode) => self.poor_bga_mode = poor_bga_mode, Token::Rank(rank) => self.rank = Some(rank), Token::StageFile(file) => self.stage_file = Some(file.into()), Token::SubArtist(sub_artist) => self.sub_artist = Some(sub_artist.into()), Token::SubTitle(subtitle) => self.subtitle = Some(subtitle.into()), Token::Text(id, text) => { if self.texts.insert(id, text.into()).is_some() { eprintln!("duplicated text definition found: {:?} {}", id, text); } } Token::Title(title) => self.title = Some(title.into()), Token::Total(total) => { if let Ok(parsed) = total.parse() { self.total = Some(parsed); } else { eprintln!("not number total found: {:?}", total); } } Token::Url(url) => self.url = Some(url.into()), Token::VideoFile(video_file) => self.video_file = Some(video_file.into()), Token::VolWav(volume) => self.volume = volume, Token::Wav(id, path) => { if self.wav_files.insert(id, path.into()).is_some() { eprintln!( "duplicated wav definition found: {:?} {:?}", id, path.display() ); } } _ => {} } Ok(()) } }
use std::{collections::HashMap, fmt::Debug, path::PathBuf}; use super::{ParseError, Result}; use crate::lex::{command::*, token::Token}; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum LnType { Rdm, Mgq, } impl Default for LnType { fn default() -> Self { Self::Rdm } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Bmp { pub file: PathBuf, pub transparent_color: Argb, } #[derive(Debug, Default, Clone, PartialEq)] pub struct Header { pub player: Option<PlayerMode>, pub genre: Option<String>, pub title: Option<String>, pub subtitle: Option<String>, pub artist: Option<String>, pub sub_artist: Option<String>, pub maker: Option<String>, pub comment: Option<Vec<String>>, pub email: Option<String>, pub url: Option<String>, pub options: Option<Vec<String>>, pub bpm: Option<f64>, pub play_level: Option<u8>, pub rank: Option<JudgeLevel>, pub difficulty: Option<u8>, pub total: Option<f64>, pub volume: Volume, pub ln_type: LnType, pub poor_bga_mode: PoorMode, pub back_bmp: Option<PathBuf>, pub stage_file: Option<PathBuf>, pub banner: Option<PathBuf>, pub is_octave: bool, pub midi_file: Option<PathBuf>, pub video_file: Option<PathBuf>, pub wav_path_root: Option<PathBuf>, pub wav_files: HashMap<ObjId, PathBuf>, pub poor_bmp: Option<PathBuf>, pub bmp_files: HashMap<ObjId, Bmp>, pub bpm_changes: HashMap<ObjId, f64>, pub texts: HashMap<ObjId, String>, pub change_options: HashMap<ObjId, String>, } impl Header { pub(crate) fn parse(&mut self, token: &Token) -> Result<()> { match *token { Token::Artist(artist) => self.artist = Some(artist.into()), Token::AtBga { .. } => todo!(), Token::Banner(file) => self.banner = Some(file.into()), Token::BackBmp(bmp) => self.back_bmp = Some(bmp.into()), Token::Bga { .. 
} => todo!(), Token::Bmp(id, path) => { if id.is_none() { self.poor_bmp = Some(path.into()); return Ok(()); } let id = id.unwrap(); if self .bmp_files .insert( id, Bmp { file: path.into(), transparent_color: Argb::default(), }, ) .is_some() { eprintln!( "duplicated bmp definition found: {:?} {:?}", id, path.display() ); } } Token::Bpm(bpm) => { if let Ok(parsed) = bpm.parse() { if 0.0 < parsed { self.bpm = Some(parsed); } else { eprintln!("not positive bpm found: {:?}", parsed); } } else { eprintln!("not number bpm found: {:?}", bpm); } } Token::BpmChange(id, bpm) => { let parsed: f64 = bpm .parse() .map_err(|_| ParseError::BpmParseError(bpm.into()))?; if parsed <= 0.0 || !parsed.is_finite() { return Err(ParseError::BpmParseError(bpm.into())); } if self.bpm_changes.insert(id, parsed).is_some() { eprintln!("duplicated bpm change definition found: {:?} {:?}", id, bpm); } } Token::ChangeOption(id, option) => { if self.change_options.insert(id, option.into()).is_some() { eprintln!( "duplicated change option definition found: {:?} {}", id, option ); } } Token::Comment(comment) => self .comment .get_or_insert_with(Vec::new) .push(comment.into()), Token::Difficulty(diff) => self.difficulty = Some(diff), Token::Email(email) => self.email = Some(email.into()), Token::ExBmp(id, transparent_color, path) => { if self .bmp_files .insert( id, Bmp { file: path.into(), transparent_color, }, ) .is_some() { eprintln!( "duplicated bmp definition found: {:?} {:?}", id, path.display() ); } } Token::ExRank(_, _) => todo!(), Token::ExWav(_, _, _) => todo!(), Token::Genre(genre) => self.genre = Some(genre.to_owned()), Token::LnTypeRdm => { self.ln_type = LnType::Rdm; } Token::LnTypeMgq => { self.ln_type = LnType::Mgq; } Token::Maker(maker) => self.maker = Some(maker.into()), Token::MidiFile(midi_file) => self.midi_file = Some(midi_file.into()), Token::OctFp => self.is_octave = true, Token::Option(option) => self .options .get_or_insert_with(Vec::new) .push(option.into()), 
Token::PathWav(wav_path_root) => self.wav_path_root = Some(wav_path_root.into()), Token::Player(player) => self.player = Some(player), Token::PlayLevel(play_level) => self.play_level = Some(play_level), Token::PoorBga(poor_bga_mode) => self.poor_bga_mode = poor_bga_mode, Token::Rank(rank) => self.rank = Some(rank), Token::StageFile(file) => self.stage_file = Some(file.into()), Token::SubArtist(sub_artist) => self.sub_artist = Some(sub_artist.into()), Token::SubTitle(subtitle) => self.subtitle = Some(subtitle.into()), Token::Text(id, text) => { if self.texts.insert(id, text.into()).is_some() { eprintln!("duplicated text definition found: {:?} {}", id, text); } } Token::Title(title) => self.title = Some(title.into()), Token::Total(total) => { if let Ok(parsed) = total.parse() { self.total = Some(parsed); }
}
else { eprintln!("not number total found: {:?}", total); } } Token::Url(url) => self.url = Some(url.into()), Token::VideoFile(video_file) => self.video_file = Some(video_file.into()), Token::VolWav(volume) => self.volume = volume, Token::Wav(id, path) => { if self.wav_files.insert(id, path.into()).is_some() { eprintln!( "duplicated wav definition found: {:?} {:?}", id, path.display() ); } } _ => {} } Ok(()) }
function_block-function_prefix_line
[ { "content": "/// Analyzes and converts the BMS format text into [`TokenStream`].\n\npub fn parse(source: &str) -> Result<TokenStream> {\n\n let mut cursor = Cursor::new(source);\n\n\n\n let mut tokens = vec![];\n\n while !cursor.is_end() {\n\n tokens.push(Token::parse(&mut cursor)?);\n\n }\n\n Ok(TokenStream::from_tokens(tokens))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::path::Path;\n\n\n\n use super::{command::*, parse, token::Token::*};\n\n\n\n #[test]\n\n fn simple() {\n\n const SRC: &str = r\"\n\n#PLAYER 1\n", "file_path": "src/lex.rs", "rank": 0, "score": 97000.0630764865 }, { "content": "#[test]\n\nfn test_lal() {\n\n let source = include_str!(\"lilith_mx.bms\");\n\n let ts = parse(source).expect(\"must be parsed\");\n\n let bms = Bms::from_token_stream(&ts, RngMock([1])).expect(\"must be parsed\");\n\n eprintln!(\"{:?}\", bms);\n\n}\n\n\n", "file_path": "tests/files.rs", "rank": 1, "score": 57560.746811263816 }, { "content": "#[test]\n\nfn test_nc() {\n\n let source = include_str!(\"nc_mx.bme\");\n\n let ts = parse(source).expect(\"must be parsed\");\n\n let bms = Bms::from_token_stream(&ts, RngMock([1])).expect(\"must be parsed\");\n\n eprintln!(\"{:?}\", bms);\n\n}\n\n\n", "file_path": "tests/files.rs", "rank": 2, "score": 57560.746811263816 }, { "content": "#[test]\n\nfn test_j219() {\n\n let source = include_str!(\"J219_7key.bms\");\n\n let ts = parse(source).expect(\"must be parsed\");\n\n let bms = Bms::from_token_stream(&ts, RngMock([1])).expect(\"must be parsed\");\n\n eprintln!(\"{:?}\", bms);\n\n}\n", "file_path": "tests/files.rs", "rank": 3, "score": 57560.746811263816 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nenum ClauseState {\n\n Random(u32),\n\n If(bool),\n\n}\n\n\n\npub struct RandomParser<R> {\n\n rng: R,\n\n random_stack: Vec<ClauseState>,\n\n}\n\n\n\nimpl<R: Rng> RandomParser<R> {\n\n pub fn new(rng: R) -> Self {\n\n Self {\n\n rng,\n\n random_stack: vec![],\n\n }\n\n }\n\n\n\n pub fn parse(&mut 
self, token: &Token) -> ControlFlow<Result<()>> {\n\n match *token {\n", "file_path": "src/parse/random.rs", "rank": 4, "score": 36141.34529972496 }, { "content": "/// A random generator for parsing BMS.\n\npub trait Rng {\n\n /// Generates a random integer within the `range`. Returning the number outside the range will result weird.\n\n fn gen(&mut self, range: RangeInclusive<u32>) -> u32;\n\n}\n\n\n\n/// A random generator for mocking. This generates the number from the array in rotation.\n\npub struct RngMock<const N: usize>(pub [u32; N]);\n\n\n\nimpl<const N: usize> Rng for RngMock<N> {\n\n fn gen(&mut self, _range: std::ops::RangeInclusive<u32>) -> u32 {\n\n self.0.rotate_left(1);\n\n self.0[N - 1]\n\n }\n\n}\n", "file_path": "src/parse/rng.rs", "rank": 5, "score": 33849.11167850432 }, { "content": "#[test]\n\nfn test1() {\n\n let mut cursor = Cursor::new(\n\n r\"\n\n hoge\n\n foo\n\n bar bar\n\n \",\n\n );\n\n\n\n assert_eq!(cursor.line(), 1);\n\n assert_eq!(cursor.col(), 1);\n\n assert_eq!(cursor.next_token(), Some(\"hoge\"));\n\n assert_eq!(cursor.line(), 2);\n\n assert_eq!(cursor.col(), 17);\n\n assert_eq!(cursor.next_token(), Some(\"foo\"));\n\n assert_eq!(cursor.line(), 3);\n\n assert_eq!(cursor.col(), 16);\n\n assert_eq!(cursor.next_token(), Some(\"bar\"));\n\n assert_eq!(cursor.line(), 4);\n\n assert_eq!(cursor.col(), 16);\n\n assert_eq!(cursor.next_token(), Some(\"bar\"));\n\n assert_eq!(cursor.line(), 4);\n\n assert_eq!(cursor.col(), 20);\n\n}\n\n\n", "file_path": "src/lex/cursor.rs", "rank": 6, "score": 32268.856228569282 }, { "content": "#[test]\n\nfn test2() {\n\n const SOURCE: &str = r\"\n\n #TITLE 花たちに希望を [SP ANOTHER]\n\n #ARTIST Sound piercer feat.DAZBEE\n\n #BPM 187\n\n \";\n\n\n\n let mut cursor = Cursor::new(SOURCE);\n\n\n\n assert_eq!(cursor.next_token(), Some(\"#TITLE\"));\n\n assert_eq!(cursor.next_line_remaining(), \"花たちに希望を [SP ANOTHER]\");\n\n assert_eq!(cursor.next_token(), Some(\"#ARTIST\"));\n\n 
assert_eq!(cursor.next_line_remaining(), \"Sound piercer feat.DAZBEE\");\n\n assert_eq!(cursor.next_token(), Some(\"#BPM\"));\n\n assert_eq!(cursor.next_line_remaining(), \"187\");\n\n}\n", "file_path": "src/lex/cursor.rs", "rank": 7, "score": 32268.856228569282 }, { "content": "#[test]\n\nfn nested_random() {\n\n const SRC: &str = r\"\n\n #00111:11000000\n\n\n\n #RANDOM 2\n\n\n\n #IF 1\n\n #00112:00220000\n\n\n\n #RANDOM 2\n\n\n\n #IF 1\n\n #00115:00550000\n\n #ENDIF\n\n\n\n #IF 2\n\n #00116:00006600\n\n #ENDIF\n\n\n\n #ENDRANDOM\n", "file_path": "tests/nested_random.rs", "rank": 8, "score": 31085.12033253574 }, { "content": "use bms_rs::{\n\n lex::parse,\n\n parse::{rng::RngMock, Bms},\n\n};\n\n\n\n#[test]\n", "file_path": "tests/files.rs", "rank": 9, "score": 28068.91011892232 }, { "content": "\n\n break Ok(match command.to_uppercase().as_str() {\n\n \"#PLAYER\" => Self::Player(PlayerMode::from(c)?),\n\n \"#GENRE\" => Self::Genre(c.next_line_remaining()),\n\n \"#TITLE\" => Self::Title(c.next_line_remaining()),\n\n \"#SUBTITLE\" => Self::Title(c.next_line_remaining()),\n\n \"#ARTIST\" => Self::Artist(c.next_line_remaining()),\n\n \"#SUBARTIST\" => Self::SubArtist(c.next_line_remaining()),\n\n \"#DIFFICULTY\" => Self::Difficulty(\n\n c.next_token()\n\n .ok_or_else(|| c.err_expected_token(\"difficulty\"))?\n\n .parse()\n\n .map_err(|_| c.err_expected_token(\"integer\"))?,\n\n ),\n\n \"#STAEGFILE\" => Self::StageFile(\n\n c.next_token()\n\n .map(Path::new)\n\n .ok_or_else(|| c.err_expected_token(\"stage filename\"))?,\n\n ),\n\n \"#BANNER\" => Self::Banner(\n", "file_path": "src/lex/token.rs", "rank": 22, "score": 25835.985805690656 }, { "content": " },\n\n /// `#BMP[01-ZZ] [filename]`. Defines the background image/movie object. The file specified may be not only BMP format, and also PNG, AVI, MP4, MKV and others. Its size should be less than or equal to 256x256. The black (`#000000`) pixel in the image will be treated as transparent. 
When the id `00` is specified, this first field will be `None` and the image will be shown when the player get mistaken.\n\n Bmp(Option<ObjId>, &'a Path),\n\n /// `#BPM [f64]`. Defines the base Beats-Per-Minute of the score. Defaults to 130, but some players don't conform to it.\n\n Bpm(&'a str),\n\n /// `#BPM[01-ZZ] [f64]`. Defines the Beats-Per-Minute change object.\n\n BpmChange(ObjId, &'a str),\n\n /// `#CASE [u32]`. Starts a case scope if the integer equals to the generated random number. If there's no `#SKIP` command in the scope, the parsing will **fallthrough** to the next `#CASE` or `#DEF`. See also [`Token::Switch`].\n\n Case(u32),\n\n /// `#CHANGEOPTION[01-ZZ] [string]`. Defines the play option change object. Some players interpret and apply the preferences.\n\n ChangeOption(ObjId, &'a str),\n\n /// `#COMMENT [string]`. Defines the text which is shown in the music select view. This may or may not be surrounded by double-quotes.\n\n Comment(&'a str),\n\n /// `#DEF`. Starts a case scope if any `#CASE` had not matched to the generated random number. It must be placed in the end of the switch scope. See also [`Token::Switch`].\n\n Def,\n\n /// `#DIFFICULTY [1-5]`. Defines the difficulty of the score. It can be used to sort the score having the same title.\n\n Difficulty(u8),\n\n /// `#ELSEIF [u32]`. Starts an if scope when the preceding `#IF` had not matched to the generated random number. It must be in an if scope.\n\n Else,\n\n /// `#ELSEIF [u32]`. Starts an if scope when the integer equals to the generated random number. It must be in an if scope. If preceding `#IF` had matched to the generated, this scope don't start. Syntax sugar for:\n", "file_path": "src/lex/token.rs", "rank": 23, "score": 25833.295276059707 }, { "content": " /// `#TITLE [string]`. Defines the title of the music.\n\n Title(&'a str),\n\n /// `#TOTAL [f64]`. Defines the total gauge percentage when all notes is got as PERFECT.\n\n Total(&'a str),\n\n /// `%URL [string]`. 
The url of this score file.\n\n Url(&'a str),\n\n /// `#VIDEOFILE [filename]` / `#MOVIE [filename]`. Defines the background movie file. The audio track in the movie file should not be played. The play should start from the track `000`.\n\n VideoFile(&'a Path),\n\n /// `#VOLWAV [0-255]`. Defines the relative volume percentage of the sound in the score.\n\n VolWav(Volume),\n\n /// `#WAV[01-ZZ] [filename]`. Defines the key sound object. When same id multiple objects ring at same time, it must be played only one. The file specified may be not only WAV format, and also OGG, MP3 and others.\n\n Wav(ObjId, &'a Path),\n\n}\n\n\n\nimpl<'a> Token<'a> {\n\n pub(crate) fn parse(c: &mut Cursor<'a>) -> Result<Self> {\n\n loop {\n\n let command = c\n\n .next_token()\n\n .ok_or_else(|| c.err_expected_token(\"command\"))?;\n", "file_path": "src/lex/token.rs", "rank": 24, "score": 25832.318443887758 }, { "content": "//! Definitions of the token in BMS format.\n\n\n\nuse std::path::Path;\n\n\n\nuse super::{command::*, cursor::Cursor, Result};\n\n\n\n/// A token of BMS format.\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Token<'a> {\n\n /// `#ARTIST [string]`. Defines the artist name of the music.\n\n Artist(&'a str),\n\n /// `#@BGA[01-ZZ] [01-ZZ] [sx] [sy] [w] [h] [dx] [dy]`. 
Defines the image object from trimming the existing image object.\n\n AtBga {\n\n /// The id of the object to define.\n\n id: ObjId,\n\n /// The id of the object to be trimmed.\n\n source_bmp: ObjId,\n\n /// The top left point of the trim area in pixels.\n\n trim_top_left: (i16, i16),\n\n /// The size of the trim area in pixels.\n", "file_path": "src/lex/token.rs", "rank": 25, "score": 25831.389124559315 }, { "content": " .ok_or_else(|| c.err_expected_token(\"key audio filename\"))?,\n\n );\n\n Self::Wav(ObjId::from(id, c)?, filename)\n\n }\n\n bmp if bmp.starts_with(\"#BMP\") => {\n\n let id = command.trim_start_matches(\"#BMP\");\n\n let filename = Path::new(\n\n c.next_token()\n\n .ok_or_else(|| c.err_expected_token(\"bgi image filename\"))?,\n\n );\n\n if id == \"00\" {\n\n Self::Bmp(None, filename)\n\n } else {\n\n Self::Bmp(Some(ObjId::from(id, c)?), filename)\n\n }\n\n }\n\n bpm if bpm.starts_with(\"#BPM\") => {\n\n let id = command.trim_start_matches(\"#BPM\");\n\n let bpm = c.next_token().ok_or_else(|| c.err_expected_token(\"bpm\"))?;\n\n Self::BpmChange(ObjId::from(id, c)?, bpm)\n", "file_path": "src/lex/token.rs", "rank": 26, "score": 25829.520459463667 }, { "content": " c.next_token()\n\n .map(Path::new)\n\n .ok_or_else(|| c.err_expected_token(\"banner filename\"))?,\n\n ),\n\n \"#TOTAL\" => Self::Total(\n\n c.next_token()\n\n .ok_or_else(|| c.err_expected_token(\"gauge increase rate\"))?,\n\n ),\n\n \"#BPM\" => Self::Bpm(c.next_token().ok_or_else(|| c.err_expected_token(\"bpm\"))?),\n\n \"#PLAYLEVEL\" => Self::PlayLevel(\n\n c.next_token()\n\n .ok_or_else(|| c.err_expected_token(\"play level\"))?\n\n .parse()\n\n .map_err(|_| c.err_expected_token(\"integer\"))?,\n\n ),\n\n \"#RANK\" => Self::Rank(JudgeLevel::from(c)?),\n\n \"#LNTYPE\" => {\n\n if c.next_token() == Some(\"2\") {\n\n Self::LnTypeMgq\n\n } else {\n", "file_path": "src/lex/token.rs", "rank": 27, "score": 25829.055330917814 }, { "content": " ExRank(ObjId, JudgeLevel),\n\n /// `#EXWAV[01-ZZ] 
[parameter order] [pan or volume or frequency; 1-3] [filename]`. Defines the key sound object with the effect of pan, volume and frequency.\n\n ExWav(ObjId, [&'a str; 4], &'a Path),\n\n /// `#GENRE [string]`. Defines the genre of the music.\n\n Genre(&'a str),\n\n /// `#IF [u32]`. Starts an if scope when the integer equals to the generated random number. This must be placed in a random scope. See also [`Token::Random`].\n\n If(u32),\n\n /// `#LNOBJ [01-ZZ]`. Declares the object as the end of an LN. The preceding object of the declared will be treated as the beginning of an LN.\n\n LnObj(ObjId),\n\n /// `#LNTYPE 1`. Declares the LN notation as the RDM type.\n\n LnTypeRdm,\n\n /// `#LNTYPE 2`. Declares the LN notation as the MGQ type.\n\n LnTypeMgq,\n\n /// `#MAKER [string]`. Defines the author name of the score.\n\n Maker(&'a str),\n\n /// `#XXXYY:ZZ...`. Defines the message which places the object onto the score. `XXX` is the track, `YY` is the channel, and `ZZ...` is the object id sequence.\n\n Message {\n\n /// The track, or measure, must start from 1. But some player may allow the 0 measure (i.e. Lunatic Rave 2).\n\n track: Track,\n\n /// The channel commonly expresses what the lane be arranged the note to.\n", "file_path": "src/lex/token.rs", "rank": 28, "score": 25826.905207423395 }, { "content": " /// `#RANK [0-3]`. Defines the judgement level.\n\n Rank(JudgeLevel),\n\n /// `#SETRANDOM [u32]`. Starts a random scope but the integer will be used as the generated random number. It should be used only for tests.\n\n SetRandom(u32),\n\n /// `#SETSWITCH [u32]`. Starts a switch scope but the integer will be used as the generated random number. It should be used only for tests.\n\n SetSwitch(u32),\n\n /// `#SKIP`. Escapes the current switch scope. It is often used in the end of every case scope.\n\n Skip,\n\n /// `#STAGEFILE [filename]`. Defines the splashscreen image. It should be 640x480.\n\n StageFile(&'a Path),\n\n /// `#STOP[01-ZZ] [0-4294967295]`. 
Defines the stop object. The scroll will stop the beats of the integer divided by 192. A beat length depends on the current BPM. If there are other objects on same time, the stop object must be evaluated at last.\n\n Stop(ObjId, u32),\n\n /// `#SUBARTIST [string]`. Defines the sub-artist name of the music.\n\n SubArtist(&'a str),\n\n /// `#SUBTITLE [string]`. Defines the subtitle of the music.\n\n SubTitle(&'a str),\n\n /// `#SWITCH [u32]`. Starts a switch scope which can contain only `#CASE` or `#DEF` scopes. The switch scope must close with `#ENDSW`. A random integer from 1 to the integer will be generated when parsing the score. Then if the integer of `#CASE` equals to the random integer, the commands in a case scope will be parsed, otherwise all command in it will be ignored. Any command except `#CASE` and `#DEF` must not be included in the scope, but some players allow it.\n\n Switch(u32),\n\n /// `#TEXT[01-ZZ] string`. Defines the text object.\n\n Text(ObjId, &'a str),\n", "file_path": "src/lex/token.rs", "rank": 29, "score": 25824.56621763086 }, { "content": " \"#ENDIF\" => Self::EndIf,\n\n \"#STAGEFILE\" => Self::StageFile(\n\n c.next_token()\n\n .map(Path::new)\n\n .ok_or_else(|| c.err_expected_token(\"splashscreen imege filename\"))?,\n\n ),\n\n \"#VOLWAV\" => {\n\n let volume = c\n\n .next_token()\n\n .ok_or_else(|| c.err_expected_token(\"volume\"))?\n\n .parse()\n\n .map_err(|_| c.err_expected_token(\"integer\"))?;\n\n Self::VolWav(Volume {\n\n relative_percent: volume,\n\n })\n\n }\n\n wav if wav.starts_with(\"#WAV\") => {\n\n let id = command.trim_start_matches(\"#WAV\");\n\n let filename = Path::new(\n\n c.next_token()\n", "file_path": "src/lex/token.rs", "rank": 30, "score": 25824.405377786028 }, { "content": " ///\n\n /// ```text\n\n /// #ELSE\n\n /// #IF n\n\n /// // ...\n\n /// #ENDIF\n\n /// #ENDIF\n\n /// ```\n\n ElseIf(u32),\n\n /// `%EMAIL [string]`. The email address of this score file author.\n\n Email(&'a str),\n\n /// `#ENDIF`. 
Closes the if scope. See [Token::If].\n\n EndIf,\n\n /// `#ENDRANDOM`. Closes the random scope. See [Token::Random].\n\n EndRandom,\n\n /// `#ENDSWITCH`. Closes the random scope. See [Token::Switch].\n\n EndSwitch,\n\n /// `#BMP[01-ZZ] [0-255],[0-255],[0-255],[0-255] [filename]`. Defines the background image/movie object with the color (alpha, red, green and blue) which will be treated as transparent.\n\n ExBmp(ObjId, Argb, &'a Path),\n\n /// `#EXRANK[01-ZZ] [0-3]`. Defines the judgement level change object.\n", "file_path": "src/lex/token.rs", "rank": 31, "score": 25822.796366155417 }, { "content": "/// A sequence of the [`Token`]. It can be used to run [`crate::parse::Bms::from_token_stream`].\n\npub struct TokenStream<'a> {\n\n tokens: Vec<Token<'a>>,\n\n}\n\n\n\nimpl<'a> TokenStream<'a> {\n\n pub(crate) fn from_tokens(tokens: Vec<Token<'a>>) -> Self {\n\n Self { tokens }\n\n }\n\n\n\n /// Returns the borrowed iterator of the tokens.\n\n pub fn iter(&self) -> TokenStreamIter<'_, 'a> {\n\n TokenStreamIter {\n\n iter: self.tokens.iter(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for TokenStream<'a> {\n\n type Item = Token<'a>;\n", "file_path": "src/lex/token.rs", "rank": 32, "score": 25820.309332332545 }, { "content": " channel: Channel,\n\n /// The message to the channel.\n\n message: &'a str,\n\n },\n\n /// `#MIDIFILE [filename]`. Defines the MIDI file as the BGM. *Deprecated*\n\n MidiFile(&'a Path),\n\n /// `#OCT/FP`. Declares the score as the octave mode.\n\n OctFp,\n\n /// `#OPTION [string]`. Defines the play option of the score. Some players interpret and apply the preferences.\n\n Option(&'a str),\n\n /// `#PATH_WAV [string]`. Defines the root path of [`Token::Wav`] paths. This should be used only for tests.\n\n PathWav(&'a Path),\n\n /// `#PLAYER [1-4]`. Defines the play style of the score.\n\n Player(PlayerMode),\n\n /// `#PLAYLEVEL [integer]`. Defines the difficulty level of the score. 
This can be used on music select view.\n\n PlayLevel(u8),\n\n /// `#POORBGA [0-2]`. Defines the display mode of the POOR BGA.\n\n PoorBga(PoorMode),\n\n /// `#RANDOM [u32]`. Starts a random scope which can contain only `#IF`-`#ENDIF` scopes. The random scope must close with `#ENDRANDOM`. A random integer from 1 to the integer will be generated when parsing the score. Then if the integer of `#IF` equals to the random integer, the commands in an if scope will be parsed, otherwise all command in it will be ignored. Any command except `#IF` and `#ENDIF` must not be included in the scope, but some players allow it.\n\n Random(u32),\n", "file_path": "src/lex/token.rs", "rank": 33, "score": 25819.774944328172 }, { "content": " trim_size: (u16, u16),\n\n /// The top left point to be rendered in pixels.\n\n draw_point: (i16, i16),\n\n },\n\n /// `#BANNER [filename]`. Defines the banner image. This can be used on music select or result view. It should be 300x80.\n\n Banner(&'a Path),\n\n /// `#BACKBMP [filename]`. Defines the background image file of the play view. It should be 640x480. The effect will depend on the skin of the player.\n\n BackBmp(&'a Path),\n\n /// `#BGA[01-ZZ] [01-ZZ] [x1] [y1] [x2] [y2] [dx] [dy]`. 
Defines the image object from trimming the existing image object.\n\n Bga {\n\n /// The id of the object to define.\n\n id: ObjId,\n\n /// The id of the object to be trimmed.\n\n source_bmp: ObjId,\n\n /// The top left point of the trim area in pixels.\n\n trim_top_left: (i16, i16),\n\n /// The bottom right point of the trim area in pixels.\n\n trim_bottom_right: (i16, i16),\n\n /// The top left point to be rendered in pixels.\n\n draw_point: (i16, i16),\n", "file_path": "src/lex/token.rs", "rank": 34, "score": 25818.066127903432 }, { "content": " let message = &command[7..];\n\n Self::Message {\n\n track: Track(track),\n\n channel: Channel::from(channel, c)?,\n\n message,\n\n }\n\n }\n\n comment if !comment.starts_with('#') => {\n\n c.next_line_remaining();\n\n continue;\n\n }\n\n unknown => {\n\n eprintln!(\"unknown command found: {:?}\", unknown);\n\n todo!();\n\n }\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lex/token.rs", "rank": 35, "score": 25817.775849176236 }, { "content": " type IntoIter = <Vec<Token<'a>> as IntoIterator>::IntoIter;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.tokens.into_iter()\n\n }\n\n}\n\n\n\n/// An iterator of the [`TokenStream`].\n\n#[derive(Debug)]\n\npub struct TokenStreamIter<'t, 'a> {\n\n iter: std::slice::Iter<'t, Token<'a>>,\n\n}\n\n\n\nimpl<'t, 'a> Iterator for TokenStreamIter<'t, 'a> {\n\n type Item = &'t Token<'a>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.iter.next()\n\n }\n\n}\n", "file_path": "src/lex/token.rs", "rank": 36, "score": 25817.022975791424 }, { "content": " }\n\n stop if stop.starts_with(\"#STOP\") => {\n\n let id = command.trim_start_matches(\"#STOP\");\n\n let stop = c\n\n .next_token()\n\n .ok_or_else(|| c.err_expected_token(\"stop beats\"))?\n\n .parse()\n\n .map_err(|_| c.err_expected_token(\"integer\"))?;\n\n Self::Stop(ObjId::from(id, c)?, stop)\n\n }\n\n message\n\n if message.starts_with('#')\n\n && message.chars().nth(6) == Some(':')\n\n && 8 <= message.len() 
=>\n\n {\n\n let track = command[1..4]\n\n .parse()\n\n .map_err(|_| c.err_expected_token(\"[000-999]\"))?;\n\n let channel = &command[4..6];\n\n\n", "file_path": "src/lex/token.rs", "rank": 37, "score": 25816.110298057854 }, { "content": " Self::LnTypeRdm\n\n }\n\n }\n\n \"#RANDOM\" => {\n\n let rand_max = c\n\n .next_token()\n\n .ok_or_else(|| c.err_expected_token(\"random max\"))?\n\n .parse()\n\n .map_err(|_| c.err_expected_token(\"integer\"))?;\n\n Self::Random(rand_max)\n\n }\n\n \"#ENDRANDOM\" => Self::EndRandom,\n\n \"#IF\" => {\n\n let rand_target = c\n\n .next_token()\n\n .ok_or_else(|| c.err_expected_token(\"random target\"))?\n\n .parse()\n\n .map_err(|_| c.err_expected_token(\"integer\"))?;\n\n Self::If(rand_target)\n\n }\n", "file_path": "src/lex/token.rs", "rank": 38, "score": 25815.568994596175 }, { "content": "//! Definitions of command argument data.\n\n\n\nuse std::num::NonZeroU16;\n\n\n\nuse super::{cursor::Cursor, LexError, Result};\n\n\n\n/// A play style of the score.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum PlayerMode {\n\n /// For single play, a player uses 5 or 7 keys.\n\n Single,\n\n /// For couple play, two players use each 5 or 7 keys.\n\n Two,\n\n /// For double play, a player uses 10 or 14 keys.\n\n Double,\n\n}\n\n\n\nimpl PlayerMode {\n\n pub(crate) fn from(c: &mut Cursor) -> Result<Self> {\n\n Ok(match c.next_token() {\n", "file_path": "src/lex/command.rs", "rank": 39, "score": 29.780801623707426 }, { "content": " let id = u16::from_str_radix(id, 36).map_err(|_| c.err_expected_token(\"[00-ZZ]\"))?;\n\n id.try_into()\n\n .map(Self)\n\n .map_err(|_| c.err_expected_token(\"non zero index\"))\n\n }\n\n}\n\n\n\n/// A play volume of the sound in the score. 
Defaults to 100.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct Volume {\n\n /// A play volume percentage of the sound.\n\n pub relative_percent: u8,\n\n}\n\n\n\nimpl Default for Volume {\n\n fn default() -> Self {\n\n Self {\n\n relative_percent: 100,\n\n }\n\n }\n", "file_path": "src/lex/command.rs", "rank": 40, "score": 27.312144911166353 }, { "content": "\n\nimpl JudgeLevel {\n\n pub(crate) fn from(c: &mut Cursor) -> Result<Self> {\n\n Ok(match c.next_token() {\n\n Some(\"0\") => Self::VeryHard,\n\n Some(\"1\") => Self::Hard,\n\n Some(\"2\") => Self::Normal,\n\n Some(\"3\") => Self::Easy,\n\n _ => return Err(c.err_expected_token(\"one of 0, 1, 2 or 3\")),\n\n })\n\n }\n\n}\n\n\n\n/// An object id. Its meaning is determined by the channel belonged to.\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct ObjId(pub NonZeroU16);\n\n\n\nimpl std::fmt::Debug for ObjId {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let digits = (self.0.get() / 36, self.0.get() % 36);\n", "file_path": "src/lex/command.rs", "rank": 41, "score": 27.157171356596272 }, { "content": "//! 
Note objects manager.\n\n\n\nuse itertools::Itertools;\n\nuse std::collections::{BTreeMap, HashMap};\n\n\n\nuse super::{\n\n header::Header,\n\n obj::{Obj, ObjTime},\n\n ParseError, Result,\n\n};\n\nuse crate::lex::{\n\n command::{Channel, Key, NoteKind, ObjId},\n\n token::Token,\n\n};\n\n\n\n/// An object to change the BPM of the score.\n\n#[derive(Debug, Clone, Copy)]\n\npub struct BpmChangeObj {\n\n /// The time to begin the change of BPM.\n\n pub time: ObjTime,\n", "file_path": "src/parse/notes.rs", "rank": 42, "score": 26.206002215726407 }, { "content": " /// The BPM to be.\n\n pub bpm: f64,\n\n}\n\n\n\nimpl PartialEq for BpmChangeObj {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.time == other.time\n\n }\n\n}\n\n\n\nimpl Eq for BpmChangeObj {}\n\n\n\nimpl PartialOrd for BpmChangeObj {\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl Ord for BpmChangeObj {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n", "file_path": "src/parse/notes.rs", "rank": 43, "score": 25.151871914402417 }, { "content": "# BMS Format Specification\n\n\n\nwritten by Urao Yane <yaneurao@gmail.com>\n\n\n\n\"BMS\" means a Be-Music Source file. A file which has BMS suffix is regarded as the BMS file. This file format was produced by Urao Yane and NBK in 1998. And I adopted this file format to BM98. Now,anyone can use this format freely.\n\n\n\n---\n\n\n\n# Command Line\n\n\n\nThe line beginning at `#` is the command line. All the rest are ignored (use for comments). And this BMS file is compiled at runtime, so you can order any lines freely. 
And there is no difference in the command line between using a capital letter or not.\n\n\n\n# Header\n\n\n\n #PLAYER 1\n\nThis data is for Single Play.\n\n\n\n---\n\n\n\n #PLAYER 2\n\nThis data is for Two Play.\n\n\n\n---\n\n\n\n #PLAYER 3\n\nThis data is for Double Play.\n\n\n\n---\n\n\n\n #GENRE xxxxxxxx\n\nDefinition of Genre.\n\n\n\n---\n\n\n\n #TITLE xxxxxxxx\n\nDefinition of Title.\n\n\n\n---\n\n\n\n #ARTIST xxxxxxxx\n\nDefinition of Artist.\n\n\n\n---\n\n\n\n #BPM xxx\n\nDefinition of BPM (Beat Per Minute) at the top of music. Default is 130.\n\n\n\n---\n\n\n\n #MIDIFILE xxxxxxx.mid\n\nBackground music by MIDI.\n\n\n\n---\n\n\n\n #PLAYLEVEL x\n\nInformation of Game Level for player.\n\n\n\n---\n\n\n\n #RANK x\n\njudgement level. x = `0`: very hard, `1`: hard, `2`: normal, `3`: easy.\n\n\n\n---\n\n\n\n #VOLWAV xxx\n\nRelative volume control in percentage.\n\n\n\n---\n\n\n\n #WAVxx yyyyyyyy.wav\n\nDefinition of Wave Data. `xx`: `01` to `FF` (Hex), `yyyyyyyy.wav`: wave file name.\n\n\n\ne.g.\n\n\n\n #WAV01 HOUSE01.WAV // assign HOUSE01.WAV to 01 wav\n\n #WAV02 HOUSE02.WAV // assign HOUSE02.WAV to 02 wav\n\n #WAVFF HOUSE03.WAV // assign HOUSE03.WAV to FF wav\n\n\n\n---\n\n\n\n #BMPxx yyyyyyyy.bmp\n\nDefinition of Bitmap file. `xx`: `01` to `FF` (Hex), `yyyyyyyy.bmp`: bitmap file name. Bitmap size must be 256 * 256. (max color 65536)\n\n\n\ne.g.\n\n\n\n #BMP02 HOUSE02.BMP // assign HOUSE02.BMP to 02 bitmap\n\n #BMP01 HOUSE01.BMP // assign HOUSE01.BMP to 01 bitmap\n\n #BMPEE HOUSE03.BMP // assign HOUSE03.BMP to EE bitmap\n\n\n\nHowever, the bitmap defined by `#BMP00` is something special. 
This bitmap shows when a player do a poor play.\n\n\n", "file_path": "SPECIFICATION.md", "rank": 44, "score": 22.589870992295292 }, { "content": " self.time.cmp(&other.time)\n\n }\n\n}\n\n\n\n/// An object to change its section length of the score.\n\n#[derive(Debug, Clone, Copy)]\n\npub struct SectionLenChangeObj {\n\n /// The time to begin the change of section length.\n\n pub time: ObjTime,\n\n /// The length to be.\n\n pub length: f64,\n\n}\n\n\n\nimpl PartialEq for SectionLenChangeObj {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.time == other.time\n\n }\n\n}\n\n\n\nimpl Eq for SectionLenChangeObj {}\n", "file_path": "src/parse/notes.rs", "rank": 45, "score": 22.51186603537398 }, { "content": " if id == 0 {\n\n continue;\n\n }\n\n let obj = (id as u16).try_into().unwrap();\n\n let time = ObjTime::new(track.0, i as u32, denominator);\n\n let &bpm = header\n\n .bpm_changes\n\n .get(&obj)\n\n .ok_or(ParseError::UndefinedObject(obj))?;\n\n if self\n\n .bpm_changes\n\n .insert(time, BpmChangeObj { time, bpm })\n\n .is_some()\n\n {\n\n eprintln!(\"duplicate bpm change object detected at {:?}\", time);\n\n }\n\n }\n\n }\n\n Token::Message {\n\n track,\n", "file_path": "src/parse/notes.rs", "rank": 46, "score": 22.42594057054531 }, { "content": "}\n\n\n\n/// An alpha-red-gree-blue color data.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct Argb {\n\n /// A component of alpha.\n\n pub alpha: u8,\n\n /// A component of red.\n\n pub red: u8,\n\n /// A component of green.\n\n pub green: u8,\n\n /// A component of blue.\n\n pub blue: u8,\n\n}\n\n\n\nimpl Default for Argb {\n\n fn default() -> Self {\n\n Self {\n\n alpha: 255,\n\n red: 0,\n", "file_path": "src/lex/command.rs", "rank": 47, "score": 22.171792295882778 }, { "content": " Some(\"1\") => Self::Single,\n\n Some(\"2\") => Self::Two,\n\n Some(\"3\") => Self::Double,\n\n _ => return Err(c.err_expected_token(\"one of 1, 2 or 3\")),\n\n })\n\n }\n\n}\n\n\n\n/// A rank to determine judge level, 
but treatment differs among the BMS players.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum JudgeLevel {\n\n /// Rank 0, the most difficult rank.\n\n VeryHard,\n\n /// Rank 1, the harder rank.\n\n Hard,\n\n /// Rank 2, the easier rank.\n\n Normal,\n\n /// Rank 3, the easiest rank.\n\n Easy,\n\n}\n", "file_path": "src/lex/command.rs", "rank": 48, "score": 21.59387260201361 }, { "content": " /// For the change option object.\n\n ChangeOption,\n\n /// For the note which the user can interact.\n\n Note {\n\n /// The kind of the note.\n\n kind: NoteKind,\n\n /// The note for the player 1.\n\n is_player1: bool,\n\n /// The key which corresponds to the note.\n\n key: Key,\n\n },\n\n /// For the section length change object.\n\n SectionLen,\n\n /// For the stop object.\n\n Stop,\n\n}\n\n\n\nimpl Channel {\n\n pub(crate) fn from(channel: &str, c: &mut Cursor) -> Result<Self> {\n\n use Channel::*;\n", "file_path": "src/lex/command.rs", "rank": 49, "score": 20.611633756540712 }, { "content": "\n\nimpl PartialOrd for SectionLenChangeObj {\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl Ord for SectionLenChangeObj {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n self.time.cmp(&other.time)\n\n }\n\n}\n\n\n\n/// The objects set for querying by lane or time.\n\n#[derive(Debug, Default)]\n\npub struct Notes {\n\n objs: HashMap<ObjId, Obj>,\n\n bgms: BTreeMap<ObjTime, Vec<ObjId>>,\n\n ids_by_key: HashMap<Key, BTreeMap<ObjTime, ObjId>>,\n\n bpm_changes: BTreeMap<ObjTime, BpmChangeObj>,\n", "file_path": "src/parse/notes.rs", "rank": 50, "score": 20.127523453962233 }, { "content": "//! Parser for BMS format. The reason why the implementation separated into lex and parse is the score may contain some randomized elements such as `#RANDOM`. 
This separation make us able to parse the tokens with the custom random generator cheaply.\n\n\n\npub mod header;\n\npub mod notes;\n\npub mod obj;\n\nmod random;\n\npub mod rng;\n\n\n\nuse std::ops::ControlFlow;\n\n\n\nuse self::{header::Header, notes::Notes, random::RandomParser, rng::Rng};\n\nuse crate::lex::{command::ObjId, token::TokenStream};\n\n\n\n/// An error occurred when parsing the [`TokenStream`].\n\n#[derive(Debug, Clone)]\n\npub enum ParseError {\n\n /// Syntax formed from the commands was invalid.\n\n SyntaxError(String),\n\n /// The invalid real number for the BPM.\n\n BpmParseError(String),\n", "file_path": "src/parse.rs", "rank": 51, "score": 19.842534206066137 }, { "content": " pub fn bpm_changes(&self) -> &BTreeMap<ObjTime, BpmChangeObj> {\n\n &self.bpm_changes\n\n }\n\n\n\n /// Returns the section len change objects.\n\n pub fn section_len_changes(&self) -> &BTreeMap<ObjTime, SectionLenChangeObj> {\n\n &self.section_len_changes\n\n }\n\n\n\n /// Adds the new note object to the notes.\n\n pub fn push(&mut self, note: Obj) {\n\n self.objs.insert(note.obj, note.clone());\n\n self.ids_by_key\n\n .entry(note.key)\n\n .or_insert_with(BTreeMap::new)\n\n .insert(note.offset, note.obj);\n\n }\n\n\n\n /// Removes the note from the notes.\n\n pub fn remove(&mut self, id: ObjId) -> Option<Obj> {\n", "file_path": "src/parse/notes.rs", "rank": 52, "score": 19.72782513190888 }, { "content": "use std::ops::ControlFlow::{self, *};\n\n\n\nuse super::{rng::Rng, ParseError, Result};\n\nuse crate::lex::token::Token;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n", "file_path": "src/parse/random.rs", "rank": 53, "score": 19.624000996591977 }, { "content": "\n\n let id1 = 1.try_into().unwrap();\n\n let id2 = 2.try_into().unwrap();\n\n let id3 = 3.try_into().unwrap();\n\n let tokens: Vec<_> = ts.into_iter().collect();\n\n assert_eq!(\n\n tokens,\n\n vec![\n\n Player(PlayerMode::Single),\n\n Genre(\"FUGA\"),\n\n Title(\"BAR(^^)\"),\n\n 
Artist(\"MikuroXina\"),\n\n Bpm(\"120\"),\n\n PlayLevel(6),\n\n Rank(JudgeLevel::Normal),\n\n Wav(id1, Path::new(\"hoge.WAV\")),\n\n Wav(id2, Path::new(\"foo.WAV\")),\n\n Wav(id3, Path::new(\"bar.WAV\")),\n\n Message {\n\n track: Track(2),\n", "file_path": "src/lex.rs", "rank": 54, "score": 19.580080258639722 }, { "content": "#[derive(Debug)]\n\npub struct Bms {\n\n /// The header data in the score.\n\n pub header: Header,\n\n /// The objects in the score.\n\n pub notes: Notes,\n\n}\n\n\n\nimpl Bms {\n\n /// Parses a token stream into [`Bms`] with a random generator [`Rng`].\n\n pub fn from_token_stream(token_stream: &TokenStream, rng: impl Rng) -> Result<Self> {\n\n let mut random_parser = RandomParser::new(rng);\n\n let mut notes = Notes::default();\n\n let mut header = Header::default();\n\n\n\n for token in token_stream.iter() {\n\n match random_parser.parse(token) {\n\n ControlFlow::Continue(_) => {}\n\n ControlFlow::Break(Ok(_)) => continue,\n\n ControlFlow::Break(Err(e)) => return Err(e),\n\n }\n\n notes.parse(token, &header)?;\n\n header.parse(token)?;\n\n }\n\n\n\n Ok(Self { header, notes })\n\n }\n\n}\n", "file_path": "src/parse.rs", "rank": 55, "score": 19.54584121956027 }, { "content": "//! 
Definitions of the note object.\n\n\n\nuse crate::lex::command::{Key, NoteKind, ObjId};\n\n\n\n/// A time of the object on the score.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct ObjTime {\n\n /// The track, or measure, where the object is in.\n\n pub track: u32,\n\n /// The time offset numerator in the track.\n\n pub numerator: u32,\n\n /// The time offset denominator in the track.\n\n pub denominator: u32,\n\n}\n\n\n\nimpl ObjTime {\n\n /// Create a new time.\n\n ///\n\n /// # Panics\n\n ///\n", "file_path": "src/parse/obj.rs", "rank": 56, "score": 19.424414939543116 }, { "content": " self.objs.remove(&id).map(|removed| {\n\n self.ids_by_key\n\n .get_mut(&removed.key)\n\n .unwrap()\n\n .remove(&removed.offset)\n\n .unwrap();\n\n removed\n\n })\n\n }\n\n\n\n pub(crate) fn parse(&mut self, token: &Token, header: &Header) -> Result<()> {\n\n match token {\n\n Token::Message {\n\n track,\n\n channel: Channel::BpmChange,\n\n message,\n\n } => {\n\n let denominator = message.len() as u32 / 2;\n\n for (i, (c1, c2)) in message.chars().tuples().into_iter().enumerate() {\n\n let id = c1.to_digit(36).unwrap() * 36 + c2.to_digit(36).unwrap();\n", "file_path": "src/parse/notes.rs", "rank": 57, "score": 17.979325161470594 }, { "content": " fn default() -> Self {\n\n Self::Interrupt\n\n }\n\n}\n\n\n\n/// The channel, or lane, where the note will be on.\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Channel {\n\n /// The BGA channel.\n\n BgaBase,\n\n /// The BGA channel but overlay to [`Channel::BgaBase`] channel.\n\n BgaLayer,\n\n /// The POOR BGA channel.\n\n BgaPoor,\n\n /// For the note which will be auto-played.\n\n Bgm,\n\n /// For the bpm change by an [`u8`] integer.\n\n BpmChangeU8,\n\n /// For the bpm change object.\n\n BpmChange,\n", "file_path": "src/lex/command.rs", "rank": 58, "score": 17.697728998338633 }, { "content": " {\n\n eprintln!(\"duplicate bpm change object detected at {:?}\", time);\n\n }\n\n }\n\n }\n\n Token::Message 
{\n\n track,\n\n channel: Channel::SectionLen,\n\n message,\n\n } => {\n\n let time = ObjTime::new(track.0, 0, 4);\n\n let length = message.parse().expect(\"f64 as section length\");\n\n assert!(0.0 < length, \"section length must be greater than zero\");\n\n if self\n\n .section_len_changes\n\n .insert(time, SectionLenChangeObj { time, length })\n\n .is_some()\n\n {\n\n eprintln!(\"duplicate bpm change object detected at {:?}\", time);\n\n }\n", "file_path": "src/parse/notes.rs", "rank": 59, "score": 17.45052019953974 }, { "content": " \"8\" => Key6,\n\n \"9\" => Key7,\n\n _ => return Err(c.err_expected_token(\"[1-9]\")),\n\n })\n\n }\n\n}\n\n\n\n/// A POOR BGA display mode.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\n\n\npub enum PoorMode {\n\n /// To hide the normal BGA and display the POOR BGA.\n\n Interrupt,\n\n /// To overlap the POOR BGA onto the normal BGA.\n\n Overlay,\n\n /// Not to display the POOR BGA.\n\n Hidden,\n\n}\n\n\n\nimpl Default for PoorMode {\n", "file_path": "src/lex/command.rs", "rank": 60, "score": 17.03508751466325 }, { "content": "\n\nimpl Ord for ObjTime {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n let self_time_in_track = self.numerator * other.denominator;\n\n let other_time_in_track = other.numerator * self.denominator;\n\n self.track\n\n .cmp(&other.track)\n\n .then(self_time_in_track.cmp(&other_time_in_track))\n\n }\n\n}\n\n\n\n/// An object on the score.\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Obj {\n\n /// The time offset in the track.\n\n pub offset: ObjTime,\n\n /// THe note kind of the the object.\n\n pub kind: NoteKind,\n\n /// Whether the note is for player 1.\n\n pub is_player1: bool,\n", "file_path": "src/parse/obj.rs", "rank": 61, "score": 16.86716881574383 }, { "content": " /// The object has required but not defined,\n\n UndefinedObject(ObjId),\n\n}\n\n\n\nimpl std::fmt::Display for ParseError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result 
{\n\n match self {\n\n ParseError::SyntaxError(mes) => write!(f, \"syntax error: {}\", mes),\n\n ParseError::BpmParseError(bpm) => write!(f, \"not a number bpm: {}\", bpm),\n\n ParseError::UndefinedObject(id) => write!(f, \"undefined object: {:?}\", id),\n\n }\n\n }\n\n}\n\n\n\nimpl std::error::Error for ParseError {}\n\n\n\n/// A custom result type for parsing.\n\npub type Result<T> = std::result::Result<T, ParseError>;\n\n\n\n/// A score data of BMS format.\n", "file_path": "src/parse.rs", "rank": 62, "score": 16.512944011056593 }, { "content": " f.debug_tuple(\"ObjId\")\n\n .field(&format!(\n\n \"{}{}\",\n\n char::from_digit(digits.0 as u32, 36).unwrap(),\n\n char::from_digit(digits.1 as u32, 36).unwrap()\n\n ))\n\n .finish()\n\n }\n\n}\n\n\n\nimpl TryFrom<u16> for ObjId {\n\n type Error = std::num::TryFromIntError;\n\n\n\n fn try_from(value: u16) -> std::result::Result<Self, Self::Error> {\n\n Ok(Self(value.try_into()?))\n\n }\n\n}\n\n\n\nimpl ObjId {\n\n pub(crate) fn from(id: &str, c: &mut Cursor) -> Result<Self> {\n", "file_path": "src/lex/command.rs", "rank": 63, "score": 16.12374686440736 }, { "content": "//! # About the format\n\n//!\n\n//! ## Command\n\n//!\n\n//! Each command starts with `#` character, and other lines will be ignored. Some commands require arguments separated by whitespace character such as spaces or tabs.\n\n//!\n\n//! ```text\n\n//! #PLAYER 1\n\n//! #GENRE FUGA\n\n//! #TITLE BAR(^^)\n\n//! #ARTIST MikuroXina\n\n//! #BPM 120\n\n//! #PLAYLEVEL 6\n\n//! #RANK 2\n\n//!\n\n//! #WAV01 hoge.WAV\n\n//! #WAV02 foo.WAV\n\n//! #WAV03 bar.WAV\n\n//!\n\n//! 
#00211:0303030303\n", "file_path": "src/lib.rs", "rank": 64, "score": 15.63573342425357 }, { "content": " section_len_changes: BTreeMap<ObjTime, SectionLenChangeObj>,\n\n}\n\n\n\nimpl Notes {\n\n /// Converts into the notes sorted by time.\n\n pub fn into_all_notes(self) -> Vec<Obj> {\n\n self.objs.into_values().sorted().collect()\n\n }\n\n\n\n /// Returns the iterator having all of the notes sorted by time.\n\n pub fn all_notes(&self) -> impl Iterator<Item = &Obj> {\n\n self.objs.values().sorted()\n\n }\n\n\n\n /// Returns all the bgms in the score.\n\n pub fn bgms(&self) -> &BTreeMap<ObjTime, Vec<ObjId>> {\n\n &self.bgms\n\n }\n\n\n\n /// Returns the bpm change objects.\n", "file_path": "src/parse/notes.rs", "rank": 65, "score": 15.132037319661357 }, { "content": "//! ```\n\n//!\n\n//! ### Header command\n\n//!\n\n//! Header commands are used to express the metadata of the music or the definition for note arrangement.\n\n//!\n\n//! ### Message command\n\n//!\n\n//! Message command starts with `#XXXYY:ZZ.... XXX` is the number of the measure, `YY` is the channel of the message, and `ZZ...` is the object id sequence.\n\n//!\n\n//! The measure must start from 1, but some player may allow the 0 measure (i.e. Lunatic Rave 2).\n\n//!\n\n//! The channel commonly expresses what the lane be arranged the note to.\n\n//!\n\n//! The object id is formed by 2-digit of 36-radix (`[0-9a-zA-Z]`) integer. So the sequence length must be an even number. The `00` object id is the special id, expresses the rest (no object lies). The object lies on the position divided equally by how many the object is in the measure. For example:\n\n//!\n\n//! ```text\n\n//! #00211:0303000303\n\n//! 
```\n\n//!\n", "file_path": "src/lib.rs", "rank": 66, "score": 14.980992708145394 }, { "content": " Key6,\n\n /// The rightmost white key.\n\n Key7,\n\n /// The scratch disk.\n\n Scratch,\n\n /// The zone that the user can scratch disk freely.\n\n FreeZone,\n\n}\n\n\n\nimpl Key {\n\n pub(crate) fn from(key: &str, c: &mut Cursor) -> Result<Self> {\n\n use Key::*;\n\n Ok(match key {\n\n \"1\" => Key1,\n\n \"2\" => Key2,\n\n \"3\" => Key3,\n\n \"4\" => Key4,\n\n \"5\" => Key5,\n\n \"6\" => Scratch,\n\n \"7\" => FreeZone,\n", "file_path": "src/lex/command.rs", "rank": 67, "score": 14.827199459467158 }, { "content": " f,\n\n \"expected {}, but not found at line {}, col {}\",\n\n message, line, col\n\n ),\n\n }\n\n }\n\n}\n\n\n\nimpl std::error::Error for LexError {}\n\n\n\n/// An error occurred when lexical analyzing the BMS format file.\n\npub type Result<T> = std::result::Result<T, LexError>;\n\n\n\n/// Analyzes and converts the BMS format text into [`TokenStream`].\n", "file_path": "src/lex.rs", "rank": 68, "score": 14.144110997670776 }, { "content": " col: usize,\n\n },\n\n /// The token was expected but not found.\n\n ExpectedToken {\n\n /// The line number of the token expected.\n\n line: usize,\n\n /// The column number of the token expected.\n\n col: usize,\n\n /// What the expected is.\n\n message: &'static str,\n\n },\n\n}\n\n\n\nimpl std::fmt::Display for LexError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n LexError::UnknownCommand { line, col } => {\n\n write!(f, \"unknown command found at line {}, col {}\", line, col)\n\n }\n\n LexError::ExpectedToken { line, col, message } => write!(\n", "file_path": "src/lex.rs", "rank": 69, "score": 13.756559213170961 }, { "content": " /// The key, or lane, where the object is placed.\n\n pub key: Key,\n\n /// The id of the object.\n\n pub obj: ObjId,\n\n}\n\n\n\nimpl PartialOrd for Obj {\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n 
Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl Ord for Obj {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n self.offset\n\n .cmp(&other.offset)\n\n .then(self.obj.cmp(&other.obj))\n\n }\n\n}\n", "file_path": "src/parse/obj.rs", "rank": 70, "score": 13.556160831325657 }, { "content": "use super::LexError;\n\n\n\npub(crate) struct Cursor<'a> {\n\n line: usize,\n\n col: usize,\n\n index: usize,\n\n source: &'a str,\n\n}\n\n\n\nimpl<'a> Cursor<'a> {\n\n pub(crate) fn new(source: &'a str) -> Self {\n\n Self {\n\n line: 1,\n\n col: 1,\n\n index: 0,\n\n source,\n\n }\n\n }\n\n\n\n pub(crate) fn is_end(&self) -> bool {\n", "file_path": "src/lex/cursor.rs", "rank": 71, "score": 13.08015477143549 }, { "content": "//! Lexical analyzer of BMS format.\n\n\n\npub mod command;\n\nmod cursor;\n\npub mod token;\n\n\n\nuse self::{\n\n cursor::Cursor,\n\n token::{Token, TokenStream},\n\n};\n\n\n\n/// An error occurred when lexical analysis.\n\n#[non_exhaustive]\n\n#[derive(Debug, Clone)]\n\npub enum LexError {\n\n /// An unknown command detected.\n\n UnknownCommand {\n\n /// The line number of the command detected.\n\n line: usize,\n\n /// The column number of the command detected.\n", "file_path": "src/lex.rs", "rank": 72, "score": 12.91082414856554 }, { "content": "///\n\n/// ```text\n\n/// |---------|----------------------|\n\n/// | | [K2] [K4] [K6] |\n\n/// |(Scratch)|[K1] [K3] [K5] [K7]|\n\n/// |---------|----------------------|\n\n/// ```\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum Key {\n\n /// The leftmost white key.\n\n Key1,\n\n /// The leftmost black key.\n\n Key2,\n\n /// The second white key from the left.\n\n Key3,\n\n /// The second black key from the left.\n\n Key4,\n\n /// The third white key from the left.\n\n Key5,\n\n /// The rightmost black key.\n", "file_path": "src/lex/command.rs", "rank": 73, "score": 12.873192780937956 }, { "content": "#GENRE FUGA\n\n#TITLE BAR(^^)\n\n#ARTIST MikuroXina\n\n#BPM 120\n\n#PLAYLEVEL 6\n\n#RANK 
2\n\n\n\n#WAV01 hoge.WAV\n\n#WAV02 foo.WAV\n\n#WAV03 bar.WAV\n\n\n\n#00211:0303030303\n\n\n\n#00211:0303000303\n\n\n\n#00211:010101\n\n#00211:00020202\n\n\";\n\n\n\n let ts = parse(SRC).expect(\"SRC must be parsed\");\n", "file_path": "src/lex.rs", "rank": 74, "score": 12.554540103708936 }, { "content": " /// Panics if `denominator` is 0 or `numerator` is greater than or equal to `denominator`.\n\n pub fn new(track: u32, numerator: u32, denominator: u32) -> Self {\n\n if track == 0 {\n\n eprintln!(\"warning: track 000 detected\");\n\n }\n\n assert!(0 < denominator);\n\n assert!(numerator < denominator);\n\n Self {\n\n track,\n\n numerator,\n\n denominator,\n\n }\n\n }\n\n}\n\n\n\nimpl PartialOrd for ObjTime {\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n", "file_path": "src/parse/obj.rs", "rank": 75, "score": 11.423083291375693 }, { "content": " Ok(match channel.to_uppercase().as_str() {\n\n \"01\" => Bgm,\n\n \"02\" => SectionLen,\n\n \"03\" => BpmChangeU8,\n\n \"08\" => BpmChange,\n\n \"04\" => BgaBase,\n\n \"06\" => BgaPoor,\n\n \"07\" => BgaLayer,\n\n \"09\" => Stop,\n\n player1 if player1.starts_with('1') => Note {\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::from(&channel[1..], c)?,\n\n },\n\n player2 if player2.starts_with('2') => Note {\n\n kind: NoteKind::Visible,\n\n is_player1: false,\n\n key: Key::from(&channel[1..], c)?,\n\n },\n\n player1 if player1.starts_with('3') => Note {\n", "file_path": "src/lex/command.rs", "rank": 76, "score": 11.319801000082663 }, { "content": "}\n\n\n\nimpl std::fmt::Display for BmsError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n BmsError::LexError(lex) => {\n\n write!(f, \"lex error: {}\", lex)\n\n }\n\n BmsError::ParseError(parse) => {\n\n write!(f, \"parse error: {}\", parse)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl std::error::Error for BmsError {\n\n fn source(&self) -> Option<&(dyn 
std::error::Error + 'static)> {\n\n match self {\n\n BmsError::LexError(lex) => Some(lex),\n\n BmsError::ParseError(parse) => Some(parse),\n\n }\n\n }\n\n}\n\n\n\n/// A custom result type for bms-rs.\n\npub type Result<T> = std::result::Result<T, BmsError>;\n", "file_path": "src/lib.rs", "rank": 77, "score": 11.26585476876328 }, { "content": " channel: Channel::BpmChangeU8,\n\n message,\n\n } => {\n\n let denominator = message.len() as u32 / 2;\n\n for (i, (c1, c2)) in message.chars().tuples().into_iter().enumerate() {\n\n let bpm = c1.to_digit(16).unwrap() * 16 + c2.to_digit(16).unwrap();\n\n if bpm == 0 {\n\n continue;\n\n }\n\n let time = ObjTime::new(track.0, i as u32, denominator);\n\n if self\n\n .bpm_changes\n\n .insert(\n\n time,\n\n BpmChangeObj {\n\n time,\n\n bpm: bpm as f64,\n\n },\n\n )\n\n .is_some()\n", "file_path": "src/parse/notes.rs", "rank": 78, "score": 11.258868447906282 }, { "content": " kind: NoteKind::Landmine,\n\n is_player1: true,\n\n key: Key::from(&channel[1..], c)?,\n\n },\n\n player2 if player2.starts_with('E') => Note {\n\n kind: NoteKind::Landmine,\n\n is_player1: false,\n\n key: Key::from(&channel[1..], c)?,\n\n },\n\n _ => {\n\n return Err(LexError::UnknownCommand {\n\n line: c.line(),\n\n col: c.col(),\n\n })\n\n }\n\n })\n\n }\n\n}\n\n\n\n/// A track, or bar, in the score. 
It must greater than 0, but some scores may include the 0 track.\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Track(pub u32);\n", "file_path": "src/lex/command.rs", "rank": 79, "score": 10.922296894094824 }, { "content": " green: 0,\n\n blue: 0,\n\n }\n\n }\n\n}\n\n\n\n/// A kind of the note.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum NoteKind {\n\n /// A normal note can be seen by the user.\n\n Visible,\n\n /// A invisible note cannot be played by the user.\n\n Invisible,\n\n /// A long-press note (LN), requires the user to hold pressing the key.\n\n Long,\n\n /// A landmine note that treated as POOR judgement when\n\n Landmine,\n\n}\n\n\n\n/// A key of the controller or keyboard.\n", "file_path": "src/lex/command.rs", "rank": 80, "score": 10.799005165602964 }, { "content": "\n\n/// An error occurred when parsing the BMS format file.\n\n#[non_exhaustive]\n\n#[derive(Debug, Clone)]\n\npub enum BmsError {\n\n /// An error comes from lexical analyzer.\n\n LexError(LexError),\n\n /// An error comes from syntax parser.\n\n ParseError(ParseError),\n\n}\n\n\n\nimpl From<LexError> for BmsError {\n\n fn from(e: LexError) -> Self {\n\n Self::LexError(e)\n\n }\n\n}\n\nimpl From<ParseError> for BmsError {\n\n fn from(e: ParseError) -> Self {\n\n Self::ParseError(e)\n\n }\n", "file_path": "src/lib.rs", "rank": 81, "score": 9.594693467354093 }, { "content": "## Example\n\n\n\n // a sample of random loading function\n\n\n\n #random 2 // create a random number (1 or 2)\n\n\n\n #if 1 // if the number was equal to 1 then...\n\n #00111:31313131 // this is effective...\n\n #endif\n\n\n\n #if 2 // if the number was equal to 2 then...\n\n #00113:32003232 // this is effective\n\n #endif\n\n\n\n# Channel Messages\n\n\n\n`#aaabb:cccccccc`\n\n\n\n`aaa`: track number (from `000` to `999`).\n\n`bb`: channel number where you want to send message (from `00` to `FF`).\n\n`cccccccc`: any message.\n\n\n\n## A brief Channel Number\n\n\n\n`01`: BGM 
(background music by WAVE).\n\n`03`: changing a Tempo.\n\n`04`: BGA (background animation).\n\n`06`: changing Poor-bitmap.\n\n`11` to `17`: Object Channels of 1 player side from left to right.\n\n`21` to `27`: Object Channels of 2 player side from left to right.\n\n\n\n## Example\n\n\n\n #00211:03030303\n\nThis means 4 object `03`s at the left of 1 player side `11` in `002` track. This object is assigned to wave No. `03` which was defined by `#WAV03 xxxx.wav`. And this 4 objects are arranged evenly in this track.\n\n\n\nPlease try the following patterns.\n\n\n\n #00211:0303030303\n\n\n\n #00211:0303000303\n\n\n\n #00211:010101\n\n #00211:00020202\n\n\n\n---\n\n\n\nThis document and this format is free! I hope the day will come when my BMS format will use all over the world.\n", "file_path": "SPECIFICATION.md", "rank": 82, "score": 9.520002679674263 }, { "content": " self.peek_token().is_none()\n\n }\n\n\n\n fn get_token(&self) -> std::ops::Range<usize> {\n\n fn is_separator(c: char) -> bool {\n\n c.is_whitespace() || c == '\\n'\n\n }\n\n let next_token_start = self.source[self.index..]\n\n .find(|c: char| !is_separator(c))\n\n .map_or(self.source.len(), |i| i + self.index);\n\n let next_token_end = self.source[next_token_start..]\n\n .trim_start()\n\n .find(is_separator)\n\n .map_or(self.source.len(), |i| i + next_token_start);\n\n next_token_start..next_token_end\n\n }\n\n\n\n pub(crate) fn peek_token(&self) -> Option<&'a str> {\n\n let ret = self.get_token();\n\n if ret.is_empty() {\n", "file_path": "src/lex/cursor.rs", "rank": 83, "score": 8.884667724015728 }, { "content": "//! This will be placed as:\n\n//!\n\n//! ```text\n\n//! 003|--|--------------|\n\n//! | |03 |\n\n//! | |03 |\n\n//! | | |\n\n//! | |03 |\n\n//! 002|--|03------------|\n\n//! | | [] [] [] |\n\n//! |()|[] [] [] []|\n\n//! |-----------------|\n\n//! 
```\n\n\n\n#![warn(missing_docs)]\n\n\n\npub mod lex;\n\npub mod parse;\n\n\n\nuse self::{lex::LexError, parse::ParseError};\n", "file_path": "src/lib.rs", "rank": 84, "score": 8.453338027994164 }, { "content": " key: *key,\n\n obj,\n\n });\n\n }\n\n }\n\n &Token::LnObj(end_id) => {\n\n let mut end_note = self\n\n .remove(end_id)\n\n .ok_or(ParseError::UndefinedObject(end_id))?;\n\n let Obj { offset, key, .. } = &end_note;\n\n let (_, &begin_id) =\n\n self.ids_by_key[key].range(..offset).last().ok_or_else(|| {\n\n ParseError::SyntaxError(format!(\n\n \"expected preceding object for #LNOBJ {:?}\",\n\n end_id\n\n ))\n\n })?;\n\n let mut begin_note = self.remove(begin_id).unwrap();\n\n begin_note.kind = NoteKind::Long;\n\n end_note.kind = NoteKind::Long;\n\n self.push(begin_note);\n\n self.push(end_note);\n\n }\n\n _ => {}\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/parse/notes.rs", "rank": 85, "score": 8.021631669553623 }, { "content": " }\n\n Token::Message {\n\n track,\n\n channel: Channel::Bgm,\n\n message,\n\n } => {\n\n let denominator = message.len() as u32 / 2;\n\n for (i, (c1, c2)) in message.chars().tuples().into_iter().enumerate() {\n\n let id = c1.to_digit(36).unwrap() * 36 + c2.to_digit(36).unwrap();\n\n if id == 0 {\n\n continue;\n\n }\n\n let obj = (id as u16).try_into().unwrap();\n\n self.bgms\n\n .entry(ObjTime::new(track.0, i as u32, denominator))\n\n .and_modify(|vec| vec.push(obj))\n\n .or_insert_with(Vec::new);\n\n }\n\n }\n\n Token::Message {\n", "file_path": "src/parse/notes.rs", "rank": 86, "score": 7.519215377800424 }, { "content": "//! The BMS format parser.\n\n//!\n\n//! Be-Music Source, called BMS for short, is a file format devised by Urao Yane in 1998 for a simulator of the game Beatmania by KONAMI. This describes what and when notes are arranged and its music metadata. It is a plain text file with some \"command\" lines starting with `#` character.\n\n//!\n\n//! # Usage\n\n//!\n\n//! 
At first, you can get the tokens stream with [`lex::parse`]. Then pass it and the random generator to [`parse::Bms::from_token_stream`] to get the notes data. Because BMS format has some randomized syntax.\n\n//!\n\n//! ```\n\n//! use bms_rs::{\n\n//! lex::parse,\n\n//! parse::{rng::RngMock, Bms},\n\n//! };\n\n//!\n\n//! let source = std::fs::read_to_string(\"tests/lilith_mx.bms\").unwrap();\n\n//! let token_stream = parse(&source).expect(\"must be parsed\");\n\n//! let rng = RngMock([1]);\n\n//! let bms = Bms::from_token_stream(&token_stream, rng).expect(\"must be parsed\");\n\n//! ```\n\n//!\n", "file_path": "src/lib.rs", "rank": 87, "score": 7.195217301404094 }, { "content": " return None;\n\n }\n\n Some(&self.source[ret])\n\n }\n\n\n\n pub(crate) fn next_token(&mut self) -> Option<&'a str> {\n\n let ret = self.get_token();\n\n if ret.is_empty() {\n\n return None;\n\n }\n\n let advanced_lines = self.source[self.index..ret.end]\n\n .chars()\n\n .filter(|&c| c == '\\n')\n\n .count();\n\n self.line += advanced_lines;\n\n if advanced_lines != 0 {\n\n self.col = 1;\n\n }\n\n self.col += self.source[self.index..ret.end]\n\n .lines()\n", "file_path": "src/lex/cursor.rs", "rank": 88, "score": 7.025744283378273 }, { "content": " offset: ObjTime::new(1, 1, 4),\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key5,\n\n obj: id55,\n\n },\n\n Obj {\n\n offset: ObjTime::new(1, 3, 4),\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key4,\n\n obj: id44,\n\n }\n\n ]\n\n );\n\n\n\n let rng = RngMock([1, 2]);\n\n let bms = Bms::from_token_stream(&ts, rng).expect(\"must be parsed\");\n\n assert_eq!(\n\n bms.notes.into_all_notes(),\n", "file_path": "tests/nested_random.rs", "rank": 89, "score": 6.582245661894062 }, { "content": " let rng = RngMock([1]);\n\n let bms = Bms::from_token_stream(&ts, rng).expect(\"must be parsed\");\n\n assert_eq!(\n\n bms.notes.into_all_notes(),\n\n vec![\n\n Obj {\n\n offset: ObjTime::new(1, 0, 4),\n\n kind: 
NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key1,\n\n obj: id11,\n\n },\n\n Obj {\n\n offset: ObjTime::new(1, 1, 4),\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key2,\n\n obj: id22,\n\n },\n\n Obj {\n", "file_path": "tests/nested_random.rs", "rank": 90, "score": 6.432888574337115 }, { "content": " Token::If(rand_target) => {\n\n if let Some(&ClauseState::Random(rand)) = self.random_stack.last() {\n\n self.random_stack.push(ClauseState::If(rand_target == rand));\n\n Break(Ok(()))\n\n } else {\n\n Break(Err(ParseError::SyntaxError(\n\n \"#IF command must be in #RANDOM - #ENDRANDOM block\".into(),\n\n )))\n\n }\n\n }\n\n Token::ElseIf(rand_target) => {\n\n if let Some(ClauseState::If(_)) = self.random_stack.last() {\n\n self.random_stack.pop();\n\n let rand = match self.random_stack.last().unwrap() {\n\n &ClauseState::Random(rand) => rand,\n\n ClauseState::If(_) => unreachable!(),\n\n };\n\n self.random_stack.push(ClauseState::If(rand_target == rand));\n\n Break(Ok(()))\n\n } else {\n", "file_path": "src/parse/random.rs", "rank": 91, "score": 6.395029338258228 }, { "content": " obj: id66,\n\n },\n\n Obj {\n\n offset: ObjTime::new(1, 3, 4),\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key4,\n\n obj: id44,\n\n }\n\n ]\n\n );\n\n\n\n let rng = RngMock([2]);\n\n let bms = Bms::from_token_stream(&ts, rng).expect(\"must be parsed\");\n\n assert_eq!(\n\n bms.notes.into_all_notes(),\n\n vec![\n\n Obj {\n\n offset: ObjTime::new(1, 0, 4),\n\n kind: NoteKind::Visible,\n", "file_path": "tests/nested_random.rs", "rank": 92, "score": 5.953344198013653 }, { "content": "//! Random generator for parsing BMS format.\n\n//!\n\n//! 
[`RngMock`] can be used for testing the parser result with some random scopes.\n\n\n\nuse std::ops::RangeInclusive;\n\n\n\n/// A random generator for parsing BMS.\n", "file_path": "src/parse/rng.rs", "rank": 93, "score": 5.783006376030665 }, { "content": "\n\n pub(crate) fn err_expected_token(&self, message: &'static str) -> LexError {\n\n LexError::ExpectedToken {\n\n line: self.line(),\n\n col: self.col(),\n\n message,\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/lex/cursor.rs", "rank": 94, "score": 5.445675720855448 }, { "content": " } else if let Some(ClauseState::If(false)) = self.random_stack.last() {\n\n self.random_stack.push(ClauseState::Random(0));\n\n Break(Ok(()))\n\n } else {\n\n self.random_stack\n\n .push(ClauseState::Random(self.rng.gen(1..=rand_max)));\n\n Break(Ok(()))\n\n }\n\n }\n\n Token::EndRandom => {\n\n if let Some(&ClauseState::Random(_)) = self.random_stack.last() {\n\n self.random_stack.pop();\n\n Break(Ok(()))\n\n } else {\n\n Break(Err(ParseError::SyntaxError(\n\n \"#ENDRANDOM command must come after #RANDOM block\".into(),\n\n )))\n\n }\n\n }\n\n _ => {\n", "file_path": "src/parse/random.rs", "rank": 95, "score": 4.81980387476799 }, { "content": " Break(Err(ParseError::SyntaxError(\n\n \"#ELSEIF command must come after #IF block\".into(),\n\n )))\n\n }\n\n }\n\n Token::EndIf => {\n\n if let Some(ClauseState::If(_)) = self.random_stack.last() {\n\n self.random_stack.pop();\n\n Break(Ok(()))\n\n } else {\n\n Break(Err(ParseError::SyntaxError(\n\n \"#ENDIF command must come after #IF or #ELSEIF block\".into(),\n\n )))\n\n }\n\n }\n\n Token::Random(rand_max) => {\n\n if let Some(&ClauseState::Random(_)) = self.random_stack.last() {\n\n Break(Err(ParseError::SyntaxError(\n\n \"#RANDOM command must come in root or #IF block\".into(),\n\n )))\n", "file_path": "src/parse/random.rs", "rank": 96, "score": 4.18644089664499 }, { "content": " is_player1: true,\n\n key: Key::Key1,\n\n obj: id11,\n\n },\n\n Obj {\n\n offset: ObjTime::new(1, 
2, 4),\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key3,\n\n obj: id33,\n\n },\n\n Obj {\n\n offset: ObjTime::new(1, 3, 4),\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key4,\n\n obj: id44,\n\n }\n\n ]\n\n );\n\n}\n", "file_path": "tests/nested_random.rs", "rank": 97, "score": 3.3788157290014715 }, { "content": " is_player1: true,\n\n key: Key::Key1,\n\n },\n\n message: \"010101\",\n\n },\n\n Message {\n\n track: Track(2),\n\n channel: Channel::Note {\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key1,\n\n },\n\n message: \"00020202\",\n\n },\n\n ]\n\n );\n\n }\n\n}\n", "file_path": "src/lex.rs", "rank": 98, "score": 3.3553671061130057 }, { "content": " vec![\n\n Obj {\n\n offset: ObjTime::new(1, 0, 4),\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key1,\n\n obj: id11,\n\n },\n\n Obj {\n\n offset: ObjTime::new(1, 1, 4),\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Key2,\n\n obj: id22,\n\n },\n\n Obj {\n\n offset: ObjTime::new(1, 2, 4),\n\n kind: NoteKind::Visible,\n\n is_player1: true,\n\n key: Key::Scratch,\n", "file_path": "tests/nested_random.rs", "rank": 99, "score": 3.250139788830041 } ]
Rust
src/common/core.rs
hbeimf/crust
2cc9414ef5ad57133ad25de2193ba1734798a9de
use crate::common::{CommonError, Result, State}; use maidsafe_utilities::thread::{self, Joiner}; use mio::{Event, Events, Poll, PollOpt, Ready, Token}; use mio_extras::channel::{self, Receiver, Sender}; use mio_extras::timer::{Timeout, Timer}; use std::cell::RefCell; use std::collections::HashMap; use std::rc::Rc; use std::sync::mpsc::TryRecvError; use std::time::Duration; const EVENT_CAPACITY: usize = 1024; const CHANNEL_TOKEN_OFFSET: usize = 0; const TIMER_TOKEN_OFFSET: usize = CHANNEL_TOKEN_OFFSET + 1; const USER_TOKEN_OFFSET: usize = TIMER_TOKEN_OFFSET + 1; pub struct EventLoop<T> { tx: Sender<CoreMessage<T>>, _joiner: Joiner, } impl<T> EventLoop<T> { pub fn send(&self, msg: CoreMessage<T>) -> Result<()> { self.tx.send(msg).map_err(|_e| CommonError::CoreMsgTx) } } impl<T> Drop for EventLoop<T> { fn drop(&mut self) { if let Err(e) = self.tx.send(CoreMessage(None)) { warn!( "Could not send a terminator to event-loop. We will possibly not be able to \ gracefully exit. Error: {:?}", e ); } } } pub fn spawn_event_loop<T: 'static, F>( token_counter_start: usize, event_loop_id: Option<&str>, init_user_data: F, ) -> Result<EventLoop<T>> where F: 'static + FnOnce() -> T + Send, { let poll = Poll::new()?; let (tx, rx) = channel::channel(); let timer = Timer::default(); poll.register( &rx, Token(token_counter_start + CHANNEL_TOKEN_OFFSET), Ready::readable(), PollOpt::edge(), )?; poll.register( &timer, Token(token_counter_start + TIMER_TOKEN_OFFSET), Ready::readable(), PollOpt::edge(), )?; let mut name = "CRUST-Event-Loop".to_string(); if let Some(id) = event_loop_id { name.push_str(": "); name.push_str(id); } let tx_clone = tx.clone(); let joiner = thread::named(name, move || { let user_data = init_user_data(); let core = Core::new( token_counter_start + USER_TOKEN_OFFSET, tx_clone, timer, user_data, ); match event_loop_impl(token_counter_start, &poll, &rx, core) { Ok(()) => trace!("Graceful event loop exit."), Err(e) => error!("Event loop killed due to {:?}", e), } }); 
Ok(EventLoop { tx, _joiner: joiner, }) } fn event_loop_impl<T>( token_counter_start: usize, poll: &Poll, rx: &Receiver<CoreMessage<T>>, mut core: Core<T>, ) -> Result<()> { let mut events = Events::with_capacity(EVENT_CAPACITY); 'event_loop: loop { let _ = poll.poll(&mut events, None)?; for event in events.iter() { match event.token() { Token(t) if t == token_counter_start + CHANNEL_TOKEN_OFFSET => { if !event.readiness().is_readable() { warn!( "Communication channel to event loop errored out: {:?}", event ); continue; } loop { let msg = match rx.try_recv() { Ok(msg) => msg, Err(TryRecvError::Empty) => break, Err(TryRecvError::Disconnected) => break 'event_loop, }; match msg.0 { Some(mut f) => f(&mut core, poll), None => break 'event_loop, } } } Token(t) if t == token_counter_start + TIMER_TOKEN_OFFSET => { core.handle_timer(poll, event.readiness()) } _ => core.handle_event(poll, event), } } } Ok(()) } type CoreMessageHandler<T> = Box<FnMut(&mut Core<T>, &Poll) + Send>; pub struct CoreMessage<T>(Option<CoreMessageHandler<T>>); #[derive(Hash, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Debug)] pub struct CoreTimer { pub state_id: Token, pub timer_id: u8, } pub struct Core<T> { tx: Sender<CoreMessage<T>>, timer: Timer<CoreTimer>, token_counter: usize, states: HashMap<Token, Rc<RefCell<State<T>>>>, user_data: T, } impl<T> Core<T> { fn new( token_counter_start: usize, tx: Sender<CoreMessage<T>>, timer: Timer<CoreTimer>, user_data: T, ) -> Self { Core { tx, timer, token_counter: token_counter_start, states: HashMap::new(), user_data, } } #[cfg(test)] pub fn new_for_tests( token_counter_start: usize, tx: Sender<CoreMessage<T>>, timer: Timer<CoreTimer>, user_data: T, ) -> Self { Self::new(token_counter_start, tx, timer, user_data) } pub fn sender(&self) -> &Sender<CoreMessage<T>> { &self.tx } pub fn set_timeout(&mut self, interval: Duration, core_timer: CoreTimer) -> Timeout { self.timer.set_timeout(interval, core_timer) } pub fn cancel_timeout(&mut self, timeout: 
&Timeout) -> Option<CoreTimer> { self.timer.cancel_timeout(timeout) } pub fn get_new_token(&mut self) -> Token { let token = Token(self.token_counter); self.token_counter += 1; token } pub fn insert_state( &mut self, token: Token, state: Rc<RefCell<State<T>>>, ) -> Option<Rc<RefCell<State<T>>>> { self.states.insert(token, state) } pub fn remove_state(&mut self, token: Token) -> Option<Rc<RefCell<State<T>>>> { self.states.remove(&token) } pub fn get_state(&self, key: Token) -> Option<Rc<RefCell<State<T>>>> { self.states.get(&key).cloned() } pub fn user_data(&self) -> &T { &self.user_data } pub fn user_data_mut(&mut self) -> &mut T { &mut self.user_data } fn handle_event(&mut self, poll: &Poll, event: Event) { if let Some(state) = self.get_state(event.token()) { state.borrow_mut().ready(self, poll, event.readiness()); } } fn handle_timer(&mut self, poll: &Poll, kind: Ready) { if !kind.is_readable() { warn!("Timer errored out: {:?}", kind); return; } while let Some(core_timer) = self.timer.poll() { if let Some(state) = self.get_state(core_timer.state_id) { state.borrow_mut().timeout(self, poll, core_timer.timer_id); } } } } impl<T> CoreMessage<T> { pub fn new<F: FnOnce(&mut Core<T>, &Poll) + Send + 'static>(f: F) -> Self { let mut f = Some(f); CoreMessage(Some(Box::new(move |core: &mut Core<T>, poll: &Poll| { if let Some(f) = f.take() { f(core, poll) } }))) } } impl CoreTimer { pub fn new(state_id: Token, timer_id: u8) -> Self { CoreTimer { state_id, timer_id } } }
use crate::common::{CommonError, Result, State}; use maidsafe_utilities::thread::{self, Joiner}; use mio::{Event, Events, Poll, PollOpt, Ready, Token}; use mio_extras::channel::{self, Receiver, Sender}; use mio_extras::timer::{Timeout, Timer}; use std::cell::RefCell; use std::collections::HashMap; use std::rc::Rc; use std::sync::mpsc::TryRecvError; use std::time::Duration; const EVENT_CAPACITY: usize = 1024; const CHANNEL_TOKEN_OFFSET: usize = 0; const TIMER_TOKEN_OFFSET: usize = CHANNEL_TOKEN_OFFSET + 1; const USER_TOKEN_OFFSET: usize = TIMER_TOKEN_OFFSET + 1; pub struct EventLoop<T> { tx: Sender<CoreMessage<T>>, _joiner: Joiner, } impl<T> EventLoop<T> { pub fn send(&self, msg: CoreMessage<T>) -> Result<()> { self.tx.send(msg).map_err(|_e| CommonError::CoreMsgTx) } } impl<T> Drop for EventLoop<T> { fn drop(&mut self) { if let Err(e) = self.tx.send(CoreMessage(None)) { warn!( "Could not send a terminator to event-loop. We will possibly not be able to \ gracefully exit. Error: {:?}", e ); } } } pub fn spawn_event_loop<T: 'static, F>( token_counter_start: usize, event_loop_id: Option<&str>, init_user_data: F, ) -> Result<EventLoop<T>> where F: 'static + FnOnce() -> T + Send, { let poll = Poll::new()?; let (tx, rx) = channel::channel(); let timer = Timer::default(); poll.register( &rx, Token(token_counter_start + CHANNEL_TOKEN_OFFSET), Ready::readable(), PollOpt::edge(), )?; poll.register( &timer, Token(token_counter_start + TIMER_TOKEN_OFFSET), Ready::readable(), PollOpt::edge(), )?; let mut name = "CRUST-Event-Loop".to_string(); if let Some(id) = event_loop_id { name.push_str(": "); name.push_str(id); } let tx_clone = tx.clone(); let joiner = thread::named(name, move || { let user_data = init_user_data(); let core = Core::new( token_counter_start + USER_TOKEN_OFFSET, tx_clone, timer, user_data, ); match event_loop_impl(token_counter_start, &poll, &rx, core) { Ok(()) => trace!("Graceful event loop exit."), Err(e) => error!("Event loop killed due to {:?}", e), } }); 
Ok(EventLoop { tx, _joiner: joiner, }) } fn event_loop_impl<T>( token_counter_start: usize, poll: &Poll, rx: &Receiver<CoreMessage<T>>, mut core: Core<T>, ) -> Result<()> { let mut events = Events::with_capacity(EVENT_CAPACITY); 'event_loop: loop { let _ = poll.poll(&mut events, None)?; for event in events.iter() { match event.token() { Token(t) if t == token_counter_start + CHANNEL_TOKEN_OFFSET => { if !event.readiness().is_readable() { warn!( "Communication channel to event loop errored out: {:?}", event ); continue; } loop { let msg = match rx.try_recv() { Ok(msg) => msg, Err(TryRecvError::Empty) => break, Err(TryRecvError::Disconnected) => break 'event_loop, }; match msg.0 { Some(mut f) => f(&mut core, poll), None => break 'event_loop, } } } Token(t) if t == token_counter_start + TIMER_TOKEN_OFFSET => { core.handle_timer(poll, event.readiness()) } _ => core.handle_event(poll, event), } } } Ok(()) } type CoreMessageHandler<T> = Box<FnMut(&mut Core<T>, &Poll) + Send>; pub struct CoreMessage<T>(Option<CoreMessageHandler<T>>); #[derive(Hash, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Debug)] pub struct CoreTimer { pub state_id: Token, pub timer_id: u8, } pub struct Core<T> { tx: Sender<CoreMessage<T>>, timer: Timer<CoreTimer>, token_counter: usize, states: HashMap<Token, Rc<RefCell<State<T>>>>, user_data: T, } impl<T> Core<T> { fn new( token_counter_start: usize, tx: Sender<CoreMessage<T>>, timer: Timer<CoreTimer>, user_data: T, ) -> Self { Core { tx, timer, token_counter: token_counter_start, states: HashMap::new(), user_data, } } #[cfg(test)] pub fn new_for_tests( token_counter_start: usize, tx: Sender<CoreMessage<T>>, timer: Timer<CoreTimer>, user_data: T, ) -> Self { Self::new(token_counter_start, tx, timer, user_data) } pub fn sender(&self) -> &Sender<CoreMessage<T>> { &self.tx } pub fn set_timeout(&mut self, interval: Duration, core_timer: CoreTimer) -> Timeout { self.timer.set_timeout(interval, core_timer) } pub fn cancel_timeout(&mut self, timeout: 
&Timeout) -> Option<CoreTimer> { self.timer.cancel_timeout(timeout) } pub fn get_new_token(&mut self) -> Token { let token = Token(self.token_counter); self.token_counter += 1; token } pub fn insert_state( &mut self, token: Token, state: Rc<RefCell<State<T>>>, ) -> Option<Rc<RefCell<State<T>>>> { self.states.insert(token, state) } pub fn remove_state(&mut self, token: Token) -> Option<Rc<RefCell<State<T>>>> { self.states.remove(&token) } pub fn get_state(&self, key: Token) -> Option<Rc<RefCell<State<T>>>> { self.states.get(&key).cloned() } pub fn user_data(&self) -> &T { &self.user_data } pub fn user_data_mut(&mut self) -> &mut T { &mut self.user_data } fn handle_event(&mut self, poll: &Poll, event: Event) { if let Some(state) = self.get_state(event.token()) { state.borrow_mut().ready(self, poll, event.readiness()); } } fn handle_timer(&mut self, poll: &Poll, kind: Ready) { if !kind.is_readable() { warn!("Timer errored out: {:?}", kind); return; } while let Some(core_timer) = self.timer.poll() { if let Some(state) = self.get_state(core_timer.state_id) { state.borrow_mut().timeout(self, poll, core_timer.timer_id); }
r_id: u8) -> Self { CoreTimer { state_id, timer_id } } }
} } } impl<T> CoreMessage<T> { pub fn new<F: FnOnce(&mut Core<T>, &Poll) + Send + 'static>(f: F) -> Self { let mut f = Some(f); CoreMessage(Some(Box::new(move |core: &mut Core<T>, poll: &Poll| { if let Some(f) = f.take() { f(core, poll) } }))) } } impl CoreTimer { pub fn new(state_id: Token, time
random
[ { "content": "/// Puts given peer contacts into bootstrap cache which is then written to disk.\n\npub fn cache_peer_info(core: &mut EventLoopCore, peer_info: PeerInfo, config: &CrustConfig) {\n\n let hard_coded_peers = &unwrap!(config.lock()).cfg.hard_coded_contacts;\n\n if hard_coded_peers.contains(&peer_info) {\n\n debug!(\"Connecting to hard coded peer - it won't be cached.\");\n\n return;\n\n }\n\n\n\n let bootstrap_cache = core.user_data_mut();\n\n bootstrap_cache.put(peer_info);\n\n if let Err(e) = bootstrap_cache.commit() {\n\n info!(\"Failed to write bootstrap cache to disk: {}\", e);\n\n }\n\n}\n\n\n", "file_path": "src/main/bootstrap/mod.rs", "rank": 2, "score": 164411.807384852 }, { "content": "pub fn get_event_sender() -> (crate::CrustEventSender<UniqueId>, Receiver<Event<UniqueId>>) {\n\n let (category_tx, _) = mpsc::channel();\n\n let (event_tx, event_rx) = mpsc::channel();\n\n\n\n (\n\n MaidSafeObserver::new(event_tx, MaidSafeEventCategory::Crust, category_tx),\n\n event_rx,\n\n )\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 3, "score": 160307.8956304794 }, { "content": "#[allow(unused)]\n\npub fn timebomb<R, F>(dur: Duration, f: F) -> R\n\nwhere\n\n R: Send,\n\n F: Send + FnOnce() -> R,\n\n{\n\n crossbeam::scope(|scope| {\n\n let thread_handle = thread::current();\n\n let (done_tx, done_rx) = mpsc::channel::<()>();\n\n let jh = scope.spawn(move || {\n\n let ret = f();\n\n drop(done_tx);\n\n thread_handle.unpark();\n\n ret\n\n });\n\n thread::park_timeout(dur);\n\n match done_rx.try_recv() {\n\n Err(mpsc::TryRecvError::Empty) => panic!(\"Timed out!\"),\n\n Ok(()) | Err(mpsc::TryRecvError::Disconnected) => jh.join(),\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 4, "score": 150840.32131283265 }, { "content": "/// Creates `Core` for tests with some defaults.\n\npub fn test_core(bootstrap_cache: BootstrapCache) -> EventLoopCore {\n\n let (event_tx, _event_rx) = channel();\n\n let timer = 
timer::Builder::default().build();\n\n EventLoopCore::new_for_tests(0, event_tx, timer, bootstrap_cache)\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 5, "score": 149528.6711259905 }, { "content": "// If bootstrap doesn't succeed in n seconds and we're trying to run the speed\n\n// test, then fail overall. Otherwise, if no peer endpoints were provided and\n\n// bootstrapping fails, assume this is OK, i.e. this is the first node of a new\n\n// network.\n\nfn on_time_out(timeout: Duration, flag_speed: bool) -> Sender<bool> {\n\n let (tx, rx) = channel();\n\n let _ = std::thread::spawn(move || {\n\n std::thread::sleep(timeout);\n\n match rx.try_recv() {\n\n Ok(true) => {}\n\n _ => {\n\n if flag_speed {\n\n println!(\"Failed to connect to a peer. Exiting.\");\n\n std::process::exit(3);\n\n }\n\n println!(\n\n \"Didn't bootstrap to an existing network - this may be the first node \\\n\n of a new network.\"\n\n );\n\n }\n\n }\n\n });\n\n\n\n tx\n\n}\n\n\n", "file_path": "examples/crust_peer.rs", "rank": 6, "score": 135865.97268157973 }, { "content": "/// A convevience method to build IPv4 address with a port number.\n\npub fn ipv4_addr(a: u8, b: u8, c: u8, d: u8, port: u16) -> SocketAddr {\n\n SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(a, b, c, d), port))\n\n}\n\n\n\nmod core;\n\nmod error;\n\nmod message;\n\nmod state;\n", "file_path": "src/common/mod.rs", "rank": 8, "score": 124922.29702877506 }, { "content": "pub fn new_reusably_bound_tcp_socket(local_addr: &SocketAddr) -> io::Result<TcpBuilder> {\n\n let socket = match local_addr.ip() {\n\n IpAddr::V4(..) => TcpBuilder::new_v4()?,\n\n IpAddr::V6(..) 
=> TcpBuilder::new_v6()?,\n\n };\n\n let _ = socket.reuse_address(true)?;\n\n enable_so_reuseport(&socket)?;\n\n let _ = socket.bind(local_addr)?;\n\n\n\n Ok(socket)\n\n}\n\n\n", "file_path": "src/nat/util.rs", "rank": 9, "score": 116623.85652740701 }, { "content": "#[cfg(target_family = \"windows\")]\n\npub fn enable_so_reuseport(_sock: &TcpBuilder) -> io::Result<()> {\n\n Ok(())\n\n}\n\n\n", "file_path": "src/nat/util.rs", "rank": 10, "score": 112498.9556646262 }, { "content": "#[cfg(target_family = \"unix\")]\n\npub fn enable_so_reuseport(sock: &TcpBuilder) -> io::Result<()> {\n\n use net2::unix::UnixTcpBuilderExt;\n\n let _ = sock.reuse_port(true)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/nat/util.rs", "rank": 11, "score": 112498.9556646262 }, { "content": "fn generate_random_vec_u8(size: usize) -> Vec<u8> {\n\n let mut vec: Vec<u8> = Vec::with_capacity(size);\n\n for _ in 0..size {\n\n vec.push(rand::random::<u8>());\n\n }\n\n vec\n\n}\n\n\n", "file_path": "examples/crust_peer.rs", "rank": 12, "score": 111209.08815989044 }, { "content": "pub trait State<T> {\n\n fn as_any(&mut self) -> &mut Any;\n\n\n\n fn ready(&mut self, _core: &mut Core<T>, _poll: &Poll, _kind: Ready) {}\n\n\n\n fn terminate(&mut self, _core: &mut Core<T>, _poll: &Poll) {}\n\n\n\n fn timeout(&mut self, _core: &mut Core<T>, _poll: &Poll, _timer_id: u8) {}\n\n\n\n fn write(&mut self, _core: &mut Core<T>, _poll: &Poll, _data: Vec<u8>, _priority: Priority) {}\n\n}\n", "file_path": "src/common/state.rs", "rank": 13, "score": 107088.9324612265 }, { "content": "fn test_service() -> (Service, Receiver<Event<UniqueId>>) {\n\n let config = gen_config();\n\n let (event_tx, event_rx) = get_event_sender();\n\n let service = unwrap!(Service::with_config(event_tx, config, rand::random()));\n\n (service, event_rx)\n\n}\n\n\n\nmod connect {\n\n use super::*;\n\n\n\n #[test]\n\n fn successfully_connected_peer_contacts_are_cached() {\n\n let (mut service1, event_rx1) = test_service();\n\n let (service2, 
event_rx2) = test_service();\n\n\n\n unwrap!(service1.start_listening_tcp());\n\n expect_event!(event_rx1, Event::ListenerStarted(_port) => ());\n\n unwrap!(service1.set_ext_reachability_test(false));\n\n let uid1 = service1.id();\n\n\n", "file_path": "src/tests/mod.rs", "rank": 14, "score": 103591.65054477323 }, { "content": "#[derive(Debug, Clone, Copy, Serialize, Deserialize, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\nstruct UniqueId([u8; 20]);\n\nimpl Uid for UniqueId {}\n\n\n\nimpl Distribution<UniqueId> for Standard {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> UniqueId {\n\n UniqueId(rng.gen())\n\n }\n\n}\n\n\n", "file_path": "examples/crust_peer.rs", "rank": 15, "score": 98612.68987827198 }, { "content": "/// Constructs peer info with random generated public key.\n\npub fn peer_info_with_rand_key(addr: SocketAddr) -> PeerInfo {\n\n let (pk, _) = gen_encrypt_keypair();\n\n PeerInfo::new(addr, pk)\n\n}\n", "file_path": "src/tests/utils.rs", "rank": 16, "score": 94175.31376313993 }, { "content": "#[test]\n\nfn drop_peer_when_no_message_received_within_inactivity_period() {\n\n use self::broken_peer;\n\n use crate::common::{spawn_event_loop, CoreMessage};\n\n use mio::net::TcpListener;\n\n\n\n // Spin up the non-responsive peer.\n\n let el = unwrap!(spawn_event_loop(0, None, || ()));\n\n\n\n let bind_addr = unwrap!(SocketAddr::from_str(\"127.0.0.1:0\"), \"Could not parse addr\");\n\n let listener = unwrap!(TcpListener::bind(&bind_addr), \"Could not bind listener\");\n\n let (listener_pk, listener_sk) = gen_encrypt_keypair();\n\n let address = PeerInfo::new(unwrap!(listener.local_addr()), listener_pk);\n\n\n\n unwrap!(el.send(CoreMessage::new(move |core, poll| {\n\n broken_peer::Listen::start(core, poll, listener, listener_pk, listener_sk)\n\n })));\n\n\n\n // Spin up normal service that will connect to the above guy.\n\n let mut config = gen_config();\n\n config.hard_coded_contacts = vec![address];\n", "file_path": "src/tests/mod.rs", "rank": 17, 
"score": 92427.72679699899 }, { "content": "// Generate config with unique bootstrap cache name.\n\npub fn gen_config() -> Config {\n\n let mut config = Config::default();\n\n config.bootstrap_cache_name = Some(bootstrap_cache_tmp_file().into());\n\n config\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 18, "score": 88081.47156491759 }, { "content": "#[derive(Debug, PartialEq)]\n\n#[repr(usize)]\n\nenum EventToken {\n\n Bootstrap,\n\n ServiceDiscovery,\n\n Listener,\n\n ConfigRefresher,\n\n Unreserved,\n\n}\n\n\n\nimpl From<EventToken> for Token {\n\n fn from(token: EventToken) -> Token {\n\n Token(token as usize)\n\n }\n\n}\n\n\n\nconst SERVICE_DISCOVERY_DEFAULT_PORT: u16 = 5484;\n\n\n\nconst DISABLE_NAT: bool = true;\n\n\n\n/// A structure representing all the Crust services. This is the main object through which crust is\n\n/// used.\n", "file_path": "src/main/service.rs", "rank": 19, "score": 86656.40224993156 }, { "content": "/// Generates random unique id.\n\npub fn rand_uid() -> UniqueId {\n\n rand::thread_rng().gen()\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 20, "score": 85750.58425555006 }, { "content": "/// Bootstrap cache on tmp directory with unique file name.\n\npub fn test_bootstrap_cache() -> BootstrapCache {\n\n let cache_file = bootstrap_cache_tmp_file().into();\n\n BootstrapCache::new(Some(cache_file))\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 21, "score": 83612.56730753332 }, { "content": "/// Constructs random bootstrap cache file name.\n\npub fn bootstrap_cache_tmp_file() -> PathBuf {\n\n let fname = format!(\"{:016x}.bootstrap.cache\", rand::random::<u64>());\n\n let mut path = env::temp_dir();\n\n path.push(fname);\n\n path\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 22, "score": 81634.03088470851 }, { "content": "#[test]\n\nfn drop_disconnects() {\n\n let config_0 = gen_config();\n\n let (event_tx_0, event_rx_0) = get_event_sender();\n\n let mut service_0 = 
unwrap!(Service::with_config(event_tx_0, config_0, rand::random()));\n\n\n\n unwrap!(service_0.start_listening_tcp());\n\n let port = expect_event!(event_rx_0, Event::ListenerStarted(port) => port);\n\n unwrap!(service_0.set_accept_bootstrap(true));\n\n\n\n let mut config_1 = gen_config();\n\n config_1.hard_coded_contacts = vec![localhost_contact_info(port, service_0.pub_key())];\n\n\n\n let (event_tx_1, event_rx_1) = get_event_sender();\n\n let mut service_1 = unwrap!(Service::with_config(event_tx_1, config_1, rand::random()));\n\n\n\n unwrap!(service_1.start_bootstrap(HashSet::new(), CrustUser::Client));\n\n\n\n let peer_id_0 = expect_event!(event_rx_1, Event::BootstrapConnect(peer_id, _) => peer_id);\n\n expect_event!(event_rx_0, Event::BootstrapAccept(_peer_id, _));\n\n\n", "file_path": "src/tests/mod.rs", "rank": 23, "score": 78996.02686184611 }, { "content": "fn handle_new_peer(\n\n service: &Service,\n\n protected_network: Arc<Mutex<Network>>,\n\n peer_id: UniqueId,\n\n) -> usize {\n\n let mut network = unwrap!(protected_network.lock());\n\n let peer_index = network.next_peer_index();\n\n let _ = network.nodes.insert(peer_index, peer_id);\n\n network.print_connected_nodes(service);\n\n peer_index\n\n}\n\n\n", "file_path": "examples/crust_peer.rs", "rank": 24, "score": 76222.50323766485 }, { "content": "/// Returns a hash of the network name.\n\nfn name_hash(network_name: &Option<String>) -> NameHash {\n\n trace!(\"Network name: {:?}\", network_name);\n\n match *network_name {\n\n Some(ref name) => safe_crypto::hash(name.as_bytes()),\n\n None => [0; HASH_SIZE],\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::common::CrustUser;\n\n use crate::main::{self, Event};\n\n use crate::tests::{get_event_sender, timebomb, UniqueId};\n\n use crate::CrustError;\n\n use maidsafe_utilities;\n\n use maidsafe_utilities::thread::Joiner;\n\n use rand;\n\n use std::collections::{hash_map, HashMap};\n\n use std::sync::atomic::{AtomicUsize, 
Ordering, ATOMIC_USIZE_INIT};\n", "file_path": "src/main/service.rs", "rank": 25, "score": 76127.34010517069 }, { "content": "/// A replacement for `Ipv4Addr::is_global` while we wait for that to enter stable.\n\npub fn ipv4_addr_is_global(ipv4: Ipv4Addr) -> bool {\n\n !(ipv4.is_loopback()\n\n || ipv4.is_private()\n\n || ipv4.is_link_local()\n\n || ipv4.is_multicast()\n\n || ipv4.is_broadcast()\n\n || ipv4.is_documentation()\n\n || ipv4.octets() == [0, 0, 0, 0])\n\n}\n\n\n", "file_path": "src/nat/util.rs", "rank": 26, "score": 74809.98900366103 }, { "content": "/// Reads the default crust config file.\n\npub fn read_config_file() -> crate::Res<Config> {\n\n let file_handler = FileHandler::new(&get_file_name()?, false)?;\n\n let cfg = file_handler.read_file()?;\n\n Ok(cfg)\n\n}\n\n\n\n/// Writes a Crust config file **for use by tests and examples**.\n\n///\n\n/// The file is written to the [`current_bin_dir()`](file_handler/fn.current_bin_dir.html)\n\n/// with the appropriate file name.\n\n///\n\n/// N.B. This method should only be used as a utility for test and examples. 
In normal use cases,\n\n/// this file should be created by the installer for the dependent application.\n", "file_path": "src/main/config_handler.rs", "rank": 27, "score": 74809.98900366103 }, { "content": "/// A replacement for `IpAddr::is_global` while we wait for that to enter stable.\n\npub fn ip_addr_is_global(ip: &IpAddr) -> bool {\n\n match *ip {\n\n IpAddr::V4(addr_v4) => ipv4_addr_is_global(addr_v4),\n\n IpAddr::V6(addr_v6) => ipv6_addr_is_global(addr_v6),\n\n }\n\n}\n\n\n", "file_path": "src/nat/util.rs", "rank": 28, "score": 74809.98900366103 }, { "content": "/// A replacement for `Ipv6Addr::is_global` while we wait for that to enter stable.\n\npub fn ipv6_addr_is_global(ipv6: Ipv6Addr) -> bool {\n\n // TODO(canndrew): This function is incomplete and may return false-positives.\n\n !(ipv6.is_loopback() || ipv6.is_unspecified())\n\n}\n", "file_path": "src/nat/util.rs", "rank": 29, "score": 74809.98900366103 }, { "content": "#[test]\n\nfn bootstrap_timeouts_if_there_are_only_invalid_contacts() {\n\n use std::net::TcpListener;\n\n\n\n let deaf_listener = unwrap!(TcpListener::bind(\"127.0.0.1:0\"));\n\n let (pk, _sk) = gen_encrypt_keypair();\n\n let address = PeerInfo::new(unwrap!(deaf_listener.local_addr()), pk);\n\n\n\n let mut config = gen_config();\n\n config.hard_coded_contacts = vec![address];\n\n\n\n let (event_tx, event_rx) = get_event_sender();\n\n let mut service = unwrap!(Service::with_config(event_tx, config, rand::random()));\n\n\n\n unwrap!(service.start_bootstrap(HashSet::new(), CrustUser::Client));\n\n expect_event!(event_rx, Event::BootstrapFailed);\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 30, "score": 73676.37433435858 }, { "content": "#[test]\n\nfn bootstrap_two_services_using_service_discovery() {\n\n let mut config0 = gen_config();\n\n let service0_discovery_port = gen_service_discovery_port();\n\n config0.service_discovery_listener_port = Some(service0_discovery_port);\n\n\n\n let (event_tx0, event_rx0) = 
get_event_sender();\n\n let mut service0 = unwrap!(Service::with_config(event_tx0, config0, rand::random()));\n\n\n\n let (event_tx1, event_rx1) = get_event_sender();\n\n let mut config1 = gen_config();\n\n config1.service_discovery_listener_port = Some(gen_service_discovery_port());\n\n config1.service_discovery_port = Some(service0_discovery_port);\n\n let mut service1 = unwrap!(Service::with_config(event_tx1, config1, rand::random()));\n\n\n\n unwrap!(service1.start_listening_tcp());\n\n let _ = expect_event!(event_rx1, Event::ListenerStarted(port) => port);\n\n\n\n service0.start_service_discovery();\n\n service0.set_service_discovery_listen(true);\n\n unwrap!(service0.start_listening_tcp());\n", "file_path": "src/tests/mod.rs", "rank": 31, "score": 69201.82769452207 }, { "content": "#[test]\n\nfn do_not_drop_peer_even_when_no_data_messages_are_exchanged_within_inactivity_period() {\n\n use crate::main::INACTIVITY_TIMEOUT_MS;\n\n use std::thread;\n\n use std::time::Duration;\n\n\n\n let config0 = gen_config();\n\n let (event_tx0, event_rx0) = get_event_sender();\n\n let mut service0 = unwrap!(Service::with_config(event_tx0, config0, rand::random()));\n\n\n\n unwrap!(service0.start_listening_tcp());\n\n let port0 = expect_event!(event_rx0, Event::ListenerStarted(port) => port);\n\n unwrap!(service0.set_accept_bootstrap(true));\n\n\n\n let mut config1 = gen_config();\n\n config1.hard_coded_contacts = vec![localhost_contact_info(port0, service0.pub_key())];\n\n\n\n let (event_tx1, event_rx1) = get_event_sender();\n\n let mut service1 = unwrap!(Service::with_config(event_tx1, config1, rand::random()));\n\n\n\n unwrap!(service1.start_bootstrap(HashSet::new(), CrustUser::Client));\n", "file_path": "src/tests/mod.rs", "rank": 32, "score": 63644.67275163777 }, { "content": "fn get_file_name() -> crate::Res<OsString> {\n\n let mut name = config_file_handler::exe_file_stem()?;\n\n name.push(\".crust.config\");\n\n Ok(name)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use 
super::Config;\n\n use serde_json;\n\n use std::io::Read;\n\n use std::path::Path;\n\n\n\n #[test]\n\n fn parse_sample_config_file() {\n\n let path = Path::new(\"installer/sample.config\").to_path_buf();\n\n\n\n let mut file = match ::std::fs::File::open(path) {\n\n Ok(file) => file,\n\n Err(what) => panic!(format!(\"CrustError opening sample.config: {:?}\", what)),\n", "file_path": "src/main/config_handler.rs", "rank": 33, "score": 60981.75943190629 }, { "content": "#[cfg(test)]\n\n#[allow(dead_code)]\n\npub fn write_config_file(hard_coded_contacts: Option<Vec<PeerInfo>>) -> crate::Res<PathBuf> {\n\n use serde_json;\n\n use std::io::Write;\n\n\n\n let mut config = Config::default();\n\n\n\n if let Some(contacts) = hard_coded_contacts {\n\n config.hard_coded_contacts = contacts;\n\n }\n\n\n\n let mut config_path = config_file_handler::current_bin_dir()?;\n\n config_path.push(get_file_name()?);\n\n let mut file = ::std::fs::File::create(&config_path)?;\n\n write!(\n\n &mut file,\n\n \"{}\",\n\n unwrap!(serde_json::to_string_pretty(&config))\n\n )?;\n\n file.sync_all()?;\n\n Ok(config_path)\n\n}\n\n\n", "file_path": "src/main/config_handler.rs", "rank": 34, "score": 57495.58420049118 }, { "content": "/// /////////////////////////////////////////////////////////////////////////////\n\n///\n\n/// Network\n\n///\n\n/// /////////////////////////////////////////////////////////////////////////////\n\nstruct Network {\n\n nodes: HashMap<usize, UniqueId>,\n\n our_connection_infos: BTreeMap<u32, PrivConnectionInfo>,\n\n performance_start: Instant,\n\n performance_interval: Duration,\n\n received_msgs: u32,\n\n received_bytes: usize,\n\n peer_index: usize,\n\n connection_info_index: u32,\n\n}\n\n\n\n// simple \"routing table\" without any structure\n\nimpl Network {\n\n pub fn new() -> Network {\n\n Network {\n\n nodes: HashMap::new(),\n\n our_connection_infos: BTreeMap::new(),\n\n performance_start: Instant::now(),\n\n performance_interval: Duration::from_secs(10),\n\n 
received_msgs: 0,\n", "file_path": "examples/crust_peer.rs", "rank": 35, "score": 56591.540811320825 }, { "content": "struct Heartbeat {\n\n recv_timeout: Timeout,\n\n recv_timer: CoreTimer,\n\n send_timeout: Timeout,\n\n send_timer: CoreTimer,\n\n}\n\n\n\nimpl Heartbeat {\n\n fn try_new(core: &mut EventLoopCore, state_id: Token) -> crate::Res<Self> {\n\n let recv_timer = CoreTimer::new(state_id, 0);\n\n let recv_timeout =\n\n core.set_timeout(Duration::from_millis(INACTIVITY_TIMEOUT_MS), recv_timer);\n\n\n\n let send_timer = CoreTimer::new(state_id, 1);\n\n let send_timeout = core.set_timeout(Duration::from_millis(HEARTBEAT_PERIOD_MS), send_timer);\n\n\n\n Ok(Heartbeat {\n\n recv_timeout,\n\n recv_timer,\n\n send_timeout,\n", "file_path": "src/main/active_connection.rs", "rank": 36, "score": 55049.0243598753 }, { "content": "struct Inner {\n\n file_name: Option<OsString>,\n\n peers: HashSet<PeerInfo>,\n\n}\n\n\n\nimpl Cache {\n\n /// Constructs new bootstrap cache. You can optionally specify the file name which will\n\n /// be used to read/write the cache to. 
If no file name is give, the default path is used, see\n\n /// `#get_default_file_name()`.\n\n pub fn new(file_name: Option<OsString>) -> Self {\n\n let inner = Inner {\n\n file_name,\n\n peers: Default::default(),\n\n };\n\n Cache {\n\n inner: Rc::new(RefCell::new(inner)),\n\n }\n\n }\n\n\n\n /// Default bootstrap cache file name is executable file + '.bootstrap.cache' suffix.\n", "file_path": "src/main/bootstrap/cache.rs", "rank": 37, "score": 55049.0243598753 }, { "content": "fn localhost_contact_info(port: u16, pk: PublicEncryptKey) -> PeerInfo {\n\n use std::net::IpAddr;\n\n let addr = SocketAddr::new(unwrap!(IpAddr::from_str(\"127.0.0.1\")), port);\n\n PeerInfo::new(addr, pk)\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 38, "score": 54334.04443995001 }, { "content": "struct ServiceDiscMeta {\n\n rx: Receiver<Vec<PeerInfo>>,\n\n timeout: Timeout,\n\n}\n\n\n", "file_path": "src/main/bootstrap/mod.rs", "rank": 39, "score": 52362.902913085345 }, { "content": "enum NextState<UID> {\n\n None,\n\n ActiveConnection(UID, CrustUser),\n\n ConnectionCandidate(UID),\n\n}\n", "file_path": "src/main/connection_listener/exchange_msg.rs", "rank": 40, "score": 52040.76743119023 }, { "content": "/// Trait for specifying a unique identifier for a Crust peer\n\npub trait Uid:\n\n 'static\n\n + Send\n\n + fmt::Debug\n\n + Clone\n\n + Copy\n\n + Eq\n\n + PartialEq\n\n + Ord\n\n + PartialOrd\n\n + Hash\n\n + Serialize\n\n + DeserializeOwned\n\n{\n\n}\n\n\n\n/// Information necessary to connect to peer.\n\n#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug, Serialize, Deserialize)]\n\npub struct PeerInfo {\n\n /// Peer public address.\n", "file_path": "src/common/mod.rs", "rank": 41, "score": 50600.423599918366 }, { "content": "fn main() {\n\n unwrap!(maidsafe_utilities::log::init(true));\n\n\n\n let matches = App::new(\"crust_peer\")\n\n .about(\n\n \"The crust peer will run, using any config file it can find to \\\n\n try and bootstrap off any provided peers.\",\n\n )\n\n 
.arg(\n\n Arg::with_name(\"discovery-port\")\n\n .long(\"discovery-port\")\n\n .value_name(\"PORT\")\n\n .help(\"Set the port for local network service discovery\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"speed\")\n\n .short(\"s\")\n\n .long(\"speed\")\n\n .value_name(\"RATE\")\n", "file_path": "examples/crust_peer.rs", "rank": 42, "score": 49328.698077166846 }, { "content": "#[test]\n\nfn bootstrap_with_blacklist() {\n\n use std::net::TcpListener;\n\n\n\n let (event_tx0, event_rx0) = get_event_sender();\n\n let mut service0 = unwrap!(Service::with_config(\n\n event_tx0,\n\n Config::default(),\n\n rand::random()\n\n ));\n\n unwrap!(service0.start_listening_tcp());\n\n let port = expect_event!(event_rx0, Event::ListenerStarted(port) => port);\n\n unwrap!(service0.set_accept_bootstrap(true));\n\n let valid_address = localhost_contact_info(port, service0.pub_key());\n\n\n\n let blacklisted_listener = unwrap!(TcpListener::bind(\"127.0.0.1:0\"));\n\n let blacklisted_address = PeerInfo::new(\n\n unwrap!(blacklisted_listener.local_addr()),\n\n service0.pub_key(),\n\n );\n\n\n", "file_path": "src/tests/mod.rs", "rank": 43, "score": 47810.90113435128 }, { "content": "/// Runs service discovery state with a timeout. 
When timeout happens, `Bootstrap::timeout()`\n\n/// callback is called.\n\nfn seek_peers(\n\n core: &mut EventLoopCore,\n\n service_discovery_token: Token,\n\n token: Token,\n\n) -> crate::Res<(Receiver<Vec<PeerInfo>>, Timeout)> {\n\n if let Some(state) = core.get_state(service_discovery_token) {\n\n let mut state = state.borrow_mut();\n\n let state = unwrap!(state\n\n .as_any()\n\n .downcast_mut::<ServiceDiscovery<BootstrapCache>>());\n\n\n\n let (obs, rx) = mpsc::channel();\n\n state.register_observer(obs);\n\n state.seek_peers()?;\n\n let timeout = core.set_timeout(\n\n Duration::from_secs(SERVICE_DISCOVERY_TIMEOUT_SEC),\n\n CoreTimer::new(token, SERVICE_DISCOVERY_TIMER_ID),\n\n );\n\n\n\n Ok((rx, timeout))\n\n } else {\n\n Err(CrustError::ServiceDiscNotEnabled)\n\n }\n\n}\n\n\n", "file_path": "src/main/bootstrap/mod.rs", "rank": 44, "score": 46438.70053293163 }, { "content": "#[test]\n\nfn bootstrap_fails_if_there_are_no_contacts() {\n\n let config = gen_config();\n\n let (event_tx, event_rx) = get_event_sender();\n\n let mut service = unwrap!(Service::with_config(event_tx, config, rand::random()));\n\n\n\n unwrap!(service.start_bootstrap(HashSet::new(), CrustUser::Client));\n\n expect_event!(event_rx, Event::BootstrapFailed);\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 45, "score": 46429.837205700154 }, { "content": "#[test]\n\nfn bootstrap_with_multiple_contact_endpoints() {\n\n use std::net::TcpListener;\n\n\n\n let (event_tx0, event_rx0) = get_event_sender();\n\n let mut service0 = unwrap!(Service::with_config(\n\n event_tx0,\n\n Config::default(),\n\n rand::random()\n\n ));\n\n unwrap!(service0.start_listening_tcp());\n\n let port = expect_event!(event_rx0, Event::ListenerStarted(port) => port);\n\n unwrap!(service0.set_accept_bootstrap(true));\n\n let valid_address = localhost_contact_info(port, service0.pub_key());\n\n\n\n let deaf_listener = unwrap!(TcpListener::bind(\"127.0.0.1:0\"));\n\n let invalid_address = 
PeerInfo::new(unwrap!(deaf_listener.local_addr()), service0.pub_key());;\n\n\n\n let mut config1 = gen_config();\n\n config1.hard_coded_contacts = vec![invalid_address, valid_address];\n\n\n", "file_path": "src/tests/mod.rs", "rank": 46, "score": 45167.825972236526 }, { "content": "/// Peers from bootstrap cache and hard coded contacts are shuffled individually.\n\nfn shuffled_bootstrap_peers(\n\n cached_peers: HashSet<PeerInfo>,\n\n config: CrustConfig,\n\n blacklist: HashSet<SocketAddr>,\n\n) -> Vec<PeerInfo> {\n\n let mut peers = Vec::with_capacity(MAX_CONTACTS_EXPECTED);\n\n let mut rng = rand::thread_rng();\n\n\n\n let mut cached: Vec<_> = cached_peers.iter().cloned().collect();\n\n cached.shuffle(&mut rng);\n\n peers.extend(cached);\n\n\n\n let mut hard_coded = unwrap!(config.lock()).cfg.hard_coded_contacts.clone();\n\n hard_coded.shuffle(&mut rng);\n\n peers.extend(hard_coded);\n\n\n\n peers.retain(|peer| !blacklist.contains(&peer.addr));\n\n peers\n\n}\n\n\n", "file_path": "src/main/bootstrap/mod.rs", "rank": 47, "score": 45167.825972236526 }, { "content": "#[test]\n\nfn bootstrap_fails_only_blacklisted_contact() {\n\n use std::net::TcpListener;\n\n\n\n let blacklisted_listener = unwrap!(TcpListener::bind(\"127.0.0.1:0\"));\n\n let (pk, _sk) = gen_encrypt_keypair();\n\n let blacklisted_address = PeerInfo::new(unwrap!(blacklisted_listener.local_addr()), pk);\n\n\n\n let mut config = gen_config();\n\n config.hard_coded_contacts = vec![blacklisted_address];\n\n let (event_tx, event_rx) = get_event_sender();\n\n let mut service = unwrap!(Service::with_config(event_tx, config, rand::random()));\n\n\n\n let mut blacklist = HashSet::new();\n\n let _ = blacklist.insert(blacklisted_address.addr);\n\n unwrap!(service.start_bootstrap(blacklist, CrustUser::Client));\n\n\n\n expect_event!(event_rx, Event::BootstrapFailed);\n\n\n\n let blacklisted_listener = unwrap!(mio::net::TcpListener::from_std(blacklisted_listener));\n\n thread::sleep(Duration::from_secs(5));\n\n let 
res = blacklisted_listener.accept();\n\n assert!(res.is_err())\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 48, "score": 45167.825972236526 }, { "content": "type Service = main::Service<UniqueId>;\n\n\n", "file_path": "src/tests/mod.rs", "rank": 49, "score": 45096.86831149044 }, { "content": "type Service = crust::Service<UniqueId>;\n\n\n", "file_path": "examples/crust_peer.rs", "rank": 50, "score": 45096.86831149044 }, { "content": "#[test]\n\nfn bootstrap_with_skipped_external_reachability_test() {\n\n let config = Config::default();\n\n let (event_tx0, event_rx0) = get_event_sender();\n\n let mut service0 = unwrap!(Service::with_config(event_tx0, config, rand::random()));\n\n unwrap!(service0.start_listening_tcp());\n\n let port = expect_event!(event_rx0, Event::ListenerStarted(port) => port);\n\n unwrap!(service0.set_accept_bootstrap(true));\n\n unwrap!(service0.set_ext_reachability_test(false));\n\n\n\n let mut config1 = gen_config();\n\n config1.hard_coded_contacts = vec![localhost_contact_info(port, service0.pub_key())];\n\n\n\n let (event_tx1, event_rx1) = get_event_sender();\n\n let mut service1 = unwrap!(Service::with_config(event_tx1, config1, rand::random()));\n\n unwrap!(service1.start_bootstrap(HashSet::new(), CrustUser::Node));\n\n\n\n let peer_id0 = expect_event!(event_rx1, Event::BootstrapConnect(peer_id, _) => peer_id);\n\n assert_eq!(peer_id0, service0.id());\n\n\n\n let peer_id1 = expect_event!(event_rx0, Event::BootstrapAccept(peer_id, _) => peer_id);\n\n assert_eq!(peer_id1, service1.id());\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 51, "score": 44010.109374921885 }, { "content": "#[test]\n\nfn bootstrap_two_services_and_exchange_messages() {\n\n let (mut service0, event_rx0) = test_service();\n\n unwrap!(service0.start_listening_tcp());\n\n\n\n let port0 = expect_event!(event_rx0, Event::ListenerStarted(port) => port);\n\n unwrap!(service0.set_accept_bootstrap(true));\n\n\n\n let mut config1 = gen_config();\n\n 
config1.hard_coded_contacts = vec![localhost_contact_info(port0, service0.pub_key())];\n\n\n\n let (event_tx1, event_rx1) = get_event_sender();\n\n let mut service1 = unwrap!(Service::with_config(event_tx1, config1, rand::random()));\n\n\n\n unwrap!(service1.start_bootstrap(HashSet::new(), CrustUser::Client));\n\n\n\n let peer_id0 = expect_event!(event_rx1, Event::BootstrapConnect(peer_id, _) => peer_id);\n\n assert_eq!(peer_id0, service0.id());\n\n\n\n let peer_id1 = expect_event!(event_rx0,\n\n Event::BootstrapAccept(peer_id, CrustUser::Client) => peer_id);\n", "file_path": "src/tests/mod.rs", "rank": 52, "score": 44010.109374921885 }, { "content": "fn gen_service_discovery_port() -> u16 {\n\n const BASE: u16 = 40_000;\n\n static COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;\n\n\n\n BASE + COUNTER.fetch_add(1, Ordering::Relaxed) as u16\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 53, "score": 42249.45682165911 }, { "content": "type PrivConnectionInfo = crust::PrivConnectionInfo<UniqueId>;\n", "file_path": "examples/crust_peer.rs", "rank": 54, "score": 41234.22058847288 }, { "content": "fn parse_user_command(cmd: &str) -> Option<UserCommand> {\n\n let app = App::new(\"cli\")\n\n .setting(AppSettings::NoBinaryName)\n\n .subcommand(\n\n SubCommand::with_name(\"prepare-connection-info\").about(\"Prepare a connection info\"),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"connect\")\n\n .about(\"Initiate a connection to the remote peer\")\n\n .arg(\n\n Arg::with_name(\"our-info-id\")\n\n .help(\"The ID of the connection info we gave to the peer\")\n\n .required(true)\n\n .index(1),\n\n )\n\n .arg(\n\n Arg::with_name(\"their-info\")\n\n .help(\"The connection info received from the peer\")\n\n .required(true)\n\n .index(2),\n", "file_path": "examples/crust_peer.rs", "rank": 55, "score": 35663.37911230032 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// 
http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::common::Core;\n\nuse mio::{Poll, Ready};\n\nuse std::any::Any;\n\n\n\nuse socket_collection::Priority;\n\n\n", "file_path": "src/common/state.rs", "rank": 56, "score": 34447.35176463313 }, { "content": " /// Invoked when we bootstrap to a new peer.\n\n BootstrapConnect(UID, SocketAddr),\n\n /// Invoked when we failed to connect to all bootstrap contacts.\n\n BootstrapFailed,\n\n /// Invoked when we are ready to listen for incomming connection. Contains\n\n /// the listening port.\n\n ListenerStarted(u16),\n\n /// Invoked when listener failed to start.\n\n ListenerFailed,\n\n /// Invoked as a result to the call of `Service::prepare_contact_info`.\n\n ConnectionInfoPrepared(ConnectionInfoResult<UID>),\n\n /// Invoked when connection to a new peer has been established.\n\n ConnectSuccess(UID),\n\n /// Invoked when connection to a new peer has failed.\n\n ConnectFailure(UID),\n\n /// Invoked when a peer disconnects or can no longer be contacted.\n\n LostPeer(UID),\n\n /// Invoked when a new message is received. Passes the message.\n\n NewMessage(UID, CrustUser, Vec<u8>),\n\n /// Invoked when trying to sending a too large data.\n\n WriteMsgSizeProhibitive(UID, Vec<u8>),\n\n}\n", "file_path": "src/main/event.rs", "rank": 57, "score": 34369.39394045091 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. 
This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse super::ConnectionInfoResult;\n\n\n\nuse crate::common::{CrustUser, Uid};\n\nuse std::net::SocketAddr;\n\n\n\n/// Enum representing different events that will be sent over the asynchronous channel to the user\n\n/// of this module.\n\n#[derive(Debug)]\n\npub enum Event<UID: Uid> {\n\n /// Invoked when a bootstrap peer connects to us\n\n BootstrapAccept(UID, CrustUser),\n", "file_path": "src/main/event.rs", "rank": 58, "score": 34368.269755877234 }, { "content": "// ========================================================================================\n\n#[derive(Debug, Clone, Copy)]\n\npub struct ConnectionId {\n\n pub active_connection: Option<Token>,\n\n pub currently_handshaking: usize,\n\n}\n\n\n\n// ========================================================================================\n\n// ConnectionInfoResult\n\n// ========================================================================================\n\n/// The result of a `Service::prepare_contact_info` call.\n\n#[derive(Debug)]\n\npub struct ConnectionInfoResult<UID> {\n\n /// The token that was passed to `prepare_connection_info`.\n\n pub result_token: u32,\n\n /// The new contact info, if successful.\n\n pub result: crate::Res<PrivConnectionInfo<UID>>,\n\n}\n\n\n\n// ========================================================================================\n", "file_path": "src/main/types.rs", "rank": 59, "score": 34351.694477390636 }, { "content": " }\n\n}\n\n\n\n/// Crust event loop state object. 
It is owned by the same thread event loop is running on,\n\n/// it holds bootstrap cache and manages Crust states like `Connect`, `ConnectionCandidate`, etc.\n\npub type EventLoopCore = Core<BootstrapCache>;\n\n\n\n/// Handle to Crust event loop that owns `EventLoopCore`.\n\npub type EventLoop = common::EventLoop<BootstrapCache>;\n\n\n\npub type ConnectionMap<UID> = Arc<Mutex<HashMap<UID, ConnectionId>>>;\n\npub type CrustConfig = Arc<Mutex<ConfigWrapper>>;\n", "file_path": "src/main/types.rs", "rank": 60, "score": 34344.69913915961 }, { "content": " our_pk: self.our_pk,\n\n }\n\n }\n\n}\n\n\n\n// ========================================================================================\n\n// PubConnectionInfo\n\n// ========================================================================================\n\n/// Contact info used to connect to another peer.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct PubConnectionInfo<UID> {\n\n #[doc(hidden)]\n\n pub id: UID,\n\n #[doc(hidden)]\n\n pub for_direct: Vec<SocketAddr>,\n\n #[doc(hidden)]\n\n pub our_pk: PublicEncryptKey,\n\n}\n\n\n\nimpl<UID: Uid> PubConnectionInfo<UID> {\n", "file_path": "src/main/types.rs", "rank": 61, "score": 34340.28815227628 }, { "content": " /// Returns the `UID` of the node that created this connection info.\n\n pub fn id(&self) -> UID {\n\n self.id\n\n }\n\n}\n\n\n\n// ========================================================================================\n\n// ConfigWrapper\n\n// ========================================================================================\n\n#[derive(Default)]\n\npub struct ConfigWrapper {\n\n pub cfg: Config,\n\n pub is_modified_for_next_refresh: bool,\n\n}\n\nimpl ConfigWrapper {\n\n pub fn new(cfg: Config) -> Self {\n\n Self {\n\n cfg,\n\n is_modified_for_next_refresh: false,\n\n }\n", "file_path": "src/main/types.rs", "rank": 62, "score": 34338.734297657444 }, { "content": "// PrivConnectionInfo\n\n// 
========================================================================================\n\n/// Contact info generated by a call to `Service::prepare_contact_info`.\n\n#[derive(Debug)]\n\npub struct PrivConnectionInfo<UID> {\n\n #[doc(hidden)]\n\n pub id: UID,\n\n #[doc(hidden)]\n\n pub for_direct: Vec<SocketAddr>,\n\n #[doc(hidden)]\n\n pub our_pk: PublicEncryptKey,\n\n}\n\n\n\nimpl<UID: Uid> PrivConnectionInfo<UID> {\n\n /// Use private connection info to create public connection info that can be shared with the\n\n /// peer.\n\n pub fn to_pub_connection_info(&self) -> PubConnectionInfo<UID> {\n\n PubConnectionInfo {\n\n for_direct: self.for_direct.clone(),\n\n id: self.id,\n", "file_path": "src/main/types.rs", "rank": 63, "score": 34338.42548829254 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::common::{self, Core, Uid};\n\nuse crate::main::bootstrap::Cache as BootstrapCache;\n\nuse crate::main::Config;\n\nuse mio::Token;\n\nuse safe_crypto::PublicEncryptKey;\n\nuse std::collections::HashMap;\n\nuse std::net::SocketAddr;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n// ========================================================================================\n\n// ConnectionId\n", "file_path": "src/main/types.rs", "rank": 64, "score": 34336.5760300234 }, { "content": " }\n\n\n\n pub fn check_for_update_and_mark_modified(&mut self, new_cfg: Config) {\n\n if self.cfg != new_cfg {\n\n self.cfg = new_cfg;\n\n self.is_modified_for_next_refresh = true;\n\n }\n\n }\n\n\n\n /// Checks if `ActiveConnection` refresh is needed.\n\n pub fn check_for_refresh_and_reset_modified(&mut self, new_cfg: Config) -> bool {\n\n let should_refresh = if self.cfg != new_cfg {\n\n self.cfg = new_cfg;\n\n true\n\n } else {\n\n self.is_modified_for_next_refresh\n\n };\n\n\n\n self.is_modified_for_next_refresh = false;\n\n should_refresh\n", "file_path": "src/main/types.rs", "rank": 65, "score": 34335.77829487814 }, { "content": " ZeroByteRead {\n\n description(\"Read zero bytes from the socket - indicates EOF\")\n\n }\n\n /// CoreMessage send error\n\n CoreMsgTx {\n\n display(\"CoreMessage channel was destroyed\")\n\n }\n\n }\n\n}\n", "file_path": "src/common/error.rs", "rank": 66, "score": 34332.101286643876 }, { "content": " from()\n\n }\n\n /// Common module errors\n\n Common(e: common::CommonError) {\n\n description(\"Common module error\")\n\n from()\n\n }\n\n /// CoreMessage send error\n\n CoreMsgTx {\n\n display(\"CoreMessage channel was destroyed\")\n\n }\n\n /// Peer not found\n\n PeerNotFound {\n\n description(\"Peer not found\")\n\n }\n\n /// Serialisation error\n\n Serialisation(e: SerialisationError) {\n\n 
description(\"Serialisation error\")\n\n display(\"Serialisation error: {}\", e)\n\n cause(e)\n", "file_path": "src/main/error.rs", "rank": 67, "score": 34331.30539226953 }, { "content": " /// Crust's universal error type.\n\n #[derive(Debug)]\n\n pub enum CrustError {\n\n /// Failed receiving from an mpsc::channel\n\n ChannelRecv(e: mpsc::RecvError) {\n\n description(\"Channel receive error\")\n\n display(\"Channel receive error: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n /// Config file handling errors\n\n ConfigFileHandler(e: config_file_handler::Error) {\n\n description(\"Config file handling error\")\n\n display(\"Config file handling error: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n /// Wrapper for a `std::io::Error`\n\n Io(e: io::Error) {\n\n description(\"IO error\")\n", "file_path": "src/main/error.rs", "rank": 68, "score": 34329.218203860146 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::common::CommonError;\n\nuse socket_collection::SocketError;\n\nuse std::io;\n\n\n\nquick_error! 
{\n\n /// Nat Traversal specific error\n\n #[derive(Debug)]\n\n pub enum NatError {\n\n /// IO error\n\n Io(e: io::Error) {\n\n description(\"Io error during nat traversal\")\n", "file_path": "src/nat/error.rs", "rank": 69, "score": 34325.402167496366 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse maidsafe_utilities::serialisation::SerialisationError;\n\nuse std::io;\n\n\n\nquick_error! {\n\n /// Common module specific error\n\n #[derive(Debug)]\n\n pub enum CommonError {\n\n /// IO error\n\n Io(e: io::Error) {\n\n description(e.description())\n\n display(\"Io error: {}\", e)\n", "file_path": "src/common/error.rs", "rank": 70, "score": 34325.23367050774 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::common;\n\nuse crate::nat;\n\nuse crate::service_discovery;\n\nuse config_file_handler;\n\nuse maidsafe_utilities::serialisation::SerialisationError;\n\nuse safe_crypto;\n\nuse socket_collection::SocketError;\n\nuse std::io;\n\nuse std::sync::mpsc;\n\n\n\nquick_error! {\n", "file_path": "src/main/error.rs", "rank": 71, "score": 34320.87261093797 }, { "content": " from()\n\n }\n\n /// Requested connect to self\n\n RequestedConnectToSelf {\n\n description(\"Requested connection to self\")\n\n display(\"Requested connection to self\")\n\n }\n\n /// Listener is not initialised yet.\n\n ListenerNotIntialised {\n\n description(\"Listener is not initialised yet\")\n\n display(\"Listener is not initialised yet\")\n\n }\n\n /// `socket-collection` error\n\n SocketError(e: SocketError) {\n\n display(\"Socket error: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n /// Crypto error.\n\n Crypto(e: safe_crypto::Error) {\n\n display(\"Crypto error: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n }\n\n}\n", "file_path": "src/main/error.rs", "rank": 72, "score": 34318.190020847826 }, { "content": " cause(e)\n\n from()\n\n }\n\n /// Socket is uninitialised and invalid for any operation\n\n UninitialisedSocket {\n\n description(\"Socket is uninitialised and invalid for any operation\")\n\n display(\"Socket is uninitialised and invalid for any operation\")\n\n }\n\n /// Size of a message to send or about to be read is too large\n\n PayloadSizeProhibitive {\n\n description(\"Payload is too large\")\n\n }\n\n /// Serialisation error\n\n Serialisation(e: SerialisationError) {\n\n description(e.description())\n\n display(\"Serialisation error: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n /// A zero byte socket read - means EOF\n", "file_path": "src/common/error.rs", "rank": 73, "score": 34317.358799982736 }, { "content": " display(\"Io 
error during nat traversal: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n /// Common error\n\n CommonError(e: CommonError) {\n\n description(e.description())\n\n display(\"NatError: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n /// `socket-collection` error\n\n SocketError(e: SocketError) {\n\n display(\"Socket error: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n }\n\n}\n", "file_path": "src/nat/error.rs", "rank": 74, "score": 34315.59335369223 }, { "content": " display(\"IO error: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n /// ServiceDiscovery not enabled yet\n\n ServiceDiscNotEnabled {\n\n description(\"ServiceDiscovery is not yet enabled or registered\")\n\n }\n\n /// ServiceDiscovery Errors\n\n ServiceDisc(e: service_discovery::ServiceDiscoveryError) {\n\n description(\"ServiceDiscovery error\")\n\n from()\n\n }\n\n /// ServiceDiscovery not enabled yet\n\n InsufficientConnectionInfo {\n\n description(\"Not enough information to initiate connection to peer\")\n\n }\n\n /// Nat Traversal errors\n\n Nat(e: nat::NatError) {\n\n description(\"Nat Traversal module error\")\n", "file_path": "src/main/error.rs", "rank": 75, "score": 34315.19698218037 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse maidsafe_utilities::serialisation::SerialisationError;\n\nuse socket_collection::SocketError;\n\nuse std::io;\n\nuse std::net::AddrParseError;\n\n\n\nquick_error! 
{\n\n #[derive(Debug)]\n\n pub enum ServiceDiscoveryError {\n\n Io(e: io::Error) {\n\n description(\"Io error during service discovery\")\n\n display(\"Io error during service discovery: {}\", e)\n", "file_path": "src/service_discovery/errors.rs", "rank": 87, "score": 32626.809218156402 }, { "content": " from()\n\n }\n\n AddrParse(e: AddrParseError) {\n\n description(\"Error parsing address for service discovery\")\n\n display(\"Error parsing address for service discovery: {}\", e)\n\n from()\n\n }\n\n Serialisation(e: SerialisationError) {\n\n description(\"Serialisation error during service discovery\")\n\n display(\"Serialisation error during service discovery: {}\", e)\n\n from()\n\n }\n\n /// `socket-collection` error\n\n SocketError(e: SocketError) {\n\n display(\"Socket error: {}\", e)\n\n cause(e)\n\n from()\n\n }\n\n }\n\n}\n", "file_path": "src/service_discovery/errors.rs", "rank": 88, "score": 32617.08300132224 }, { "content": " Ok(_) => (*self.finish)(core, poll, token, Some(socket)),\n\n Err(e) => {\n\n warn!(\"Failed to set socket encrypt context: {}\", e);\n\n self.handle_error(core, poll);\n\n }\n\n }\n\n }\n\n Ok(None) => (),\n\n Ok(Some(_)) | Err(_) => self.handle_error(core, poll),\n\n }\n\n }\n\n\n\n fn handle_error(&mut self, core: &mut EventLoopCore, poll: &Poll) {\n\n self.terminate(core, poll);\n\n let token = self.token;\n\n (*self.finish)(core, poll, token, None);\n\n }\n\n}\n\n\n\nimpl<UID: Uid> State<BootstrapCache> for ExchangeMsg<UID> {\n", "file_path": "src/main/connect/exchange_msg.rs", "rank": 89, "score": 30722.103356160518 }, { "content": " poll: &Poll,\n\n msg: Option<(Message<UID>, Priority)>,\n\n ) {\n\n if self.socket.write(msg).is_err() {\n\n self.handle_error(core, poll);\n\n }\n\n }\n\n\n\n fn receive_response(&mut self, core: &mut EventLoopCore, poll: &Poll) {\n\n match self.socket.read::<Message<UID>>() {\n\n Ok(Some(Message::ConnectResponse(their_uid, name_hash))) => {\n\n if their_uid != self.expected_id || name_hash != 
self.expected_nh {\n\n return self.handle_error(core, poll);\n\n }\n\n let _ = core.remove_state(self.token);\n\n let token = self.token;\n\n\n\n let mut socket = mem::replace(&mut self.socket, Default::default());\n\n match socket.set_encrypt_ctx(EncryptContext::authenticated(self.shared_key.clone()))\n\n {\n", "file_path": "src/main/connect/exchange_msg.rs", "rank": 90, "score": 30721.473664104582 }, { "content": " fn ready(&mut self, core: &mut EventLoopCore, poll: &Poll, kind: Ready) {\n\n if kind.is_writable() {\n\n let req = self.msg.take();\n\n self.write(core, poll, req);\n\n }\n\n if kind.is_readable() {\n\n self.receive_response(core, poll)\n\n }\n\n }\n\n\n\n fn terminate(&mut self, core: &mut EventLoopCore, poll: &Poll) {\n\n let _ = core.remove_state(self.token);\n\n let _ = poll.deregister(&self.socket);\n\n\n\n let mut guard = unwrap!(self.cm.lock());\n\n if let Entry::Occupied(mut oe) = guard.entry(self.expected_id) {\n\n oe.get_mut().currently_handshaking -= 1;\n\n if oe.get().currently_handshaking == 0 && oe.get().active_connection.is_none() {\n\n let _ = oe.remove();\n\n }\n", "file_path": "src/main/connect/exchange_msg.rs", "rank": 91, "score": 30715.11990691619 }, { "content": " expected_id,\n\n expected_nh: name_hash,\n\n socket,\n\n cm,\n\n msg: Some((\n\n Message::ConnectRequest(our_id, name_hash, our_global_direct_listeners, our_pk),\n\n 0,\n\n )),\n\n shared_key,\n\n finish,\n\n };\n\n\n\n let _ = core.insert_state(token, Rc::new(RefCell::new(state)));\n\n\n\n Ok(token)\n\n }\n\n\n\n fn write(\n\n &mut self,\n\n core: &mut EventLoopCore,\n", "file_path": "src/main/connect/exchange_msg.rs", "rank": 92, "score": 30711.275958168906 }, { "content": "use std::net::SocketAddr;\n\nuse std::rc::Rc;\n\n\n\n/// When connection messages are exchanged a callback is called with these parameters.\n\n/// A new mio `Token` is assigned to the given socket.\n\npub type Finish = Box<FnMut(&mut EventLoopCore, &Poll, Token, Option<TcpSock>)>;\n\n\n\n/// 
Exchanges connect messages.\n\npub struct ExchangeMsg<UID: Uid> {\n\n token: Token,\n\n expected_id: UID,\n\n expected_nh: NameHash,\n\n socket: TcpSock,\n\n cm: ConnectionMap<UID>,\n\n msg: Option<(Message<UID>, Priority)>,\n\n shared_key: SharedSecretKey,\n\n finish: Finish,\n\n}\n\n\n\nimpl<UID: Uid> ExchangeMsg<UID> {\n", "file_path": "src/main/connect/exchange_msg.rs", "rank": 93, "score": 30709.627767628546 }, { "content": " pub fn start(\n\n core: &mut EventLoopCore,\n\n poll: &Poll,\n\n socket: TcpSock,\n\n our_id: UID,\n\n expected_id: UID,\n\n name_hash: NameHash,\n\n cm: ConnectionMap<UID>,\n\n our_pk: PublicEncryptKey,\n\n shared_key: SharedSecretKey,\n\n our_global_direct_listeners: HashSet<SocketAddr>,\n\n finish: Finish,\n\n ) -> crate::Res<Token> {\n\n let token = core.get_new_token();\n\n\n\n poll.register(\n\n &socket,\n\n token,\n\n Ready::writable() | Ready::readable(),\n\n PollOpt::edge(),\n", "file_path": "src/main/connect/exchange_msg.rs", "rank": 94, "score": 30708.966470045783 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::common::{Message, NameHash, State, Uid};\n\nuse crate::main::bootstrap::Cache as BootstrapCache;\n\nuse crate::main::{ConnectionId, ConnectionMap, EventLoopCore};\n\nuse mio::{Poll, PollOpt, Ready, Token};\n\nuse safe_crypto::{PublicEncryptKey, SharedSecretKey};\n\nuse socket_collection::{EncryptContext, Priority, TcpSock};\n\nuse std::any::Any;\n\nuse std::cell::RefCell;\n\nuse std::collections::hash_map::Entry;\n\nuse std::collections::HashSet;\n\nuse std::mem;\n", "file_path": "src/main/connect/exchange_msg.rs", "rank": 95, "score": 30697.72967827941 }, { "content": " )?;\n\n\n\n {\n\n let mut guard = unwrap!(cm.lock());\n\n guard\n\n .entry(expected_id)\n\n .or_insert(ConnectionId {\n\n active_connection: None,\n\n currently_handshaking: 0,\n\n })\n\n .currently_handshaking += 1;\n\n trace!(\n\n \"Connection Map inserted: {:?} -> {:?}\",\n\n expected_id,\n\n guard.get(&expected_id)\n\n );\n\n }\n\n\n\n let state = Self {\n\n token,\n", "file_path": "src/main/connect/exchange_msg.rs", "rank": 96, "score": 30689.616138039073 }, { "content": " }\n\n trace!(\n\n \"Connection Map removed: {:?} -> {:?}\",\n\n self.expected_id,\n\n guard.get(&self.expected_id)\n\n );\n\n }\n\n\n\n fn as_any(&mut self) -> &mut Any {\n\n self\n\n }\n\n}\n", "file_path": "src/main/connect/exchange_msg.rs", "rank": 97, "score": 30678.53254005311 }, { "content": " return self.terminate(core, poll);\n\n }\n\n }\n\n\n\n match self.socket.write(msg) {\n\n Ok(true) => self.done(core, poll),\n\n Ok(false) => (),\n\n Err(e) => {\n\n debug!(\"Error in writting: {:?}\", e);\n\n self.terminate(core, poll)\n\n }\n\n }\n\n }\n\n\n\n fn done(&mut self, core: &mut EventLoopCore, poll: &Poll) {\n\n let _ = core.remove_state(self.token);\n\n let _ = core.cancel_timeout(&self.timeout);\n\n\n\n let our_uid = self.our_uid;\n\n let event_tx = 
self.event_tx.clone();\n", "file_path": "src/main/connection_listener/exchange_msg.rs", "rank": 98, "score": 29349.58679080998 }, { "content": " ) -> crate::Res<()> {\n\n let token = core.get_new_token();\n\n\n\n let kind = Ready::readable();\n\n poll.register(&socket, token, kind, PollOpt::edge())?;\n\n\n\n let timeout = core.set_timeout(\n\n Duration::from_secs(timeout_sec.unwrap_or(EXCHANGE_MSG_TIMEOUT_SEC)),\n\n CoreTimer::new(token, 0),\n\n );\n\n\n\n let state = Rc::new(RefCell::new(Self {\n\n token,\n\n cm,\n\n config,\n\n event_tx,\n\n name_hash,\n\n next_state: NextState::None,\n\n our_uid,\n\n socket,\n", "file_path": "src/main/connection_listener/exchange_msg.rs", "rank": 99, "score": 29343.860714348277 } ]
Rust
src/main.rs
adumbidiot/pikadick-rs
da2610a36f6a137543e6261dfb43d3d6fd288138
#![deny( unused_qualifications, clippy::all, unused_qualifications, unused_import_braces, unreachable_pub, trivial_numeric_casts, rustdoc::all, missing_debug_implementations, missing_copy_implementations, deprecated_in_future, meta_variable_misuse, non_ascii_idents, rust_2018_compatibility, rust_2018_idioms, future_incompatible, nonstandard_style )] #![allow(missing_doc_code_examples)] pub mod checks; pub mod client_data; pub mod commands; pub mod config; pub mod database; pub mod logger; pub mod util; use crate::{ client_data::ClientData, commands::*, config::{ ActivityKind, Config, Severity, }, database::Database, }; use anyhow::Context as _; use serenity::{ client::bridge::gateway::ShardManager, framework::standard::{ help_commands, macros::{ group, help, }, Args, CommandGroup, CommandResult, DispatchError, HelpOptions, Reason, StandardFramework, }, futures::future::BoxFuture, model::prelude::*, prelude::*, FutureExt, }; use std::{ collections::HashSet, path::Path, sync::Arc, time::{ Duration, Instant, }, }; use tokio::runtime::Builder as RuntimeBuilder; use tracing::{ error, info, warn, }; use tracing_appender::non_blocking::WorkerGuard; const TOKIO_RT_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(10); struct Handler; #[serenity::async_trait] impl EventHandler for Handler { async fn ready(&self, ctx: Context, ready: Ready) { let data_lock = ctx.data.read().await; let client_data = data_lock .get::<ClientDataKey>() .expect("missing client data"); let config = client_data.config.clone(); drop(data_lock); if let (Some(status), Some(kind)) = (config.status_name(), config.status_type()) { match kind { ActivityKind::Listening => { ctx.set_activity(Activity::listening(status)).await; } ActivityKind::Streaming => { ctx.set_activity(Activity::streaming(status, config.status_url().unwrap())) .await; } ActivityKind::Playing => { ctx.set_activity(Activity::playing(status)).await; } } } info!("logged in as '{}'", ready.user.name); } async fn resume(&self, _ctx: Context, 
resumed: ResumedEvent) { warn!("resumed connection. trace: {:?}", resumed.trace); } #[tracing::instrument(skip(self, ctx, msg), fields(author = %msg.author.id, guild = ?msg.guild_id, content = %msg.content))] async fn message(&self, ctx: Context, msg: Message) { let data_lock = ctx.data.read().await; let client_data = data_lock .get::<ClientDataKey>() .expect("missing client data"); let reddit_embed_data = client_data.reddit_embed_data.clone(); drop(data_lock); if let Err(e) = reddit_embed_data.process_msg(&ctx, &msg).await { error!("failed to generate reddit embed: {}", e); } } } #[derive(Debug, Clone, Copy)] pub struct ClientDataKey; impl TypeMapKey for ClientDataKey { type Value = ClientData; } #[help] async fn help( ctx: &Context, msg: &Message, args: Args, help_options: &'static HelpOptions, groups: &[&'static CommandGroup], owners: HashSet<UserId>, ) -> CommandResult { let _ = help_commands::with_embeds(ctx, msg, args, help_options, groups, owners) .await .is_some(); Ok(()) } #[group] #[commands( ping, nekos, r6stats, r6tracker, rule34, system, quizizz, fml, zalgo, shift, reddit_embed, invite, vaporwave, cmd, latency, uwuify, cache_stats, insta_dl, deviantart, urban, xkcd, tic_tac_toe, iqdb )] struct General; async fn handle_ctrl_c(shard_manager: Arc<Mutex<ShardManager>>) { match tokio::signal::ctrl_c().await { Ok(_) => { info!("shutting down..."); info!("stopping client..."); shard_manager.lock().await.shutdown_all().await; } Err(e) => { warn!("failed to set ctrl-c handler: {}", e); } }; } #[tracing::instrument(skip(_ctx, msg), fields(author = %msg.author.id, guild = ?msg.guild_id, content = %msg.content))] fn before_handler<'fut>( _ctx: &'fut Context, msg: &'fut Message, cmd_name: &'fut str, ) -> BoxFuture<'fut, bool> { info!("allowing command to process"); async move { true }.boxed() } fn after_handler<'fut>( _ctx: &'fut Context, _msg: &'fut Message, command_name: &'fut str, command_result: CommandResult, ) -> BoxFuture<'fut, ()> { async move { if let 
Err(e) = command_result { error!("failed to process command '{}': {}", command_name, e); } } .boxed() } fn unrecognised_command_handler<'fut>( ctx: &'fut Context, msg: &'fut Message, command_name: &'fut str, ) -> BoxFuture<'fut, ()> { async move { error!("unrecognized command '{}'", command_name); let _ = msg .channel_id .say( &ctx.http, format!("Could not find command '{}'", command_name), ) .await .is_ok(); } .boxed() } fn process_dispatch_error<'fut>( ctx: &'fut Context, msg: &'fut Message, error: DispatchError, ) -> BoxFuture<'fut, ()> { process_dispatch_error_future(ctx, msg, error).boxed() } async fn process_dispatch_error_future<'fut>( ctx: &'fut Context, msg: &'fut Message, error: DispatchError, ) { match error { DispatchError::Ratelimited(s) => { let _ = msg .channel_id .say( &ctx.http, format!("Wait {} seconds to use that command again", s.as_secs()), ) .await .is_ok(); } DispatchError::NotEnoughArguments { min, given } => { let _ = msg .channel_id .say( &ctx.http, format!( "Expected at least {} argument(s) for this command, but only got {}", min, given ), ) .await .is_ok(); } DispatchError::TooManyArguments { max, given } => { let response_str = format!("Expected no more than {} argument(s) for this command, but got {}. 
Try using quotation marks if your argument has spaces.", max, given ); let _ = msg.channel_id.say(&ctx.http, response_str).await.is_ok(); } DispatchError::CheckFailed(check_name, reason) => match reason { Reason::User(user_reason_str) => { let _ = msg.channel_id.say(&ctx.http, user_reason_str).await.is_ok(); } _ => { let _ = msg .channel_id .say( &ctx.http, format!("{} check failed: {:#?}", check_name, reason), ) .await .is_ok(); } }, e => { let _ = msg .channel_id .say(&ctx.http, format!("Unhandled Dispatch Error: {:?}", e)) .await .is_ok(); } }; } fn load_config() -> anyhow::Result<Config> { let config_path: &Path = "./config.toml".as_ref(); eprintln!("loading `{}`...", config_path.display()); let mut config = Config::load_from_path(config_path) .with_context(|| format!("failed to load `{}`", config_path.display()))?; eprintln!("validating config..."); let errors = config.validate(); let mut error_count = 0; for e in errors { match e.severity() { Severity::Warn => { eprintln!("validation warning: {}", e.error()); } Severity::Error => { eprintln!("validation error: {}", e.error()); error_count += 1; } } } if error_count != 0 { anyhow::bail!("validation failed with {} errors.", error_count); } Ok(config) } fn setup() -> anyhow::Result<(tokio::runtime::Runtime, Config, bool, WorkerGuard)> { eprintln!("starting tokio runtime..."); let tokio_rt = RuntimeBuilder::new_multi_thread() .enable_all() .thread_name("pikadick-tokio-worker") .build() .context("failed to start tokio runtime")?; let config = load_config().context("failed to load config")?; eprintln!("opening data directory..."); if config.data_dir.is_file() { anyhow::bail!("failed to create or open data directory, the path is a file"); } let missing_data_dir = !config.data_dir.exists(); if missing_data_dir { eprintln!("data directory does not exist. 
creating..."); std::fs::create_dir_all(&config.data_dir).context("failed to create data directory")?; } else if config.data_dir.is_dir() { eprintln!("data directory already exists."); } std::fs::create_dir_all(&config.log_file_dir()).context("failed to create log file dir")?; eprintln!("setting up logger..."); let guard = tokio_rt .block_on(async { crate::logger::setup(&config) }) .context("failed to initialize logger")?; eprintln!(); Ok((tokio_rt, config, missing_data_dir, guard)) } fn main() { let (tokio_rt, config, missing_data_dir, worker_guard) = match setup() { Ok(data) => data, Err(e) => { eprintln!("{:?}", e); drop(e); std::process::exit(1); } }; let exit_code = match real_main(tokio_rt, config, missing_data_dir, worker_guard) { Ok(()) => 0, Err(e) => { error!("{:?}", e); 1 } }; std::process::exit(exit_code); } fn real_main( tokio_rt: tokio::runtime::Runtime, config: Config, missing_data_dir: bool, _worker_guard: WorkerGuard, ) -> anyhow::Result<()> { let ret = tokio_rt.block_on(async_main(config, missing_data_dir)); let shutdown_start = Instant::now(); info!( "shutting down tokio runtime (shutdown timeout is {:?})...", TOKIO_RT_SHUTDOWN_TIMEOUT ); tokio_rt.shutdown_timeout(TOKIO_RT_SHUTDOWN_TIMEOUT); info!("shutdown tokio runtime in {:?}", shutdown_start.elapsed()); info!("successful shutdown"); ret } async fn async_main(config: Config, _missing_data_dir: bool) -> anyhow::Result<()> { info!("opening database..."); let db_path = config.data_dir.join("pikadick.sqlite"); let db = Database::new(&db_path, true) .await .context("failed to open database")?; let uppercase_prefix = config.prefix.to_uppercase(); let framework = StandardFramework::new() .configure(|c| { c.prefixes(&[&config.prefix, &uppercase_prefix]) .case_insensitivity(true) }) .help(&HELP) .group(&GENERAL_GROUP) .bucket("nekos", |b| b.delay(1)) .await .bucket("r6stats", |b| b.delay(7)) .await .bucket("r6tracker", |b| b.delay(7)) .await .bucket("system", |b| b.delay(30)) .await .bucket("quizizz", 
|b| b.delay(10)) .await .bucket("insta-dl", |b| b.delay(10)) .await .bucket("ttt-board", |b| b.delay(1)) .await .bucket("default", |b| b.delay(1)) .await .before(before_handler) .after(after_handler) .unrecognised_command(unrecognised_command_handler) .on_dispatch_error(process_dispatch_error); info!("using prefix '{}'", &config.prefix); let mut client = Client::builder(&config.token) .event_handler(Handler) .framework(framework) .await .context("failed to create client")?; tokio::spawn(handle_ctrl_c(client.shard_manager.clone())); let client_data = ClientData::init(client.shard_manager.clone(), config, db) .await .context("client data initialization failed")?; { client_data.enabled_check_data.add_groups(&[&GENERAL_GROUP]); } { let mut data = client.data.write().await; data.insert::<ClientDataKey>(client_data); } info!("logging in..."); if let Err(why) = client.start().await { error!("error while running client: {}", why); } drop(client); Ok(()) }
#![deny( unused_qualifications, clippy::all, unused_qualifications, unused_import_braces, unreachable_pub, trivial_numeric_casts, rustdoc::all, missing_debug_implementations, missing_copy_implementations, deprecated_in_future, meta_variable_misuse, non_ascii_idents, rust_2018_compatibility, rust_2018_idioms, future_incompatible, nonstandard_style )] #![allow(missing_doc_code_examples)] pub mod checks; pub mod client_data; pub mod commands; pub mod config; pub mod database; pub mod logger; pub mod util; use crate::{ client_data::ClientData, commands::*, config::{ ActivityKind, Config, Severity, }, database::Database, }; use anyhow::Context as _; use serenity::{ client::bridge::gateway::ShardManager, framework::standard::{ help_commands, macros::{ group, help, }, Args, CommandGroup, CommandResult, DispatchError, HelpOptions, Reason, StandardFramework, }, futures::future::BoxFuture, model::prelude::*, prelude::*, FutureExt, }; use std::{ collections::HashSet, path::Path, sync::Arc, time::{ Duration, Instant, }, }; use tokio::runtime::Builder as RuntimeBuilder; use tracing::{ error, info, warn, }; use tracing_appender::non_blocking::WorkerGuard; const TOKIO_RT_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(10); struct Handler; #[serenity::async_trait] impl EventHandler for Handler { async fn ready(&self, ctx: Context, ready: Ready) { let data_lock = ctx.data.read().await; let client_data = data_lock .get::<ClientDataKey>() .expect("missing client data"); let config = client_data.config.clone(); drop(data_lock); if let (Some(status), Some(kind)) = (config.status_name(), config.status_type()) { match kind { ActivityKind::Listening => { ctx.set_activity(Activity::listening(status)).await; } ActivityKind::Streaming => { ctx.set_activity(Activity::streaming(status, config.status_url().unwrap())) .await; } ActivityKind::Playing => { ctx.set_activity(Activity::playing(status)).await; } } } info!("logged in as '{}'", ready.user.name); } async fn resume(&self, _ctx: Context, 
resumed: ResumedEvent) { warn!("resumed connection. trace: {:?}", resumed.trace); } #[tracing::instrument(skip(self, ctx, msg), fields(author = %msg.author.id, guild = ?msg.guild_id, content = %msg.content))] async fn message(&self, ctx: Context, msg: Message) { let data_lock = ctx.data.read().await; let client_data = data_lock .get::<ClientDataKey>() .expect("missing client data"); let reddit_embed_data = client_data.reddit_embed_data.clone(); drop(data_lock); if let Err(e) = reddit_embed_data.process_msg(&ctx, &msg).await { error!("failed to generate reddit embed: {}", e); } } } #[derive(Debug, Clone, Copy)] pub struct ClientDataKey; impl TypeMapKey for ClientDataKey { type Value = ClientData; } #[help] async fn help( ctx: &Context, msg: &Message, args: Args, help_options: &'static HelpOptions, groups: &[&'static CommandGroup], owners: HashSet<UserId>, ) -> CommandResult { let _ = help_commands::with_embeds(ctx, msg, args, help_options, groups, owners) .await .is_some(); Ok(()) } #[group] #[commands( ping, nekos, r6stats, r6tracker, rule34, system, quizizz, fml, zalgo, shift, reddit_embed, invite, vaporwave, cmd, latency, uwuify, cache_stats, insta_dl, deviantart, urban, xkcd, tic_tac_toe, iqdb )] struct General; async fn handle_ctrl_c(shard_manager: Arc<Mutex<ShardManager>>) { match tokio::signal::ctrl_c().await { Ok(_) => { info!("shutting down..."); info!("stopping client..."); shard_manager.lock().await.shutdown_all().await; } Err(e) => { warn!("failed to set ctrl-c handler: {}", e); } }; } #[tracing::instrument(skip(_ctx, msg), fields(author = %msg.author.id, guild = ?msg.guild_id, content = %msg.content))] fn before_handler<'fut>( _ctx: &'fut Context, msg: &'fut Message, cmd_name: &'fut str, ) -> BoxFuture<'fut, bool> { info!("allowing command to process"); async move { true }.boxed() } fn after_handler<'fut>( _ctx: &'fut Context, _msg: &'fut Message, command_name: &'fut str, command_result: CommandResult, ) -> BoxFuture<'fut, ()> { async move { if let 
Err(e) = command_result { error!("failed to process command '{}': {}", command_name, e); } } .boxed() } fn unrecognised_command_handler<'fut>( ctx: &'fut Context, msg: &'fut Message, command_name: &'fut str, ) -> BoxFuture<'fut, ()> { async move { error!("unrecognized command '{}'", command_name); let _ = msg .channel_id .say( &ctx.http, format!("Could not find command '{}'", command_name), ) .await .is_ok(); } .boxed() } fn process_dispatch_error<'fut>( ctx: &'fut Context, msg: &'fut Message, error: DispatchError, ) -> BoxFuture<'fut, ()> { process_dispatch_error_future(ctx, msg, error).boxed() } async fn process_dispatch_error_future<'fut>( ctx: &'fut Context, msg: &'fut Message, error: DispatchError, ) { match error { DispatchError::Ratelimited(s) => { let _ = msg .channel_id .say( &ctx.http, format!("Wait {} seconds to use that command again", s.as_secs()), ) .await .is_ok(); } DispatchError::NotEnoughArguments { min, given } => { let _ = msg .channel_id .say( &ctx.http, format!( "Expected at least {} argument(s) for this command, but only got {}", min, given ), ) .await .is_ok(); } DispatchError::TooManyArguments { max, given } => { let response_str = format!("Expected no more than {} argument(s) for this command, but got {}. Try using quotation marks if your argument has spaces.", max, given ); let _ = msg.channel_id.say(&ctx.http, response_str).await.is_ok(); } DispatchError::CheckFailed(check_name, reason) => match reason { Reason::User(user_reason_str) => { let _ = msg.channel_id.say(&ctx.http, user_reason_str).await.is_ok(); } _ => { let _ = msg .channel_id .say( &ctx.http, format!("{} check failed: {:#?}", check_name, reason), ) .await .is_ok(); } }, e => { let _ = msg .channel_id .say(&ctx.http, format!("Unhandled Dispatch Error: {:?}", e)) .await .is_ok(); } }; } fn load_config() -> anyhow::Result<Config> {
fn setup() -> anyhow::Result<(tokio::runtime::Runtime, Config, bool, WorkerGuard)> { eprintln!("starting tokio runtime..."); let tokio_rt = RuntimeBuilder::new_multi_thread() .enable_all() .thread_name("pikadick-tokio-worker") .build() .context("failed to start tokio runtime")?; let config = load_config().context("failed to load config")?; eprintln!("opening data directory..."); if config.data_dir.is_file() { anyhow::bail!("failed to create or open data directory, the path is a file"); } let missing_data_dir = !config.data_dir.exists(); if missing_data_dir { eprintln!("data directory does not exist. creating..."); std::fs::create_dir_all(&config.data_dir).context("failed to create data directory")?; } else if config.data_dir.is_dir() { eprintln!("data directory already exists."); } std::fs::create_dir_all(&config.log_file_dir()).context("failed to create log file dir")?; eprintln!("setting up logger..."); let guard = tokio_rt .block_on(async { crate::logger::setup(&config) }) .context("failed to initialize logger")?; eprintln!(); Ok((tokio_rt, config, missing_data_dir, guard)) } fn main() { let (tokio_rt, config, missing_data_dir, worker_guard) = match setup() { Ok(data) => data, Err(e) => { eprintln!("{:?}", e); drop(e); std::process::exit(1); } }; let exit_code = match real_main(tokio_rt, config, missing_data_dir, worker_guard) { Ok(()) => 0, Err(e) => { error!("{:?}", e); 1 } }; std::process::exit(exit_code); } fn real_main( tokio_rt: tokio::runtime::Runtime, config: Config, missing_data_dir: bool, _worker_guard: WorkerGuard, ) -> anyhow::Result<()> { let ret = tokio_rt.block_on(async_main(config, missing_data_dir)); let shutdown_start = Instant::now(); info!( "shutting down tokio runtime (shutdown timeout is {:?})...", TOKIO_RT_SHUTDOWN_TIMEOUT ); tokio_rt.shutdown_timeout(TOKIO_RT_SHUTDOWN_TIMEOUT); info!("shutdown tokio runtime in {:?}", shutdown_start.elapsed()); info!("successful shutdown"); ret } async fn async_main(config: Config, _missing_data_dir: bool) 
-> anyhow::Result<()> { info!("opening database..."); let db_path = config.data_dir.join("pikadick.sqlite"); let db = Database::new(&db_path, true) .await .context("failed to open database")?; let uppercase_prefix = config.prefix.to_uppercase(); let framework = StandardFramework::new() .configure(|c| { c.prefixes(&[&config.prefix, &uppercase_prefix]) .case_insensitivity(true) }) .help(&HELP) .group(&GENERAL_GROUP) .bucket("nekos", |b| b.delay(1)) .await .bucket("r6stats", |b| b.delay(7)) .await .bucket("r6tracker", |b| b.delay(7)) .await .bucket("system", |b| b.delay(30)) .await .bucket("quizizz", |b| b.delay(10)) .await .bucket("insta-dl", |b| b.delay(10)) .await .bucket("ttt-board", |b| b.delay(1)) .await .bucket("default", |b| b.delay(1)) .await .before(before_handler) .after(after_handler) .unrecognised_command(unrecognised_command_handler) .on_dispatch_error(process_dispatch_error); info!("using prefix '{}'", &config.prefix); let mut client = Client::builder(&config.token) .event_handler(Handler) .framework(framework) .await .context("failed to create client")?; tokio::spawn(handle_ctrl_c(client.shard_manager.clone())); let client_data = ClientData::init(client.shard_manager.clone(), config, db) .await .context("client data initialization failed")?; { client_data.enabled_check_data.add_groups(&[&GENERAL_GROUP]); } { let mut data = client.data.write().await; data.insert::<ClientDataKey>(client_data); } info!("logging in..."); if let Err(why) = client.start().await { error!("error while running client: {}", why); } drop(client); Ok(()) }
let config_path: &Path = "./config.toml".as_ref(); eprintln!("loading `{}`...", config_path.display()); let mut config = Config::load_from_path(config_path) .with_context(|| format!("failed to load `{}`", config_path.display()))?; eprintln!("validating config..."); let errors = config.validate(); let mut error_count = 0; for e in errors { match e.severity() { Severity::Warn => { eprintln!("validation warning: {}", e.error()); } Severity::Error => { eprintln!("validation error: {}", e.error()); error_count += 1; } } } if error_count != 0 { anyhow::bail!("validation failed with {} errors.", error_count); } Ok(config) }
function_block-function_prefix_line
[ { "content": "pub fn vaporwave_str(data: &str) -> String {\n\n data.chars()\n\n .filter_map(|c| {\n\n let c = c as u32;\n\n if (33..=270).contains(&c) {\n\n std::char::from_u32(c + 65248) // unwrap or c ?\n\n } else {\n\n Some(32 as char)\n\n }\n\n })\n\n .collect()\n\n}\n", "file_path": "src/commands/vaporwave.rs", "rank": 0, "score": 297402.19592342916 }, { "content": "fn status_to_str(status: bool) -> &'static str {\n\n if status {\n\n \"disabled\"\n\n } else {\n\n \"enabled\"\n\n }\n\n}\n", "file_path": "src/commands/cmd.rs", "rank": 1, "score": 286644.9470921897 }, { "content": "/// Gets the subreddit and post id from a reddit url.\n\n///\n\n/// # Returns\n\n/// Returns a tuple or the the subreddit and post id in that order.\n\npub fn parse_post_url(url: &Url) -> Option<(&str, &str)> {\n\n // Reddit path:\n\n // /r/dankmemes/comments/h966lq/davie_is_shookt/\n\n\n\n // Template:\n\n // /r/<subreddit>/comments/<post_id>/<post_title (irrelevant)>/\n\n\n\n // Parts:\n\n // r\n\n // <subreddit>\n\n // comments\n\n // <post_id>\n\n // <post_title>\n\n // (Nothing, should be empty or not existent)\n\n\n\n let mut iter = url.path_segments()?;\n\n\n\n if iter.next()? 
!= \"r\" {\n\n return None;\n\n }\n", "file_path": "src/commands/reddit_embed.rs", "rank": 2, "score": 246410.63866734953 }, { "content": "/// A rust-optimized version of:\n\n/// ```javascript\n\n/// /// Taken from: https://honk.moe/tools/owo.html\n\n/// var faces = [\"(・`ω´・)\", \";;w;;\", \"owo\", \"UwU\", \">w<\", \"^w^\"];\n\n/// function OwoifyText(){\n\n/// v = document.getElementById(\"textarea\").value\n\n/// v = v.replace(/(?:r|l)/g, \"w\");\n\n/// v = v.replace(/(?:R|L)/g, \"W\");\n\n/// v = v.replace(/n([aeiou])/g, 'ny$1');\n\n/// v = v.replace(/N([aeiou])/g, 'Ny$1');\n\n/// v = v.replace(/N([AEIOU])/g, 'Ny$1');\n\n/// v = v.replace(/ove/g, \"uv\");\n\n/// v = v.replace(/\\!+/g, \" \" + faces[Math.floor(Math.random() * faces.length)] + \" \");\n\n/// document.getElementById(\"textarea\").value = v\n\n/// };\n\n/// ```\n\n/// This version doesn't use regexes and completes the uwufication in 1 iteration with a lookahead buffer of 2 elements.\n\n/// NOTE: It may be buggy due to its complexity and discrepancies with the js version should be reported on the issue tracker.\n\npub fn uwuify_str(input: &str) -> String {\n\n let mut iter = input.chars().peekable();\n\n let mut buf = None;\n\n let mut output = String::with_capacity(input.len());\n\n\n\n // Buf has 1 cap so it must be empty in each match arm since we try to fetch a value from it here.\n\n // We can then treat peek/next as the first value from here on.\n\n while let Some(c) = buf.take().or_else(|| iter.next()) {\n\n match c {\n\n 'r' | 'l' => {\n\n output.push('w');\n\n }\n\n 'R' | 'L' => {\n\n output.push('W');\n\n }\n\n 'n' => {\n\n if let Some(c) = iter.peek().copied() {\n\n if matches!(c, 'a' | 'e' | 'i' | 'o' | 'u') {\n\n let c = iter.next().unwrap();\n\n\n", "file_path": "src/commands/uwuify.rs", "rank": 3, "score": 244535.02254844917 }, { "content": "fn sqlite_logger_func(error_code: c_int, msg: &str) {\n\n warn!(\"sqlite error code ({}): {}\", error_code, msg);\n\n}\n\n\n\n/// The 
database\n\n#[derive(Clone, Debug)]\n\npub struct Database {\n\n db: Arc<parking_lot::Mutex<rusqlite::Connection>>,\n\n}\n\n\n\nimpl Database {\n\n //// Make a new [`Database`].\n\n pub async fn new(path: &Path, create_if_missing: bool) -> anyhow::Result<Self> {\n\n LOGGER_INIT\n\n .clone()\n\n .context(\"failed to init sqlite logger\")?;\n\n\n\n let mut flags = rusqlite::OpenFlags::default();\n\n if !create_if_missing {\n\n flags.remove(rusqlite::OpenFlags::SQLITE_OPEN_CREATE)\n", "file_path": "src/database.rs", "rank": 5, "score": 219196.45424672184 }, { "content": "#[derive(Debug)]\n\nstruct NsfwArgParseError;\n\n\n", "file_path": "src/commands/nekos.rs", "rank": 6, "score": 208395.03003523953 }, { "content": "/// Try to setup a logger.\n\n///\n\n/// Must be called from a tokio runtime.\n\npub fn setup(config: &Config) -> anyhow::Result<WorkerGuard> {\n\n let file_writer = tracing_appender::rolling::hourly(&config.log_file_dir(), \"log.txt\");\n\n let (nonblocking_file_writer, guard) = tracing_appender::non_blocking(file_writer);\n\n\n\n // Only enable pikadick since serenity likes puking in the logs during connection failures\n\n // serenity's framework section seems ok as well\n\n let env_filter = EnvFilter::default()\n\n .add_directive(\n\n \"pikadick=info\"\n\n .parse()\n\n .context(\"failed to parse logging directive\")?,\n\n )\n\n .add_directive(\n\n \"serenity::framework::standard=info\"\n\n .parse()\n\n .context(\"failed to parse logging directive\")?,\n\n );\n\n let stderr_formatting_layer = tracing_subscriber::fmt::layer().with_writer(std::io::stderr);\n\n let file_formatting_layer = tracing_subscriber::fmt::layer()\n\n .with_ansi(false)\n", "file_path": "src/logger.rs", "rank": 7, "score": 197417.85494497436 }, { "content": "fn print_post_info(post: &rule34::HtmlPost, image_name: &str, out_path: &Path) {\n\n println!(\"ID: {}\", post.id);\n\n println!(\"Post Date: {}\", post.date);\n\n println!(\"Post Url: {}\", post.get_html_post_url());\n\n if let 
Some(source) = post.source.as_ref() {\n\n println!(\"Post Source: {}\", source);\n\n }\n\n println!(\"Image Url: {}\", post.image_url);\n\n println!(\"Image Name: {}\", image_name);\n\n println!(\"Copyright Tags: {}\", post.copyright_tags.join(\", \"));\n\n println!(\"Character Tags: {}\", post.character_tags.join(\", \"));\n\n println!(\"Artist Tags: {}\", post.artist_tags.join(\", \"));\n\n println!(\"General Tags: {}\", post.general_tags.join(\", \"));\n\n println!(\"Meta Tags: {}\", post.meta_tags.join(\", \"));\n\n println!(\"Has Child Posts: {}\", post.has_child_posts);\n\n println!(\n\n \"Parent Post: {}\",\n\n post.parent_post\n\n .as_ref()\n\n .map(|id| id.to_string())\n\n .unwrap_or_else(|| \"none\".to_string())\n\n );\n\n println!(\"Out Path: {}\", out_path.display());\n\n println!();\n\n}\n", "file_path": "lib/rule34-rs/src/cli/commands/download.rs", "rank": 9, "score": 183954.6075129388 }, { "content": "struct NsfwArg;\n\n\n\nimpl FromStr for NsfwArg {\n\n type Err = NsfwArgParseError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n if s == \"nsfw\" {\n\n Ok(NsfwArg)\n\n } else {\n\n Err(NsfwArgParseError)\n\n }\n\n }\n\n}\n\n\n\n/// A nekos cache\n\n#[derive(Clone, Debug)]\n\npub struct Cache(Arc<CacheInner>);\n\n\n\nimpl Cache {\n\n /// Make a new cache\n", "file_path": "src/commands/nekos.rs", "rank": 10, "score": 181472.87071688785 }, { "content": "/// Check if 2 [`Check`]s are the same.\n\n///\n\n/// This includes their function pointers, though the argument references do not necessarily have to point to the same check.\n\n/// This is necessary as `serenity`'s `PartialEq` for [`Check`] only checks the name.\n\nfn checks_are_same(check1: &Check, check2: &Check) -> bool {\n\n let is_same_partial_eq = check1 == check2;\n\n\n\n // HACK:\n\n // Use pointers as ids since checks have no unique identifiers\n\n let function1_addr = check1.function as usize;\n\n let function2_addr = check2.function as usize;\n\n let is_same_function_ptr = 
function1_addr == function2_addr;\n\n\n\n is_same_partial_eq && is_same_function_ptr\n\n}\n\n\n\n#[check]\n\n#[name(\"Enabled\")]\n\npub async fn enabled_check(\n\n ctx: &Context,\n\n msg: &Message,\n\n _args: &mut Args,\n\n opts: &CommandOptions,\n\n) -> Result<(), Reason> {\n", "file_path": "src/checks/enabled.rs", "rank": 11, "score": 179059.25264474202 }, { "content": "type SubReddit = String;\n", "file_path": "src/commands/reddit_embed.rs", "rank": 12, "score": 178562.82902826465 }, { "content": "struct GameArg(Game);\n\n\n\nimpl FromStr for GameArg {\n\n type Err = GameParseError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"bl\" => Ok(Self(Game::Borderlands)),\n\n \"bl2\" => Ok(Self(Game::Borderlands2)),\n\n \"blps\" => Ok(Self(Game::BorderlandsPreSequel)),\n\n \"bl3\" => Ok(Self(Game::Borderlands3)),\n\n _ => Err(GameParseError(s.into())),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default, Clone)]\n\npub struct ShiftClient {\n\n orcz_client: OrczClient,\n\n cache: TimedCache<Game, Vec<Arc<ShiftCode>>>,\n", "file_path": "src/commands/shift.rs", "rank": 13, "score": 173833.6300483661 }, { "content": "/// Fixup a url for parsing\n\nfn fixup_url(link: &str) -> Cow<str> {\n\n let mut link = Cow::Borrowed(link);\n\n\n\n // Fixup no protocol\n\n if link.starts_with(\"//\") {\n\n link = format!(\"https:{}\", link).into()\n\n }\n\n\n\n // Fixup relative urls\n\n if link.starts_with('/') {\n\n link = format!(\"https://iqdb.org{}\", link).into();\n\n }\n\n\n\n link\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n", "file_path": "lib/iqdb-rs/src/types/search_results.rs", "rank": 14, "score": 173753.12466044744 }, { "content": "#[derive(Debug)]\n\nstruct GameParseError(String);\n\n\n", "file_path": "src/commands/shift.rs", "rank": 15, "score": 168949.66842722072 }, { "content": "type PostId = String;\n\n\n\nlazy_static! 
{\n\n /// Source: https://urlregex.com/\n\n static ref URL_REGEX: Regex = Regex::new(include_str!(\"./url_regex.txt\")).expect(\"invalid url regex\");\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct RedditEmbedData {\n\n reddit_client: reddit::Client,\n\n reddit_tube_client: reddit_tube::Client,\n\n cache: TimedCache<(SubReddit, PostId), String>,\n\n}\n\n\n\nimpl RedditEmbedData {\n\n pub fn new() -> Self {\n\n RedditEmbedData {\n\n reddit_client: reddit::Client::new(),\n\n reddit_tube_client: reddit_tube::Client::new(),\n\n cache: Default::default(),\n", "file_path": "src/commands/reddit_embed.rs", "rank": 16, "score": 167766.07886526585 }, { "content": "fn load_cookie_jar(client: &deviantart::Client) -> anyhow::Result<()> {\n\n use std::{\n\n fs::File,\n\n io::BufReader,\n\n };\n\n\n\n let cookie_file = File::open(get_cookie_file_path()?).context(\"failed to read cookies\")?;\n\n client.cookie_store.load_json(BufReader::new(cookie_file))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/deviantart-rs/src/cli/main.rs", "rank": 17, "score": 167120.30171387977 }, { "content": "fn save_cookie_jar(client: &deviantart::Client) -> anyhow::Result<()> {\n\n use std::fs::File;\n\n\n\n let cookie_file =\n\n File::create(get_cookie_file_path()?).context(\"failed to create cookie file\")?;\n\n client.cookie_store.save_json(cookie_file)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/deviantart-rs/src/cli/main.rs", "rank": 18, "score": 167120.30171387977 }, { "content": "fn fmt_uptime(uptime: Duration) -> String {\n\n let raw_secs = uptime.as_secs();\n\n\n\n let days = raw_secs / (60 * 60 * 24);\n\n let hours = (raw_secs % (60 * 60 * 24)) / (60 * 60);\n\n let minutes = (raw_secs % (60 * 60)) / 60;\n\n let secs = raw_secs % 60;\n\n\n\n format!(\n\n \"{} days {} hours {} minutes {} seconds\",\n\n days, hours, minutes, secs\n\n )\n\n}\n\n\n", "file_path": "src/commands/system.rs", "rank": 21, "score": 156848.98544248572 }, { "content": "fn real_main(command: Command) -> 
anyhow::Result<()> {\n\n let tokio_rt = tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .build()\n\n .context(\"failed to start tokio runtime\")?;\n\n\n\n tokio_rt.block_on(async_main(command))?;\n\n\n\n eprintln!(\"Done.\");\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn async_main(command: Command) -> anyhow::Result<()> {\n\n let client = nekos::Client::new();\n\n\n\n let mut image_list = client.get_random(command.nsfw, 1).await?;\n\n\n\n if image_list.images.is_empty() {\n\n anyhow::bail!(\"image list is empty\");\n", "file_path": "lib/nekos-rs/src/main.rs", "rank": 22, "score": 147460.67508299844 }, { "content": "#[derive(Debug)]\n\nstruct CacheInner {\n\n primary: ArrayQueue<Url>,\n\n secondary: RwLock<IndexSet<Url>>,\n\n}\n\n\n\n/// The nekos client\n\n#[derive(Clone, Debug)]\n\npub struct NekosClient {\n\n client: nekos::Client,\n\n\n\n cache: Cache,\n\n nsfw_cache: Cache,\n\n}\n\n\n\nimpl NekosClient {\n\n /// Make a new nekos client\n\n pub fn new() -> Self {\n\n NekosClient {\n\n client: Default::default(),\n\n cache: Cache::new(),\n", "file_path": "src/commands/nekos.rs", "rank": 23, "score": 141921.36583203575 }, { "content": "CREATE TABLE IF NOT EXISTS reddit_embed_guild_settings (\n\n guild_id INTEGER NOT NULL PRIMARY KEY UNIQUE CHECK(TYPEOF(guild_id) = 'integer'),\n\n enabled INTEGER NOT NULL CHECK(TYPEOF(enabled) = 'integer' AND enabled IN (0, 1))\n\n);\n\n\n", "file_path": "sql/setup_tables.sql", "rank": 25, "score": 138650.27377134195 }, { "content": "struct TimedCacheInner<K, V> {\n\n cache: DashMap<K, Arc<TimedCacheEntry<V>>>,\n\n last_trim: Mutex<Instant>,\n\n\n\n trim_time: Duration,\n\n expiry_time: Duration,\n\n}\n\n\n\nimpl<K, V> TimedCache<K, V>\n\nwhere\n\n K: Eq + Hash + 'static,\n\n V: 'static,\n\n{\n\n /// Create a cache with timed entries with a default expire time\n\n pub fn new() -> Self {\n\n TimedCache(Arc::new(TimedCacheInner {\n\n cache: DashMap::new(),\n\n last_trim: Mutex::new(Instant::now()),\n\n\n\n trim_time: 
DEFAULT_EXPIRE_TIME,\n", "file_path": "src/util/timed_cache.rs", "rank": 27, "score": 136281.0609528703 }, { "content": "fn epoch_nanos_to_local_datetime(nanos: u64) -> DateTime<chrono::Local> {\n\n DateTime::from(UNIX_EPOCH + Duration::from_nanos(nanos))\n\n}\n\n\n", "file_path": "src/commands/system.rs", "rank": 28, "score": 135478.90201868484 }, { "content": "/// A type that can provide cache stats\n\npub trait CacheStatsProvider {\n\n /// Publish stats to the provided [`CacheStatsBuilder`].\n\n fn publish_cache_stats(&self, cache_stats_builder: &mut CacheStatsBuilder);\n\n}\n\n\n\n/// The [`ClientData`].\n\n#[derive(Debug)]\n\npub struct ClientData {\n\n /// The discord shard_manager\n\n pub shard_manager: Arc<Mutex<ShardManager>>,\n\n\n\n /// The client for nekos\n\n pub nekos_client: NekosClient,\n\n /// The R6Stats client\n\n pub r6stats_client: R6StatsClient,\n\n /// The r6tracker client\n\n pub r6tracker_client: R6TrackerClient,\n\n /// The rule34 client\n\n pub rule34_client: Rule34Client,\n\n /// The quizizz client\n", "file_path": "src/client_data.rs", "rank": 30, "score": 133608.0184438264 }, { "content": "fn escape_filename(path: &str) -> String {\n\n path.chars()\n\n .map(|c| {\n\n if [':', '?', '/', '|', '*'].contains(&c) {\n\n '-'\n\n } else {\n\n c\n\n }\n\n })\n\n .collect()\n\n}\n", "file_path": "lib/deviantart-rs/src/cli/main.rs", "rank": 31, "score": 132634.01601049484 }, { "content": "fn fmt_swap(swap: &Swap) -> String {\n\n let fmt_args = InformationF32::format_args(gigabyte, DisplayStyle::Abbreviation);\n\n\n\n let used = InformationF32::new::<byte>(swap.used().get::<byte>() as f32);\n\n let total = InformationF32::new::<byte>(swap.total().get::<byte>() as f32);\n\n\n\n format!(\"{:.2} / {:.2}\", fmt_args.with(used), fmt_args.with(total),)\n\n}\n\n\n", "file_path": "src/commands/system.rs", "rank": 32, "score": 120994.2362322659 }, { "content": "fn fmt_memory(memory: &Memory) -> String {\n\n let fmt_args = 
InformationF32::format_args(gigabyte, DisplayStyle::Abbreviation);\n\n\n\n let avail_mem = InformationF32::new::<byte>(memory.available().get::<byte>() as f32);\n\n let total_mem = InformationF32::new::<byte>(memory.total().get::<byte>() as f32);\n\n let used_mem = total_mem - avail_mem;\n\n\n\n format!(\n\n \"{:.2} / {:.2}\",\n\n fmt_args.with(used_mem),\n\n fmt_args.with(total_mem),\n\n )\n\n}\n\n\n", "file_path": "src/commands/system.rs", "rank": 33, "score": 120994.2362322659 }, { "content": " let video_data = self\n\n .reddit_tube_client\n\n .get_video(&main_page, url.as_str())\n\n .await\n\n .context(\"failed to get video data\")?;\n\n\n\n match video_data {\n\n GetVideoResponse::Ok(video_data) => Ok(video_data),\n\n GetVideoResponse::Error(e) => Err(e).context(\"bad video response\"),\n\n }\n\n }\n\n\n\n /// Process a message and insert an embed if neccesary.\n\n #[tracing::instrument(level = \"info\", skip(self, ctx, msg))]\n\n pub async fn process_msg(&self, ctx: &Context, msg: &Message) -> CommandResult {\n\n let data_lock = ctx.data.read().await;\n\n let client_data = data_lock\n\n .get::<ClientDataKey>()\n\n .expect(\"missing client data\");\n\n let db = client_data.db.clone();\n", "file_path": "src/commands/reddit_embed.rs", "rank": 34, "score": 120290.94356814997 }, { "content": "async fn reddit_embed(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let data_lock = ctx.data.read().await;\n\n let client_data = data_lock.get::<ClientDataKey>().unwrap();\n\n let db = client_data.db.clone();\n\n drop(data_lock);\n\n\n\n let enable = match args.trimmed().current().expect(\"missing arg\") {\n\n \"enable\" => true,\n\n \"disable\" => false,\n\n arg => {\n\n msg.channel_id\n\n .say(\n\n &ctx.http,\n\n format!(\n\n \"The argument '{}' is not recognized. 
Valid: enable, disable\",\n\n arg\n\n ),\n\n )\n\n .await?;\n\n return Ok(());\n", "file_path": "src/commands/reddit_embed.rs", "rank": 35, "score": 120283.71769887101 }, { "content": "\n\n let data = if let Some(value) = maybe_url.clone() {\n\n Some(value)\n\n } else {\n\n match self.get_original_post(subreddit, post_id).await {\n\n Ok(post) => {\n\n if !post.is_video {\n\n Some(post.url)\n\n } else {\n\n match self.get_video_data(url).await {\n\n Ok(video_data) => Some(video_data.url.into()),\n\n Err(e) => {\n\n warn!(\"Failed to get reddit video info, got error: {}\", e);\n\n None\n\n }\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n warn!(\"Failed to get reddit post, got error: {}\", e);\n", "file_path": "src/commands/reddit_embed.rs", "rank": 36, "score": 120272.75611623732 }, { "content": "impl std::fmt::Debug for RedditEmbedData {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n // TODO: Replace with manual impl if/when reddit_client becomes debug\n\n f.debug_struct(\"RedditEmbedData\")\n\n .field(\"reddit_tube_client\", &self.reddit_tube_client)\n\n .field(\"cache\", &self.cache)\n\n .finish()\n\n }\n\n}\n\n\n\n// Broken in help:\n\n// #[required_permissions(\"ADMINISTRATOR\")]\n\n\n\n#[command(\"reddit-embed\")]\n\n#[description(\"Enable automaitc reddit embedding for this server\")]\n\n#[usage(\"<enable/disable>\")]\n\n#[example(\"enable\")]\n\n#[min_args(1)]\n\n#[max_args(1)]\n\n#[checks(Admin, Enabled)]\n", "file_path": "src/commands/reddit_embed.rs", "rank": 37, "score": 120267.11600184836 }, { "content": " }\n\n };\n\n\n\n // TODO: Probably can unwrap if i add a check to the command\n\n let guild_id = match msg.guild_id {\n\n Some(id) => id,\n\n None => {\n\n msg.channel_id\n\n .say(\n\n &ctx.http,\n\n \"Missing server id. 
Are you in a server right now?\",\n\n )\n\n .await?;\n\n return Ok(());\n\n }\n\n };\n\n\n\n let old_val = db.set_reddit_embed_enabled(guild_id, enable).await?;\n\n\n\n let status_str = if enable { \"enabled\" } else { \"disabled\" };\n", "file_path": "src/commands/reddit_embed.rs", "rank": 38, "score": 120265.05795283763 }, { "content": "use crate::{\n\n checks::{\n\n ADMIN_CHECK,\n\n ENABLED_CHECK,\n\n },\n\n client_data::{\n\n CacheStatsBuilder,\n\n CacheStatsProvider,\n\n },\n\n util::{\n\n LoadingReaction,\n\n TimedCache,\n\n },\n\n ClientDataKey,\n\n};\n\nuse anyhow::Context as _;\n\nuse lazy_static::lazy_static;\n\nuse reddit_tube::{\n\n types::get_video_response::GetVideoResponseOk,\n\n GetVideoResponse,\n", "file_path": "src/commands/reddit_embed.rs", "rank": 39, "score": 120262.13704149763 }, { "content": " None\n\n } else {\n\n Some(l.swap_remove(0))\n\n }\n\n }) {\n\n // TODO: Crossposts are not stored in boxes, but in a vec. We need to unify the return types somehow.\n\n // Should we choose to move out of a box, or move into a box? Which will be used more?\n\n Ok(Box::new(post))\n\n } else {\n\n Ok(post)\n\n }\n\n }\n\n\n\n /// Get video data from reddit.tube. 
Takes a reddit url.\n\n pub async fn get_video_data(&self, url: &Url) -> anyhow::Result<GetVideoResponseOk> {\n\n let main_page = self\n\n .reddit_tube_client\n\n .get_main_page()\n\n .await\n\n .context(\"failed to get main page\")?;\n", "file_path": "src/commands/reddit_embed.rs", "rank": 40, "score": 120261.58234074421 }, { "content": " drop(data_lock);\n\n\n\n let guild_id = match msg.guild_id {\n\n Some(id) => id,\n\n None => {\n\n // Only embed guild links\n\n return Ok(());\n\n }\n\n };\n\n\n\n let is_enabled_for_guild =\n\n db.get_reddit_embed_enabled(guild_id)\n\n .await\n\n .unwrap_or_else(|e| {\n\n error!(\n\n \"failed to get reddit-embed guild data for '{}': {}\",\n\n guild_id, e\n\n );\n\n false\n\n });\n", "file_path": "src/commands/reddit_embed.rs", "rank": 41, "score": 120259.8263082114 }, { "content": "\n\n if enable == old_val {\n\n msg.channel_id\n\n .say(\n\n &ctx.http,\n\n format!(\"Reddit embeds are already {} for this server\", status_str),\n\n )\n\n .await?;\n\n } else {\n\n msg.channel_id\n\n .say(\n\n &ctx.http,\n\n format!(\"Reddit embeds are now {} for this guild\", status_str),\n\n )\n\n .await?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/commands/reddit_embed.rs", "rank": 42, "score": 120259.51153501036 }, { "content": "};\n\nuse regex::Regex;\n\nuse serenity::{\n\n framework::standard::{\n\n macros::command,\n\n Args,\n\n CommandResult,\n\n },\n\n model::prelude::*,\n\n prelude::*,\n\n};\n\nuse tracing::{\n\n error,\n\n warn,\n\n};\n\nuse url::Url;\n\n\n", "file_path": "src/commands/reddit_embed.rs", "rank": 43, "score": 120259.15652575609 }, { "content": " None\n\n }\n\n }\n\n };\n\n\n\n if let Some(data) = data {\n\n self.cache\n\n .insert((subreddit.into(), post_id.into()), data.clone());\n\n\n\n // TODO: Consider downloading and reposting?\n\n msg.channel_id.say(&ctx.http, data).await?;\n\n if let Some(mut loading_reaction) = loading_reaction.take() {\n\n loading_reaction.send_ok();\n\n }\n\n }\n\n } else {\n\n 
error!(\"Failed to parse reddit post url\");\n\n // TODO: Maybe expand this to an actual error to give better feedback\n\n }\n\n }\n\n\n\n self.cache.trim();\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/commands/reddit_embed.rs", "rank": 44, "score": 120254.7747948024 }, { "content": " }\n\n }\n\n\n\n /// Get the original post from a given subreddit and post id.\n\n ///\n\n /// This resolves crossposts. Currently only resolves 1 layer.\n\n pub async fn get_original_post(\n\n &self,\n\n subreddit: &str,\n\n post_id: &str,\n\n ) -> anyhow::Result<Box<reddit::Link>> {\n\n let mut post_data = self.reddit_client.get_post(subreddit, post_id).await?;\n\n\n\n if post_data.is_empty() {\n\n anyhow::bail!(\"missing post\");\n\n }\n\n\n\n let mut post_data = post_data\n\n .swap_remove(0)\n\n .data\n", "file_path": "src/commands/reddit_embed.rs", "rank": 45, "score": 120251.00709521599 }, { "content": "\n\n if !is_enabled_for_guild || msg.author.bot {\n\n // Don't process if it isn't enabled or the author is a bot\n\n return Ok(());\n\n }\n\n\n\n // NOTE: Regex doesn't HAVE to be perfect.\n\n // Ideally, it just needs to be aggressive since parsing it into a url will weed out invalids.\n\n // We collect into a `Vec` as the regex iterator is not Sync and cannot be held across await points.\n\n let urls: Vec<Url> = URL_REGEX\n\n .find_iter(&msg.content)\n\n .filter_map(|url_match| Url::parse(url_match.as_str()).ok())\n\n .filter(|url| {\n\n let host_str = match url.host_str() {\n\n Some(url) => url,\n\n None => return false,\n\n };\n\n\n\n host_str == \"www.reddit.com\" || host_str == \"reddit.com\"\n\n })\n", "file_path": "src/commands/reddit_embed.rs", "rank": 46, "score": 120250.55395966416 }, { "content": " .collect();\n\n\n\n let mut loading_reaction = if !urls.is_empty() {\n\n Some(LoadingReaction::new(ctx.http.clone(), msg))\n\n } else {\n\n None\n\n };\n\n\n\n // Embed for each url\n\n // NOTE: we short circuit on failure since sending a msg to a channel and 
failing is most likely a permissions problem,\n\n // especially since serenity retries each req once\n\n for url in urls.iter() {\n\n // This is sometimes TOO smart and finds data for invalid urls...\n\n // TODO: Consider making parsing stricter\n\n if let Some((subreddit, post_id)) = parse_post_url(url) {\n\n // Try cache\n\n let maybe_url = self\n\n .cache\n\n .get_if_fresh(&(subreddit.into(), post_id.into()))\n\n .map(|el| el.data().clone());\n", "file_path": "src/commands/reddit_embed.rs", "rank": 47, "score": 120244.72174159405 }, { "content": "\n\n let subreddit = iter.next()?;\n\n\n\n if iter.next()? != \"comments\" {\n\n return None;\n\n }\n\n\n\n let post_id = iter.next()?;\n\n\n\n // TODO: Should we reject urls with the wrong ending?\n\n\n\n Some((subreddit, post_id))\n\n}\n\n\n\nimpl CacheStatsProvider for RedditEmbedData {\n\n fn publish_cache_stats(&self, cache_stats_builder: &mut CacheStatsBuilder) {\n\n cache_stats_builder.publish_stat(\"reddit_embed\", \"link_cache\", self.cache.len() as f32);\n\n }\n\n}\n\n\n", "file_path": "src/commands/reddit_embed.rs", "rank": 48, "score": 120234.6796185064 }, { "content": " .into_listing()\n\n .ok_or_else(|| anyhow::anyhow!(\"missing post\"))?\n\n .children;\n\n\n\n if post_data.is_empty() {\n\n anyhow::bail!(\"missing post\");\n\n }\n\n\n\n let mut post = post_data\n\n .swap_remove(0)\n\n .data\n\n .into_link()\n\n .ok_or_else(|| anyhow::anyhow!(\"missing post\"))?;\n\n\n\n // If cross post, resolve one level. 
Is it possible to crosspost a crosspost?\n\n\n\n // Remove crosspost list from response...\n\n let crosspost_parent_list = std::mem::take(&mut post.crosspost_parent_list);\n\n if let Some(post) = crosspost_parent_list.and_then(|mut l| {\n\n if l.is_empty() {\n", "file_path": "src/commands/reddit_embed.rs", "rank": 49, "score": 120231.25544628379 }, { "content": "INSERT OR REPLACE INTO reddit_embed_guild_settings (\n\n guild_id, \n\n enabled\n\n) VALUES (\n\n ?, \n\n ?\n\n);", "file_path": "sql/set_reddit_embed_enabled.sql", "rank": 50, "score": 118243.03537880434 }, { "content": "fn fmt_cpu_frequency(freq: &Frequency) -> String {\n\n let fmt_args = FrequencyF32::format_args(gigahertz, DisplayStyle::Abbreviation);\n\n let freq = FrequencyF32::new::<hertz>(freq.get::<hertz>() as f32);\n\n\n\n format!(\"{:.2}\", fmt_args.with(freq))\n\n}\n\n\n\nasync fn get_cpu_usage() -> Result<f32, heim::Error> {\n\n let start = heim::cpu::usage().await?;\n\n tokio::time::sleep(Duration::from_secs(1)).await;\n\n let end = heim::cpu::usage().await?;\n\n\n\n Ok((end - start).get::<heim::units::ratio::percent>())\n\n}\n\n\n\n#[command]\n\n#[description(\"Get System Stats\")]\n\n#[bucket(\"system\")]\n\n#[checks(Enabled)]\n\nasync fn system(ctx: &Context, msg: &Message, _args: Args) -> CommandResult {\n", "file_path": "src/commands/system.rs", "rank": 51, "score": 117805.60967675263 }, { "content": "pub mod generic_stats;\n\n\n\npub use self::generic_stats::GenericStats;\n\nuse chrono::{\n\n DateTime,\n\n Utc,\n\n};\n\nuse std::collections::HashMap;\n\nuse url::Url;\n\n\n\n/// Api Response\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct ApiResponse<T> {\n\n pub data: T,\n\n}\n\n\n\n/// User Data\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct UserData {\n\n pub avatar_banned: bool,\n", "file_path": "lib/r6stats-rs/src/types/search_data.rs", "rank": 52, "score": 109020.73690175512 }, { "content": " pub losses: u32,\n\n pub max_mmr: f64,\n\n 
pub max_rank: u32,\n\n pub mmr: f64,\n\n pub next_rank_mmr: u32,\n\n pub prev_rank_mmr: u32,\n\n pub rank: u32,\n\n pub region: String,\n\n pub skill_mean: f64,\n\n pub skill_standard_deviation: f64,\n\n pub updated_at: DateTime<Utc>,\n\n pub wins: u32,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n", "file_path": "lib/r6stats-rs/src/types/search_data.rs", "rank": 53, "score": 109014.61598762922 }, { "content": "pub struct ProgressionStats {\n\n pub level: u32,\n\n pub lootbox_probability: u32,\n\n pub total_xp: u64,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct SeasonalStats {\n\n pub abandons: u32,\n\n pub champions_rank_position: Option<u32>,\n\n pub created_at: DateTime<Utc>,\n\n pub created_for_date: DateTime<Utc>,\n\n pub deaths: Option<u32>,\n\n pub kills: Option<u32>,\n\n pub last_match_mmr_change: Option<i32>,\n\n pub last_match_skill_mean_change: Option<f64>,\n\n pub last_match_skill_standard_deviation_change: Option<f64>,\n", "file_path": "lib/r6stats-rs/src/types/search_data.rs", "rank": 54, "score": 109009.62269344654 }, { "content": "\n\n const VALID_DATA: &str = include_str!(\"../../test_data/search_data_valid.json\");\n\n const SEARCH_ASDF_DATA: &str = include_str!(\"../../test_data/search_asdf.json\");\n\n\n\n #[tokio::test]\n\n async fn parse_valid() {\n\n let valid = serde_json::from_str::<Vec<UserData>>(VALID_DATA).unwrap();\n\n dbg!(&valid);\n\n }\n\n\n\n #[tokio::test]\n\n async fn parse_asdf() {\n\n let valid = serde_json::from_str::<Vec<UserData>>(SEARCH_ASDF_DATA).unwrap();\n\n dbg!(&valid);\n\n }\n\n}\n", "file_path": "lib/r6stats-rs/src/types/search_data.rs", "rank": 55, "score": 109009.10831218962 }, { "content": "\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\nimpl UserData {\n\n pub fn kd(&self) 
-> Option<f64> {\n\n Some(self.generic_stats.as_ref()?.general.kd)\n\n }\n\n\n\n pub fn wl(&self) -> Option<f64> {\n\n Some(self.generic_stats.as_ref()?.general.wl)\n\n }\n\n\n\n pub fn mmr(&self) -> Option<u32> {\n\n Some(self.seasonal_stats.as_ref()?.mmr as u32)\n\n }\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n", "file_path": "lib/r6stats-rs/src/types/search_data.rs", "rank": 56, "score": 109008.22482191405 }, { "content": " pub avatar_url_146: Url,\n\n pub avatar_url_256: Url,\n\n pub claimed: bool,\n\n\n\n #[serde(rename = \"genericStats\")]\n\n pub generic_stats: Option<GenericStats>,\n\n\n\n pub last_updated: DateTime<Utc>,\n\n pub platform: String,\n\n\n\n #[serde(rename = \"progressionStats\")]\n\n pub progression_stats: Option<ProgressionStats>,\n\n\n\n #[serde(rename = \"seasonalStats\")]\n\n pub seasonal_stats: Option<SeasonalStats>,\n\n\n\n pub ubisoft_id: String,\n\n pub uplay_id: String,\n\n\n\n pub username: String,\n", "file_path": "lib/r6stats-rs/src/types/search_data.rs", "rank": 57, "score": 109005.19155375008 }, { "content": " }\n\n}\n\n\n\nimpl<T> ApiResponse<T> {\n\n /// Convert this into as Result.\n\n pub fn into_result(self) -> Result<T, InvalidApiResponseError> {\n\n match self {\n\n Self::Valid(data) => Ok(data),\n\n Self::Invalid(err) => Err(err),\n\n }\n\n }\n\n\n\n /// Consume self and return the valid variant, or None.\n\n pub fn take_valid(self) -> Option<T> {\n\n match self {\n\n Self::Valid(data) => Some(data),\n\n Self::Invalid(_) => None,\n\n }\n\n }\n\n\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 58, "score": 108645.79108863739 }, { "content": "\n\n#[derive(Debug)]\n\npub struct InvalidApiResponseError(pub Vec<ApiError>);\n\n\n\nimpl std::error::Error for InvalidApiResponseError {}\n\n\n\nimpl std::fmt::Display for InvalidApiResponseError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n writeln!(f, \"the api request failed due to the following: 
\")?;\n\n for error in self.0.iter() {\n\n writeln!(f, \" {}\", error.message)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Errors that occured while procesing an API Request\n\n#[derive(serde::Deserialize, Debug)]\n\npub struct ApiError {\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 59, "score": 108645.2060135737 }, { "content": "/// Season data type\n\npub mod season;\n\n\n\npub use self::season::Season;\n\nuse crate::{\n\n types::platform::Platform,\n\n Stat,\n\n};\n\nuse std::collections::HashMap;\n\nuse url::Url;\n\n\n\n/// A json response from the UserData API.\n\n#[derive(Debug)]\n\npub enum ApiResponse<T> {\n\n /// A Valid Response\n\n Valid(T),\n\n\n\n /// An Invalid Response\n\n Invalid(InvalidApiResponseError),\n\n}\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 60, "score": 108644.52446412227 }, { "content": " pub segment_controls: Vec<serde_json::Value>,\n\n\n\n #[serde(rename = \"statsCategoryOrder\")]\n\n pub stats_category_order: Vec<String>,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::types::ApiResponse;\n\n\n\n const SAMPLE_1: &str = include_str!(\"../../test_data/user_data_1.json\");\n\n const SAMPLE_2: &str = include_str!(\"../../test_data/user_data_2.json\");\n\n const INVALID_USER_DATA: &str = include_str!(\"../../test_data/invalid_user_data.json\");\n\n const SMACK_ASH_USER_DATA: &str = include_str!(\"../../test_data/smack_ash_user_data.json\");\n\n\n\n #[test]\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 61, "score": 108644.49709079518 }, { "content": " let mut map = serde_json::Map::deserialize(deserializer)?;\n\n\n\n let data: Option<Result<T, _>> = map\n\n .remove(\"data\")\n\n .map(|data| serde::Deserialize::deserialize(data).map_err(serde::de::Error::custom));\n\n let rest = serde_json::Value::Object(map);\n\n\n\n match data {\n\n Some(data) => 
Ok(Self::Valid(data?)),\n\n None => {\n\n #[derive(serde::Deserialize)]\n\n struct ErrorReason {\n\n errors: Vec<ApiError>,\n\n }\n\n\n\n ErrorReason::deserialize(rest)\n\n .map(|e| Self::Invalid(InvalidApiResponseError(e.errors)))\n\n .map_err(serde::de::Error::custom)\n\n }\n\n }\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 62, "score": 108644.46611889468 }, { "content": "\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::types::ApiResponse;\n\n\n\n const SAMPLE_1: &str = include_str!(\"../../test_data/sessions_data.json\");\n\n\n\n #[test]\n\n fn parse_sample_1() {\n\n let data = serde_json::from_str::<ApiResponse<SessionsData>>(SAMPLE_1)\n\n .unwrap()\n\n .take_valid()\n\n .unwrap();\n\n\n\n dbg!(&data);\n\n }\n\n}\n", "file_path": "lib/r6tracker-rs/src/types/sessions_data.rs", "rank": 63, "score": 108644.41936467096 }, { "content": "use crate::types::stat::Stat;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct SessionsData {\n\n pub items: Vec<Session>,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct Session {\n\n #[serde(rename = \"startedAt\")]\n\n pub started_at: String,\n\n\n\n #[serde(rename = \"endedAt\")]\n\n pub ended_at: Option<String>,\n\n\n\n pub duration: f64,\n", "file_path": "lib/r6tracker-rs/src/types/sessions_data.rs", "rank": 64, "score": 108641.61997540068 }, { "content": " /// The error message\n\n pub message: String,\n\n}\n\n\n\nimpl std::fmt::Display for ApiError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"api error ({})\", self.message)\n\n }\n\n}\n\n\n\nimpl std::error::Error for ApiError {}\n\n\n\nimpl<'de, T> serde::Deserialize<'de> for ApiResponse<T>\n\nwhere\n\n T: serde::Deserialize<'de>,\n\n{\n\n 
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 65, "score": 108640.50780489069 }, { "content": " /// Consume self and return the invalid variant, or None.\n\n pub fn take_invalid(self) -> Option<InvalidApiResponseError> {\n\n match self {\n\n Self::Valid(_) => None,\n\n Self::Invalid(err) => Some(err),\n\n }\n\n }\n\n}\n\n\n\n#[allow(clippy::upper_case_acronyms)]\n\n/// An R6 Rank.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub enum Rank {\n\n Unranked,\n\n\n\n CopperV,\n\n CopperIV,\n\n CopperIII,\n\n CopperII,\n\n CopperI,\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 66, "score": 108638.75467027417 }, { "content": "\n\n #[serde(rename = \"isActive\")]\n\n pub is_active: bool,\n\n\n\n pub matches: Vec<Match>,\n\n pub stats: Vec<Stat>,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct Match {\n\n pub id: String,\n\n\n\n #[serde(rename = \"type\")]\n\n pub kind: String,\n\n\n\n pub metadata: serde_json::Value,\n\n pub stats: Vec<Stat>,\n", "file_path": "lib/r6tracker-rs/src/types/sessions_data.rs", "rank": 67, "score": 108638.49411610764 }, { "content": " pub stats: Vec<Stat>,\n\n\n\n /// Unknown fields\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\nimpl UserData {\n\n /// Utility function to get a stat by name. 
Currently an O(n) linear search.\n\n fn get_stat_by_name(&self, name: &str) -> Option<&Stat> {\n\n self.stats.iter().find(|s| s.name() == name)\n\n }\n\n\n\n /// Gets top mmr from all servers.\n\n pub fn current_mmr(&self) -> Option<u32> {\n\n self.get_stat_by_name(\"MMR\").map(|s| s.value as u32)\n\n }\n\n\n\n /// Get the image url for the rank this user is at gloablly\n\n pub fn current_mmr_image(&self) -> Option<&Url> {\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 68, "score": 108636.80023425499 }, { "content": " self.get_stat_by_name(\"Global MMR\")\n\n .and_then(|s| s.icon_url())\n\n }\n\n\n\n /// Get the MMR for this user.\n\n pub fn current_mmr_america(&self) -> Option<u32> {\n\n self.get_stat_by_name(\"Global MMR\").map(|s| s.value as u32)\n\n }\n\n\n\n /// Gets this season's color as a string hex value\n\n pub fn season_color(&self) -> &str {\n\n &self.metadata.current_season_color\n\n }\n\n\n\n /// Tries to parse this season's hex color as a u32\n\n pub fn season_color_u32(&self) -> Option<u32> {\n\n u32::from_str_radix(self.season_color().get(1..)?, 16).ok()\n\n }\n\n\n\n /// Get total # of kills\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 69, "score": 108635.6243669573 }, { "content": "\n\n Diamond,\n\n\n\n Champion,\n\n}\n\n\n\nimpl Rank {\n\n /// Get a string rep of this rank\n\n pub fn name(self) -> &'static str {\n\n match self {\n\n Self::Unranked => \"Unranked\",\n\n\n\n Self::CopperV => \"Copper V\",\n\n Self::CopperIV => \"Copper IV\",\n\n Self::CopperIII => \"Copper III\",\n\n Self::CopperII => \"Copper II\",\n\n Self::CopperI => \"Copper I\",\n\n\n\n Self::BronzeV => \"Bronze V\",\n\n Self::BronzeIV => \"Bronze IV\",\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 70, "score": 108633.10953337356 }, { "content": " fn parse_sample_1() {\n\n let data = serde_json::from_str::<ApiResponse<UserData>>(SAMPLE_1)\n\n .unwrap()\n\n .take_valid()\n\n .unwrap();\n\n let season = 
data.get_latest_season().unwrap();\n\n dbg!(season);\n\n\n\n let max_season = data.get_max_season().unwrap();\n\n dbg!(max_season.max_mmr());\n\n dbg!(max_season.max_rank());\n\n }\n\n\n\n #[test]\n\n fn parse_sample_2() {\n\n let data = serde_json::from_str::<ApiResponse<UserData>>(SAMPLE_2)\n\n .unwrap()\n\n .take_valid()\n\n .unwrap();\n\n let season = data.get_latest_season().unwrap();\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 71, "score": 108632.65833518974 }, { "content": " Self::Champion => \"Champion\",\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct UserData {\n\n /// Unique user id\n\n pub id: String,\n\n\n\n #[serde(rename = \"type\")]\n\n pub kind: String,\n\n\n\n /// Collection of ranked seasons stats\n\n pub children: Vec<Season>,\n\n\n\n /// Metadata\n\n pub metadata: Metadata,\n\n\n\n /// A collection of all stats\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 72, "score": 108629.82396469485 }, { "content": "\n\n dbg!(season);\n\n }\n\n\n\n #[test]\n\n fn parse_smack_ash_user_data() {\n\n let data = serde_json::from_str::<ApiResponse<UserData>>(SMACK_ASH_USER_DATA)\n\n .unwrap()\n\n .take_valid()\n\n .unwrap();\n\n assert!(data.get_latest_season().is_none());\n\n }\n\n\n\n #[test]\n\n fn parse_invalid_sample() {\n\n let data = serde_json::from_str::<ApiResponse<UserData>>(INVALID_USER_DATA).unwrap();\n\n\n\n dbg!(data);\n\n }\n\n}\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 73, "score": 108627.84627652254 }, { "content": " /// Get the season where the user attained their max ranking\n\n pub fn get_max_season(&self) -> Option<&Season> {\n\n self.children\n\n .iter()\n\n .filter_map(|child| child.max_mmr().map(|mmr| (child, mmr)))\n\n .max_by_key(|(_, mmr)| *mmr)\n\n .map(|(child, _)| child)\n\n }\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct Metadata {\n\n #[serde(rename = \"accountId\")]\n\n pub 
account_id: String,\n\n\n\n #[serde(rename = \"countryCode\")]\n\n pub country_code: Option<String>,\n\n\n\n #[serde(rename = \"currentSeasonColor\")]\n\n pub current_season_color: String,\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 74, "score": 108627.55505753694 }, { "content": " pub fn get_kills(&self) -> Option<u64> {\n\n self.get_stat_by_name(\"Kills\").map(|s| s.value as u64)\n\n }\n\n\n\n /// Get total # of deaths\n\n pub fn get_deaths(&self) -> Option<u64> {\n\n self.get_stat_by_name(\"Deaths\").map(|s| s.value as u64)\n\n }\n\n\n\n /// Get overall K/D\n\n pub fn kd(&self) -> Option<f64> {\n\n self.get_stat_by_name(\"KD Ratio\").map(|s| s.value)\n\n }\n\n\n\n /// Get Overall W/L\n\n pub fn wl(&self) -> Option<f64> {\n\n self.get_stat_by_name(\"WL Ratio\").map(|s| s.value)\n\n }\n\n\n\n /// Get user tag name\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 75, "score": 108627.36050982797 }, { "content": " pub fn name(&self) -> &str {\n\n &self.metadata.platform_user_handle\n\n }\n\n\n\n /// Get user avatar url\n\n pub fn avatar_url(&self) -> &Url {\n\n &self.metadata.picture_url\n\n }\n\n\n\n /// Get the latest stats for the latest ranked region/season the user has played in\n\n pub fn get_latest_season(&self) -> Option<&Season> {\n\n let target_id = format!(\n\n \"region-{}.season-{}\",\n\n self.metadata.latest_region.unwrap_or(100),\n\n self.metadata.latest_season\n\n );\n\n\n\n self.children.iter().find(|s| s.id == target_id)\n\n }\n\n\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 76, "score": 108625.27843462495 }, { "content": "\n\n #[serde(rename = \"currentSeasonName\")]\n\n pub current_season_name: String,\n\n\n\n #[serde(rename = \"latestRegion\")]\n\n pub latest_region: Option<u32>,\n\n\n\n #[serde(rename = \"latestSeason\")]\n\n pub latest_season: u32,\n\n\n\n #[serde(rename = \"pictureUrl\")]\n\n pub picture_url: Url,\n\n\n\n #[serde(rename = \"platformId\")]\n\n pub platform_id: 
Platform,\n\n\n\n #[serde(rename = \"platformUserHandle\")]\n\n pub platform_user_handle: String,\n\n\n\n #[serde(rename = \"segmentControls\")]\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 77, "score": 108622.91636749545 }, { "content": " Self::BronzeIII => \"Bronze III\",\n\n Self::BronzeII => \"Bronze II\",\n\n Self::BronzeI => \"Bronze I\",\n\n\n\n Self::SilverV => \"Silver V\",\n\n Self::SilverIV => \"Silver IV\",\n\n Self::SilverIII => \"Silver III\",\n\n Self::SilverII => \"Silver II\",\n\n Self::SilverI => \"Silver I\",\n\n\n\n Self::GoldIII => \"Gold III\",\n\n Self::GoldII => \"Gold II\",\n\n Self::GoldI => \"Gold I\",\n\n\n\n Self::PlatinumIII => \"Platinum III\",\n\n Self::PlatinumII => \"Platinum II\",\n\n Self::PlatinumI => \"Platinum I\",\n\n\n\n Self::Diamond => \"Diamond\",\n\n\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 78, "score": 108617.9746740608 }, { "content": "\n\n BronzeV,\n\n BronzeIV,\n\n BronzeIII,\n\n BronzeII,\n\n BronzeI,\n\n\n\n SilverV,\n\n SilverIV,\n\n SilverIII,\n\n SilverII,\n\n SilverI,\n\n\n\n GoldIII,\n\n GoldII,\n\n GoldI,\n\n\n\n PlatinumIII,\n\n PlatinumII,\n\n PlatinumI,\n", "file_path": "lib/r6tracker-rs/src/types/user_data.rs", "rank": 79, "score": 108617.9746740608 }, { "content": " #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n/// ?\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct PublicSession {\n\n /// Whether the user is logged in\n\n #[serde(rename = \"isLoggedIn\")]\n\n pub is_logged_in: bool,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 80, "score": 105299.22847616627 }, { "content": " const SCRAPED_WEBPAGE: &str = include_str!(\"../../test_data/scraped_webpage.json\");\n\n const LOGIN_WEBPAGE: &str = 
include_str!(\"../../test_data/login_webpage.json\");\n\n\n\n #[test]\n\n fn parse_scraped_webpage() {\n\n let scraped_webpage_info: ScrapedWebPageInfo =\n\n serde_json::from_str(SCRAPED_WEBPAGE).expect(\"failed to parse scraped webpage info\");\n\n assert_eq!(\n\n scraped_webpage_info\n\n .get_current_deviation_id()\n\n .expect(\"missing current deviation id\"),\n\n 119577071\n\n );\n\n // dbg!(scraped_deviation_info.entities.deviation);\n\n }\n\n\n\n #[test]\n\n fn parse_login_webpage() {\n\n let _scraped_webpage_info: ScrapedWebPageInfo =\n\n serde_json::from_str(LOGIN_WEBPAGE).expect(\"failed to parse scraped webpage info\");\n\n }\n\n}\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 81, "score": 105299.07732405752 }, { "content": "use super::Deviation;\n\nuse std::collections::HashMap;\n\nuse url::Url;\n\n\n\n/// Info scraped from a deviation url\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct ScrapedWebPageInfo {\n\n /// Page config like csrf tokens\n\n #[serde(rename = \"@@config\")]\n\n pub config: Config,\n\n\n\n /// Deviations extended deviations maybe?\n\n #[serde(rename = \"@@entities\")]\n\n pub entities: Option<Entities>,\n\n\n\n /// ?\n\n #[serde(rename = \"@@DUPERBROWSE\")]\n\n pub duper_browse: Option<DuperBrowse>,\n\n\n\n /// Info about the current session\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 82, "score": 105299.0274639261 }, { "content": "use std::collections::HashMap;\n\nuse url::Url;\n\n\n\n/// Scraped info from a sta.sh link\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct ScrapedStashInfo {\n\n /// Csrf token\n\n pub csrf: String,\n\n\n\n /// ?\n\n pub deviationid: u64,\n\n\n\n /// ?\n\n pub film: Film,\n\n\n\n /// ?\n\n pub deviation_width: u64,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n", "file_path": "lib/deviantart-rs/src/types/scraped_stash_info.rs", "rank": 83, "score": 105297.7690284648 }, { "content": " pub unknown: HashMap<String, 
serde_json::Value>,\n\n}\n\n\n\n/// Film data from a sta.sh link\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct Film {\n\n /// Video sizes\n\n pub sizes: HashMap<String, Size>,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\nimpl Film {\n\n /// Get the best size\n\n pub fn get_best_size(&self) -> Option<&Size> {\n\n self.sizes.values().max_by_key(|v| v.width)\n\n }\n\n}\n", "file_path": "lib/deviantart-rs/src/types/scraped_stash_info.rs", "rank": 84, "score": 105296.72824775183 }, { "content": " #[serde(rename = \"@@publicSession\")]\n\n pub public_session: PublicSession,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\nimpl ScrapedWebPageInfo {\n\n /// Get the current deviation's id\n\n pub fn get_current_deviation_id(&self) -> Option<&serde_json::Value> {\n\n Some(\n\n &self\n\n .duper_browse\n\n .as_ref()?\n\n .root_stream\n\n .as_ref()?\n\n .current_open_item,\n\n )\n\n }\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 85, "score": 105295.47720673284 }, { "content": " pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n/// Extended Info about a deviation\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct DeviationExtended {\n\n /// Download info\n\n pub download: Option<Download>,\n\n\n\n /// HTML description\n\n pub description: Option<String>,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct Download {\n\n /// The file size\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 86, "score": 105293.80832037001 }, { "content": " pub filesize: u64,\n\n\n\n /// The image height\n\n pub height: u32,\n\n\n\n /// The image width\n\n pub width: u32,\n\n\n\n /// ?\n\n #[serde(rename = \"type\")]\n\n pub kind: String,\n\n\n\n /// The url\n\n pub url: 
Url,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 87, "score": 105293.1070533219 }, { "content": " let key = match id {\n\n serde_json::Value::Number(n) => {\n\n let n = n.as_u64()?;\n\n key_buffer.format(n)\n\n }\n\n serde_json::Value::String(s) => s,\n\n _ => return None,\n\n };\n\n self.entities\n\n .as_ref()?\n\n .deviation_extended\n\n .as_ref()?\n\n .get(key)\n\n }\n\n}\n\n\n\n/// ?\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct Config {\n\n /// The page's csrf token\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 88, "score": 105292.45456588134 }, { "content": " #[serde(rename = \"csrfToken\")]\n\n pub csrf_token: String,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n/// ?\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct Entities {\n\n /// Deviations\n\n pub deviation: HashMap<String, Deviation>,\n\n\n\n /// Extended Deviation Info\n\n #[serde(rename = \"deviationExtended\")]\n\n pub deviation_extended: Option<HashMap<String, DeviationExtended>>,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 89, "score": 105292.26214860173 }, { "content": "/// ?\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct DuperBrowse {\n\n /// ?\n\n #[serde(rename = \"rootStream\")]\n\n pub root_stream: Option<RootStream>,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n/// ?\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct RootStream {\n\n /// The id of the current deviation. 
This is either a number or string.\n\n #[serde(rename = \"currentOpenItem\")]\n\n pub current_open_item: serde_json::Value,\n\n\n\n /// Unknown data\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 90, "score": 105290.18494227956 }, { "content": "\n\n/// Film size\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct Size {\n\n /// Video height\n\n pub height: u32,\n\n\n\n /// Video width\n\n pub width: u32,\n\n\n\n /// Video src\n\n pub src: Url,\n\n\n\n /// Unknown data\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n", "file_path": "lib/deviantart-rs/src/types/scraped_stash_info.rs", "rank": 91, "score": 105289.341767244 }, { "content": "\n\n /// Get the [`Deviation`] for this page.\n\n pub fn get_current_deviation(&self) -> Option<&Deviation> {\n\n let id = self.get_current_deviation_id()?;\n\n let mut key_buffer = itoa::Buffer::new();\n\n let key = match id {\n\n serde_json::Value::Number(n) => {\n\n let n = n.as_u64()?;\n\n key_buffer.format(n)\n\n }\n\n serde_json::Value::String(s) => s,\n\n _ => return None,\n\n };\n\n self.entities.as_ref()?.deviation.get(key)\n\n }\n\n\n\n /// Get the [`DeviationExtended`] for this page.\n\n pub fn get_current_deviation_extended(&self) -> Option<&DeviationExtended> {\n\n let id = self.get_current_deviation_id()?;\n\n let mut key_buffer = itoa::Buffer::new();\n", "file_path": "lib/deviantart-rs/src/types/scraped_webpage_info.rs", "rank": 92, "score": 105287.57778041656 }, { "content": "use crate::types::{\n\n stat::Stat,\n\n user_data::Rank,\n\n};\n\nuse std::collections::HashMap;\n\n\n\n/// A representation of a ranked season/region\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct Season {\n\n pub id: String,\n\n\n\n #[serde(rename = \"type\")]\n\n pub kind: String,\n\n\n\n pub metadata: Metadata,\n\n pub stats: Vec<Stat>,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n", "file_path": 
"lib/r6tracker-rs/src/types/user_data/season.rs", "rank": 93, "score": 105246.41597693195 }, { "content": "\n\nimpl Season {\n\n /// Utility function to get a stat by name. Currently an O(n) linear search.\n\n fn get_stat_by_name(&self, name: &str) -> Option<&Stat> {\n\n self.stats.iter().find(|s| s.name() == name)\n\n }\n\n\n\n /// Gets current mmr for this region in this season\n\n pub fn current_mmr(&self) -> Option<u32> {\n\n self.get_stat_by_name(\"MMR\").map(|s| s.value as u32)\n\n }\n\n\n\n /// Get Win / Loss this season/region\n\n pub fn wl(&self) -> Option<f64> {\n\n // Why is this different from UserData?\n\n self.get_stat_by_name(\"WLRatio\").map(|s| s.value)\n\n }\n\n\n\n /// Get the max mmr\n\n pub fn max_mmr(&self) -> Option<u64> {\n", "file_path": "lib/r6tracker-rs/src/types/user_data/season.rs", "rank": 94, "score": 105244.32557310429 }, { "content": " self.get_stat_by_name(\"Max MMR\").map(|s| s.value as u64)\n\n }\n\n\n\n /// Get the max rank\n\n pub fn max_rank(&self) -> Option<Rank> {\n\n match self.get_stat_by_name(\"Max Rank\")?.value as u64 {\n\n 0 => Some(Rank::Unranked),\n\n\n\n 1 => Some(Rank::CopperV),\n\n 2 => Some(Rank::CopperIV),\n\n 3 => Some(Rank::CopperIII),\n\n 4 => Some(Rank::CopperII),\n\n 5 => Some(Rank::CopperI),\n\n\n\n 6 => Some(Rank::BronzeV),\n\n 7 => Some(Rank::BronzeIV),\n\n 8 => Some(Rank::BronzeIII),\n\n 9 => Some(Rank::BronzeII),\n\n 10 => Some(Rank::BronzeI),\n\n\n", "file_path": "lib/r6tracker-rs/src/types/user_data/season.rs", "rank": 95, "score": 105236.71635851645 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct Metadata {\n\n pub name: String,\n\n pub segment: String,\n\n\n\n #[serde(rename = \"statsCategoryOrder\")]\n\n pub stats_category_order: Vec<String>,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n", "file_path": "lib/r6tracker-rs/src/types/user_data/season.rs", "rank": 96, "score": 105230.3968726786 }, { "content": 
" 11 => Some(Rank::SilverV),\n\n 12 => Some(Rank::SilverIV),\n\n 13 => Some(Rank::SilverIII),\n\n 14 => Some(Rank::SilverII),\n\n 15 => Some(Rank::SilverI),\n\n\n\n 16 => Some(Rank::GoldIII),\n\n 17 => Some(Rank::GoldII),\n\n 18 => Some(Rank::GoldI),\n\n\n\n 19 => Some(Rank::PlatinumIII),\n\n 20 => Some(Rank::PlatinumII),\n\n 21 => Some(Rank::PlatinumI),\n\n\n\n 22 => Some(Rank::Diamond),\n\n\n\n 23 => Some(Rank::Champion),\n\n\n\n _ => None,\n\n }\n", "file_path": "lib/r6tracker-rs/src/types/user_data/season.rs", "rank": 97, "score": 105221.86939873637 }, { "content": "use chrono::{\n\n DateTime,\n\n Utc,\n\n};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct GenericStats {\n\n pub gamemode: GameMode,\n\n pub general: General,\n\n pub queue: Queue,\n\n pub timestamps: Timestamps,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct GameMode {\n\n pub bomb: Bomb,\n", "file_path": "lib/r6stats-rs/src/types/search_data/generic_stats.rs", "rank": 98, "score": 102403.17486109983 }, { "content": " pub wins: i64,\n\n pub wl: f64,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n\npub struct Timestamps {\n\n pub created: DateTime<Utc>,\n\n pub last_updated: DateTime<Utc>,\n\n\n\n #[serde(flatten)]\n\n pub unknown: HashMap<String, serde_json::Value>,\n\n}\n", "file_path": "lib/r6stats-rs/src/types/search_data/generic_stats.rs", "rank": 99, "score": 102398.86928156139 } ]
Rust
phper/src/functions.rs
erasin/phper
ec1a67cac3e3d101242786e950246f76fd92f921
use std::{mem::zeroed, os::raw::c_char}; use crate::{ alloc::EBox, classes::Visibility, errors::{ArgumentCountError, CallFunctionError, CallMethodError}, objects::Object, strings::ZendString, sys::*, utils::ensure_end_with_zero, values::{ExecuteData, SetVal, Val}, }; use std::{ marker::PhantomData, mem::{forget, size_of}, ptr::null_mut, }; pub(crate) trait Callable { fn call(&self, execute_data: &mut ExecuteData, arguments: &mut [Val], return_value: &mut Val); } pub(crate) struct Function<F, R>(F) where F: Fn(&mut [Val]) -> R + Send + Sync, R: SetVal; impl<F, R> Function<F, R> where F: Fn(&mut [Val]) -> R + Send + Sync, R: SetVal, { pub fn new(f: F) -> Self { Self(f) } } impl<F, R> Callable for Function<F, R> where F: Fn(&mut [Val]) -> R + Send + Sync, R: SetVal, { fn call(&self, _: &mut ExecuteData, arguments: &mut [Val], return_value: &mut Val) { let r = (self.0)(arguments); unsafe { r.set_val(return_value); } } } pub(crate) struct Method<F, R, T> where F: Fn(&mut Object<T>, &mut [Val]) -> R + Send + Sync, R: SetVal, { f: F, _p0: PhantomData<R>, _p1: PhantomData<T>, } impl<F, R, T> Method<F, R, T> where F: Fn(&mut Object<T>, &mut [Val]) -> R + Send + Sync, R: SetVal, { pub(crate) fn new(f: F) -> Self { Self { f, _p0: Default::default(), _p1: Default::default(), } } } impl<F, R, T: 'static> Callable for Method<F, R, T> where F: Fn(&mut Object<T>, &mut [Val]) -> R + Send + Sync, R: SetVal, { fn call(&self, execute_data: &mut ExecuteData, arguments: &mut [Val], return_value: &mut Val) { unsafe { let this = execute_data.get_this::<T>().unwrap(); let r = (self.f)(this, arguments); r.set_val(return_value); } } } #[repr(transparent)] pub struct FunctionEntry { #[allow(dead_code)] inner: zend_function_entry, } pub struct FunctionEntity { pub(crate) name: String, pub(crate) handler: Box<dyn Callable>, pub(crate) arguments: Vec<Argument>, pub(crate) visibility: Option<Visibility>, pub(crate) r#static: Option<bool>, } impl FunctionEntity { pub(crate) fn new( name: impl 
ToString, handler: Box<dyn Callable>, arguments: Vec<Argument>, visibility: Option<Visibility>, r#static: Option<bool>, ) -> Self { let name = ensure_end_with_zero(name); FunctionEntity { name, handler, arguments, visibility, r#static, } } pub(crate) unsafe fn entry(&self) -> zend_function_entry { let mut infos = Vec::new(); let require_arg_count = self.arguments.iter().filter(|arg| arg.required).count(); infos.push(create_zend_arg_info( require_arg_count as *const c_char, false, )); for arg in &self.arguments { infos.push(create_zend_arg_info( arg.name.as_ptr().cast(), arg.pass_by_ref, )); } infos.push(zeroed::<zend_internal_arg_info>()); let translator = CallableTranslator { callable: self.handler.as_ref(), }; let last_arg_info: zend_internal_arg_info = translator.internal_arg_info; infos.push(last_arg_info); let flags = self.visibility.map(|v| v as u32).unwrap_or_default() | self .r#static .and_then(|v| if v { Some(ZEND_ACC_STATIC) } else { None }) .unwrap_or_default(); zend_function_entry { fname: self.name.as_ptr().cast(), handler: Some(invoke), arg_info: Box::into_raw(infos.into_boxed_slice()).cast(), num_args: self.arguments.len() as u32, flags, } } } pub struct Argument { pub(crate) name: String, pub(crate) pass_by_ref: bool, pub(crate) required: bool, } impl Argument { pub fn by_val(name: impl ToString) -> Self { let name = ensure_end_with_zero(name); Self { name, pass_by_ref: false, required: true, } } pub fn by_ref(name: impl ToString) -> Self { let name = ensure_end_with_zero(name); Self { name, pass_by_ref: true, required: true, } } pub fn by_val_optional(name: impl ToString) -> Self { let name = ensure_end_with_zero(name); Self { name, pass_by_ref: false, required: false, } } pub fn by_ref_optional(name: impl ToString) -> Self { let name = ensure_end_with_zero(name); Self { name, pass_by_ref: true, required: false, } } } #[repr(transparent)] pub struct ZendFunction { inner: zend_function, } impl ZendFunction { pub(crate) unsafe fn 
from_mut_ptr<'a>(ptr: *mut zend_function) -> &'a mut ZendFunction { let ptr = ptr as *mut Self; ptr.as_mut().expect("ptr shouldn't be null") } #[inline] pub fn as_ptr(&self) -> *const zend_function { &self.inner } #[inline] pub fn as_mut_ptr(&mut self) -> *mut zend_function { &mut self.inner } pub fn get_name(&self) -> EBox<ZendString> { unsafe { let s = phper_get_function_or_method_name(self.as_ptr()); ZendString::from_raw(s) } } pub fn call_method<T: 'static>( &mut self, object: &mut Object<T>, mut arguments: impl AsMut<[Val]>, ) -> crate::Result<EBox<Val>> { let mut ret_val = EBox::new(Val::undef()); let arguments = arguments.as_mut(); let mut fci = zend_fcall_info { size: size_of::<zend_fcall_info>(), function_name: Val::undef().into_inner(), retval: ret_val.as_mut_ptr(), params: arguments.as_mut_ptr().cast(), object: object.as_mut_ptr(), param_count: arguments.len() as u32, #[cfg(phper_major_version = "8")] named_params: null_mut(), #[cfg(phper_major_version = "7")] no_separation: 1, #[cfg(all(phper_major_version = "7", phper_minor_version = "0"))] function_table: null_mut(), #[cfg(all(phper_major_version = "7", phper_minor_version = "0"))] symbol_table: null_mut(), }; let called_scope = unsafe { let mut called_scope = object.get_class().as_ptr() as *mut zend_class_entry; if called_scope.is_null() { called_scope = self.inner.common.scope; } called_scope }; let mut fcc = zend_fcall_info_cache { function_handler: self.as_mut_ptr(), calling_scope: null_mut(), called_scope, object: object.as_mut_ptr(), #[cfg(all( phper_major_version = "7", any( phper_minor_version = "0", phper_minor_version = "1", phper_minor_version = "2", ) ))] initialized: 1, }; unsafe { if zend_call_function(&mut fci, &mut fcc) != ZEND_RESULT_CODE_SUCCESS || ret_val.get_type().is_undef() { Err(CallMethodError::new( object.get_class().get_name().as_str()?.to_owned(), self.get_name().as_str()?.to_owned(), ) .into()) } else { Ok(ret_val) } } } } pub(crate) union CallableTranslator { pub(crate) 
callable: *const dyn Callable, pub(crate) internal_arg_info: zend_internal_arg_info, pub(crate) arg_info: zend_arg_info, } unsafe extern "C" fn invoke(execute_data: *mut zend_execute_data, return_value: *mut zval) { let execute_data = ExecuteData::from_mut_ptr(execute_data); let return_value = Val::from_mut_ptr(return_value); let num_args = execute_data.common_num_args(); let arg_info = execute_data.common_arg_info(); let last_arg_info = arg_info.offset((num_args + 1) as isize); let translator = CallableTranslator { arg_info: *last_arg_info, }; let handler = translator.callable; let handler = handler.as_ref().expect("handler is null"); let num_args = execute_data.num_args() as usize; let required_num_args = execute_data.common_required_num_args() as usize; if num_args < required_num_args { let func_name = execute_data.func().get_name(); let result = func_name .as_str() .map(|func_name| { Err::<(), _>(ArgumentCountError::new( func_name.to_owned(), required_num_args, num_args, )) }) .map_err(crate::Error::Utf8); SetVal::set_val(result, return_value); return; } let mut arguments = execute_data.get_parameters_array(); handler.call(execute_data, &mut arguments, return_value); for argument in arguments { forget(argument); } } pub(crate) const fn create_zend_arg_info( name: *const c_char, _pass_by_ref: bool, ) -> zend_internal_arg_info { #[cfg(phper_php_version = "8.0")] { zend_internal_arg_info { name, type_: zend_type { ptr: null_mut(), type_mask: 0, }, default_value: null_mut(), } } #[cfg(any( phper_php_version = "7.4", phper_php_version = "7.3", phper_php_version = "7.2" ))] { zend_internal_arg_info { name, type_: 0 as crate::sys::zend_type, pass_by_reference: _pass_by_ref as zend_uchar, is_variadic: 0, } } #[cfg(any(phper_php_version = "7.1", phper_php_version = "7.0"))] { zend_internal_arg_info { name, class_name: std::ptr::null(), type_hint: 0, allow_null: 0, pass_by_reference: _pass_by_ref as zend_uchar, is_variadic: 0, } } } pub fn call(fn_name: &str, arguments: 
&mut [Val]) -> Result<EBox<Val>, CallFunctionError> { let mut func = Val::new(fn_name); let mut ret = EBox::new(Val::null()); unsafe { if phper_call_user_function( compiler_globals.function_table, null_mut(), func.as_mut_ptr(), ret.as_mut_ptr(), arguments.len() as u32, arguments.as_mut_ptr().cast(), ) && !ret.get_type().is_undef() { Ok(ret) } else { Err(CallFunctionError::new(fn_name.to_owned())) } } }
use std::{mem::zeroed, os::raw::c_char}; use crate::{ alloc::EBox, classes::Visibility, errors::{ArgumentCountError, CallFunctionError, CallMethodError}, objects::Object, strings::ZendString, sys::*, utils::ensure_end_with_zero, values::{ExecuteData, SetVal, Val}, }; use std::{ marker::PhantomData, mem::{forget, size_of}, ptr::null_mut, }; pub(crate) trait Callable { fn call(&self, execute_data: &mut ExecuteData, arguments: &mut [Val], return_value: &mut Val); } pub(crate) struct Function<F, R>(F) where F: Fn(&mut [Val]) -> R + Send + Sync, R: SetVal; impl<F, R> Function<F, R> where F: Fn(&mut [Val]) -> R + Send + Sync, R: SetVal, { pub fn new(f: F) -> Self { Self(f) } } impl<F, R> Callable for Function<F, R> where F: Fn(&mut [Val]) -> R + Send + Sync, R: SetVal, { fn call(&self, _: &mut ExecuteDa
} pub(crate) struct Method<F, R, T> where F: Fn(&mut Object<T>, &mut [Val]) -> R + Send + Sync, R: SetVal, { f: F, _p0: PhantomData<R>, _p1: PhantomData<T>, } impl<F, R, T> Method<F, R, T> where F: Fn(&mut Object<T>, &mut [Val]) -> R + Send + Sync, R: SetVal, { pub(crate) fn new(f: F) -> Self { Self { f, _p0: Default::default(), _p1: Default::default(), } } } impl<F, R, T: 'static> Callable for Method<F, R, T> where F: Fn(&mut Object<T>, &mut [Val]) -> R + Send + Sync, R: SetVal, { fn call(&self, execute_data: &mut ExecuteData, arguments: &mut [Val], return_value: &mut Val) { unsafe { let this = execute_data.get_this::<T>().unwrap(); let r = (self.f)(this, arguments); r.set_val(return_value); } } } #[repr(transparent)] pub struct FunctionEntry { #[allow(dead_code)] inner: zend_function_entry, } pub struct FunctionEntity { pub(crate) name: String, pub(crate) handler: Box<dyn Callable>, pub(crate) arguments: Vec<Argument>, pub(crate) visibility: Option<Visibility>, pub(crate) r#static: Option<bool>, } impl FunctionEntity { pub(crate) fn new( name: impl ToString, handler: Box<dyn Callable>, arguments: Vec<Argument>, visibility: Option<Visibility>, r#static: Option<bool>, ) -> Self { let name = ensure_end_with_zero(name); FunctionEntity { name, handler, arguments, visibility, r#static, } } pub(crate) unsafe fn entry(&self) -> zend_function_entry { let mut infos = Vec::new(); let require_arg_count = self.arguments.iter().filter(|arg| arg.required).count(); infos.push(create_zend_arg_info( require_arg_count as *const c_char, false, )); for arg in &self.arguments { infos.push(create_zend_arg_info( arg.name.as_ptr().cast(), arg.pass_by_ref, )); } infos.push(zeroed::<zend_internal_arg_info>()); let translator = CallableTranslator { callable: self.handler.as_ref(), }; let last_arg_info: zend_internal_arg_info = translator.internal_arg_info; infos.push(last_arg_info); let flags = self.visibility.map(|v| v as u32).unwrap_or_default() | self .r#static .and_then(|v| if v { 
Some(ZEND_ACC_STATIC) } else { None }) .unwrap_or_default(); zend_function_entry { fname: self.name.as_ptr().cast(), handler: Some(invoke), arg_info: Box::into_raw(infos.into_boxed_slice()).cast(), num_args: self.arguments.len() as u32, flags, } } } pub struct Argument { pub(crate) name: String, pub(crate) pass_by_ref: bool, pub(crate) required: bool, } impl Argument { pub fn by_val(name: impl ToString) -> Self { let name = ensure_end_with_zero(name); Self { name, pass_by_ref: false, required: true, } } pub fn by_ref(name: impl ToString) -> Self { let name = ensure_end_with_zero(name); Self { name, pass_by_ref: true, required: true, } } pub fn by_val_optional(name: impl ToString) -> Self { let name = ensure_end_with_zero(name); Self { name, pass_by_ref: false, required: false, } } pub fn by_ref_optional(name: impl ToString) -> Self { let name = ensure_end_with_zero(name); Self { name, pass_by_ref: true, required: false, } } } #[repr(transparent)] pub struct ZendFunction { inner: zend_function, } impl ZendFunction { pub(crate) unsafe fn from_mut_ptr<'a>(ptr: *mut zend_function) -> &'a mut ZendFunction { let ptr = ptr as *mut Self; ptr.as_mut().expect("ptr shouldn't be null") } #[inline] pub fn as_ptr(&self) -> *const zend_function { &self.inner } #[inline] pub fn as_mut_ptr(&mut self) -> *mut zend_function { &mut self.inner } pub fn get_name(&self) -> EBox<ZendString> { unsafe { let s = phper_get_function_or_method_name(self.as_ptr()); ZendString::from_raw(s) } } pub fn call_method<T: 'static>( &mut self, object: &mut Object<T>, mut arguments: impl AsMut<[Val]>, ) -> crate::Result<EBox<Val>> { let mut ret_val = EBox::new(Val::undef()); let arguments = arguments.as_mut(); let mut fci = zend_fcall_info { size: size_of::<zend_fcall_info>(), function_name: Val::undef().into_inner(), retval: ret_val.as_mut_ptr(), params: arguments.as_mut_ptr().cast(), object: object.as_mut_ptr(), param_count: arguments.len() as u32, #[cfg(phper_major_version = "8")] named_params: 
null_mut(), #[cfg(phper_major_version = "7")] no_separation: 1, #[cfg(all(phper_major_version = "7", phper_minor_version = "0"))] function_table: null_mut(), #[cfg(all(phper_major_version = "7", phper_minor_version = "0"))] symbol_table: null_mut(), }; let called_scope = unsafe { let mut called_scope = object.get_class().as_ptr() as *mut zend_class_entry; if called_scope.is_null() { called_scope = self.inner.common.scope; } called_scope }; let mut fcc = zend_fcall_info_cache { function_handler: self.as_mut_ptr(), calling_scope: null_mut(), called_scope, object: object.as_mut_ptr(), #[cfg(all( phper_major_version = "7", any( phper_minor_version = "0", phper_minor_version = "1", phper_minor_version = "2", ) ))] initialized: 1, }; unsafe { if zend_call_function(&mut fci, &mut fcc) != ZEND_RESULT_CODE_SUCCESS || ret_val.get_type().is_undef() { Err(CallMethodError::new( object.get_class().get_name().as_str()?.to_owned(), self.get_name().as_str()?.to_owned(), ) .into()) } else { Ok(ret_val) } } } } pub(crate) union CallableTranslator { pub(crate) callable: *const dyn Callable, pub(crate) internal_arg_info: zend_internal_arg_info, pub(crate) arg_info: zend_arg_info, } unsafe extern "C" fn invoke(execute_data: *mut zend_execute_data, return_value: *mut zval) { let execute_data = ExecuteData::from_mut_ptr(execute_data); let return_value = Val::from_mut_ptr(return_value); let num_args = execute_data.common_num_args(); let arg_info = execute_data.common_arg_info(); let last_arg_info = arg_info.offset((num_args + 1) as isize); let translator = CallableTranslator { arg_info: *last_arg_info, }; let handler = translator.callable; let handler = handler.as_ref().expect("handler is null"); let num_args = execute_data.num_args() as usize; let required_num_args = execute_data.common_required_num_args() as usize; if num_args < required_num_args { let func_name = execute_data.func().get_name(); let result = func_name .as_str() .map(|func_name| { Err::<(), _>(ArgumentCountError::new( 
func_name.to_owned(), required_num_args, num_args, )) }) .map_err(crate::Error::Utf8); SetVal::set_val(result, return_value); return; } let mut arguments = execute_data.get_parameters_array(); handler.call(execute_data, &mut arguments, return_value); for argument in arguments { forget(argument); } } pub(crate) const fn create_zend_arg_info( name: *const c_char, _pass_by_ref: bool, ) -> zend_internal_arg_info { #[cfg(phper_php_version = "8.0")] { zend_internal_arg_info { name, type_: zend_type { ptr: null_mut(), type_mask: 0, }, default_value: null_mut(), } } #[cfg(any( phper_php_version = "7.4", phper_php_version = "7.3", phper_php_version = "7.2" ))] { zend_internal_arg_info { name, type_: 0 as crate::sys::zend_type, pass_by_reference: _pass_by_ref as zend_uchar, is_variadic: 0, } } #[cfg(any(phper_php_version = "7.1", phper_php_version = "7.0"))] { zend_internal_arg_info { name, class_name: std::ptr::null(), type_hint: 0, allow_null: 0, pass_by_reference: _pass_by_ref as zend_uchar, is_variadic: 0, } } } pub fn call(fn_name: &str, arguments: &mut [Val]) -> Result<EBox<Val>, CallFunctionError> { let mut func = Val::new(fn_name); let mut ret = EBox::new(Val::null()); unsafe { if phper_call_user_function( compiler_globals.function_table, null_mut(), func.as_mut_ptr(), ret.as_mut_ptr(), arguments.len() as u32, arguments.as_mut_ptr().cast(), ) && !ret.get_type().is_undef() { Ok(ret) } else { Err(CallFunctionError::new(fn_name.to_owned())) } } }
ta, arguments: &mut [Val], return_value: &mut Val) { let r = (self.0)(arguments); unsafe { r.set_val(return_value); } }
function_block-function_prefixed
[ { "content": "pub fn replace_and_get<T: Default, R>(t: &mut T, f: impl FnOnce(T) -> R) -> R {\n\n f(replace(t, Default::default()))\n\n}\n", "file_path": "examples/http-client/src/utils.rs", "rank": 1, "score": 210667.59407539124 }, { "content": "fn integration_values_return_val(_: &mut [Val]) -> Val {\n\n Val::new(\"foo\")\n\n}\n", "file_path": "tests/integration/src/values.rs", "rank": 2, "score": 202529.21321245644 }, { "content": "pub fn integrate(module: &mut Module) {\n\n integrate_arguments(module);\n\n}\n\n\n", "file_path": "tests/integration/src/arguments.rs", "rank": 3, "score": 193084.55292656826 }, { "content": "/// The trait for setting the value of [Val], mainly as the return value of\n\n/// functions and methods, and initializer of [Val].\n\n///\n\n/// TODO Better name, distinguish between non-referenced and referenced cases.\n\npub trait SetVal {\n\n unsafe fn set_val(self, val: &mut Val);\n\n}\n\n\n\nimpl SetVal for () {\n\n unsafe fn set_val(self, val: &mut Val) {\n\n val.set_type(Type::null());\n\n }\n\n}\n\n\n\nimpl SetVal for bool {\n\n unsafe fn set_val(self, val: &mut Val) {\n\n val.set_type(Type::bool(self));\n\n }\n\n}\n\n\n\nimpl SetVal for i32 {\n\n unsafe fn set_val(self, val: &mut Val) {\n\n SetVal::set_val(self as i64, val)\n\n }\n", "file_path": "phper/src/values.rs", "rank": 4, "score": 192967.08993692754 }, { "content": "fn integration_values_return_null(_: &mut [Val]) {}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 5, "score": 187698.19225566462 }, { "content": "fn integration_values_return_u32(_: &mut [Val]) -> u32 {\n\n 32\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 6, "score": 178638.257698367 }, { "content": "fn integration_values_return_true(_: &mut [Val]) -> bool {\n\n true\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 7, "score": 178638.257698367 }, { "content": "fn integration_values_return_string(_: &mut [Val]) -> String {\n\n 
\"foo\".to_string()\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 8, "score": 178638.257698367 }, { "content": "fn integration_values_return_i32(_: &mut [Val]) -> i32 {\n\n 32\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 9, "score": 178638.257698367 }, { "content": "fn integration_values_return_false(_: &mut [Val]) -> bool {\n\n false\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 10, "score": 178638.257698367 }, { "content": "fn integration_values_return_i64(_: &mut [Val]) -> i64 {\n\n 64\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 11, "score": 178638.257698367 }, { "content": "fn integration_values_return_f64(_: &mut [Val]) -> f64 {\n\n 64.0\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 12, "score": 178638.257698367 }, { "content": "fn say_hello(arguments: &mut [Val]) -> phper::Result<String> {\n\n let name = arguments[0].as_string_value()?;\n\n Ok(format!(\"Hello, {}!\\n\", name))\n\n}\n\n\n", "file_path": "examples/hello/src/lib.rs", "rank": 13, "score": 176046.38788569378 }, { "content": "fn throw_exception(_: &mut [Val]) -> phper::Result<()> {\n\n Err(phper::Error::other(\"I am sorry\"))\n\n}\n\n\n", "file_path": "examples/hello/src/lib.rs", "rank": 14, "score": 175429.39422120425 }, { "content": "fn integration_values_return_str(_: &mut [Val]) -> &'static str {\n\n \"foo\"\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 15, "score": 170568.54622702237 }, { "content": "pub fn try_make() -> crate::Result<()> {\n\n let make: Make = Make::parse();\n\n match make.sub {\n\n SubCommand::Install(_) => {\n\n let (lib_path, ext_name) = get_lib_path_and_ext_name()?;\n\n let extension_dir = CStr::from_bytes_with_nul(PHP_EXTENSION_DIR)?.to_str()?;\n\n println!(\"Installing shared extensions: {}\", extension_dir);\n\n let ext_path = Path::new(extension_dir).join(ext_name);\n\n fs::create_dir_all(extension_dir)?;\n\n fs::copy(lib_path, 
ext_path)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "phper/src/cmd.rs", "rank": 16, "score": 169417.5398170495 }, { "content": "fn integration_values_return_array(_: &mut [Val]) -> EBox<Array> {\n\n let mut arr = Array::new();\n\n arr.insert(\"a\", Val::new(1));\n\n arr.insert(\"b\", Val::new(\"foo\"));\n\n arr\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 17, "score": 168321.50738437692 }, { "content": "fn integration_values_return_object(_: &mut [Val]) -> EBox<Object<()>> {\n\n let mut object = Object::new_by_std_class();\n\n object.set_property(\"foo\", Val::new(\"bar\"));\n\n object\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 18, "score": 168321.50738437692 }, { "content": "fn integration_values_return_i64_vec(_: &mut [Val]) -> Vec<i64> {\n\n vec![0, 1, 2]\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 19, "score": 168321.50738437692 }, { "content": "fn integration_values_return_string_vec(_: &mut [Val]) -> Vec<String> {\n\n vec![\"a\".to_string(), \"b\".to_string(), \"c\".to_string()]\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 20, "score": 168321.50738437692 }, { "content": "fn integration_values_return_option_i64_some(_: &mut [Val]) -> Option<i64> {\n\n Some(64)\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 21, "score": 168321.50738437692 }, { "content": "fn integration_values_return_result_string_err(_: &mut [Val]) -> phper::Result<()> {\n\n Err(phper::Error::other(\"a zhe\"))\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 22, "score": 166184.72739566158 }, { "content": "fn integration_values_return_option_i64_none(_: &mut [Val]) -> Option<i64> {\n\n None\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 23, "score": 166184.72739566158 }, { "content": "pub fn integrate(module: &mut Module) {\n\n integrate_a(module);\n\n}\n\n\n", "file_path": "tests/integration/src/classes.rs", "rank": 24, 
"score": 160280.294507025 }, { "content": "pub fn integrate(module: &mut Module) {\n\n module.add_function(\n\n \"integrate_arrays_new_drop\",\n\n |_: &mut [Val]| -> phper::Result<String> {\n\n let mut a1 = Array::new();\n\n a1.insert(\"foo\", Val::new(\"FOO\"));\n\n let foo = a1.get(\"foo\").unwrap();\n\n let foo = foo.as_string()?;\n\n\n\n let mut a2 = Array::new();\n\n a2.insert(\"bar\", Val::new(\"BAR\"));\n\n let bar = a2.get(\"bar\").unwrap();\n\n let bar = bar.as_string()?;\n\n\n\n Ok(format!(\"{} {}\", foo, bar))\n\n },\n\n vec![],\n\n );\n\n\n\n module.add_function(\n", "file_path": "tests/integration/src/arrays.rs", "rank": 25, "score": 160280.29450702498 }, { "content": "pub fn integrate(module: &mut Module) {\n\n module.add_function(\n\n \"integrate_strings_zend_string_new\",\n\n |_: &mut [Val]| -> phper::Result<()> {\n\n let zs = ZendString::new(\"hello\");\n\n assert_eq!(zs.as_str()?, \"hello\");\n\n\n\n let zs = ZendString::new([1, 2, 3]);\n\n assert_eq!(zs.as_ref(), &[1, 2, 3]);\n\n\n\n assert!(&*ZendString::new(\"hello\") == &*ZendString::new(b\"hello\"));\n\n\n\n Ok(())\n\n },\n\n vec![],\n\n );\n\n}\n", "file_path": "tests/integration/src/strings.rs", "rank": 26, "score": 160280.29450702498 }, { "content": "pub fn integrate(module: &mut Module) {\n\n module.add_function(\n\n \"integrate_functions_call\",\n\n |_: &mut [Val]| -> phper::Result<()> {\n\n let mut arr = Array::new();\n\n arr.insert(\"a\", Val::new(1));\n\n arr.insert(\"b\", Val::new(2));\n\n let ret = call(\"array_sum\", &mut [Val::new(arr)])?;\n\n assert_eq!(ret.as_long()?, 3);\n\n Ok(())\n\n },\n\n vec![],\n\n );\n\n\n\n module.add_function(\n\n \"integrate_functions_call_callable\",\n\n |arguments: &mut [Val]| {\n\n if let [head, tail @ ..] 
= arguments {\n\n Ok::<_, phper::Error>(head.call(tail)?)\n\n } else {\n\n unreachable!()\n\n }\n\n },\n\n vec![Argument::by_val(\"fn\")],\n\n );\n\n}\n", "file_path": "tests/integration/src/functions.rs", "rank": 27, "score": 160280.29450702498 }, { "content": "pub fn integrate(module: &mut Module) {\n\n integrate_returns(module);\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 28, "score": 160280.29450702498 }, { "content": "pub fn integrate(module: &mut Module) {\n\n module.add_function(\n\n \"integrate_objects_new_drop\",\n\n |_: &mut [Val]| -> phper::Result<()> {\n\n let o = Object::new_by_std_class();\n\n drop(o);\n\n Ok(())\n\n },\n\n vec![],\n\n );\n\n\n\n module.add_function(\n\n \"integrate_objects_get_set\",\n\n |_: &mut [Val]| -> phper::Result<()> {\n\n let mut o = Object::new_by_std_class();\n\n\n\n o.set_property(\"foo\", Val::new(\"bar\"));\n\n let foo = o.get_property(\"foo\");\n\n assert_eq!(foo.as_string()?, \"bar\");\n\n\n", "file_path": "tests/integration/src/objects.rs", "rank": 29, "score": 160280.29450702498 }, { "content": "fn integration_values_return_string_map(_: &mut [Val]) -> HashMap<String, String> {\n\n let mut map = HashMap::new();\n\n map.insert(\"a\".to_string(), \"x\".to_string());\n\n map.insert(\"b\".to_string(), \"y\".to_string());\n\n map.insert(\"c\".to_string(), \"z\".to_string());\n\n map\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 30, "score": 159148.9932590215 }, { "content": "fn integration_values_return_result_string_ok(_: &mut [Val]) -> phper::Result<String> {\n\n Ok(\"foo\".to_string())\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 31, "score": 159148.9932590215 }, { "content": "fn integration_values_return_i64_map(_: &mut [Val]) -> HashMap<&'static str, i64> {\n\n let mut map = HashMap::new();\n\n map.insert(\"a\", 0);\n\n map.insert(\"b\", 1);\n\n map.insert(\"c\", 2);\n\n map\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 32, 
"score": 152788.56750746118 }, { "content": "fn integrate_arguments(module: &mut Module) {\n\n module.add_function(\n\n \"integrate_arguments_null\",\n\n |arguments: &mut [Val]| arguments[0].as_null(),\n\n vec![Argument::by_val(\"a\")],\n\n );\n\n\n\n module.add_function(\n\n \"integrate_arguments_long\",\n\n |arguments: &mut [Val]| -> phper::Result<i64> {\n\n let a = arguments[0].as_long()?;\n\n let b = arguments[1].as_long_value();\n\n Ok(a + b)\n\n },\n\n vec![Argument::by_val(\"a\"), Argument::by_val(\"b\")],\n\n );\n\n\n\n module.add_function(\n\n \"integrate_arguments_double\",\n\n |arguments: &mut [Val]| arguments[0].as_double(),\n", "file_path": "tests/integration/src/arguments.rs", "rank": 33, "score": 152031.6451948185 }, { "content": "fn integration_values_return_i64_index_map(_: &mut [Val]) -> IndexMap<&'static str, i64> {\n\n let mut map = IndexMap::new();\n\n map.insert(\"a\", 0);\n\n map.insert(\"b\", 1);\n\n map.insert(\"c\", 2);\n\n map\n\n}\n\n\n", "file_path": "tests/integration/src/values.rs", "rank": 34, "score": 150937.9172360279 }, { "content": "pub fn replace_and_set<T: Default>(t: &mut T, f: impl FnOnce(T) -> T) {\n\n let x = f(replace(t, Default::default()));\n\n let _ = replace(t, x);\n\n}\n\n\n", "file_path": "examples/http-server/src/utils.rs", "rank": 35, "score": 149133.11269190023 }, { "content": "pub fn replace_and_set<T: Default>(t: &mut T, f: impl FnOnce(T) -> T) {\n\n let x = f(replace(t, Default::default()));\n\n let _ = replace(t, x);\n\n}\n\n\n", "file_path": "examples/http-client/src/utils.rs", "rank": 36, "score": 149133.11269190023 }, { "content": "fn get_type_by_const(mut t: u32) -> crate::Result<String> {\n\n unsafe {\n\n t = get_base_type_by_raw(t);\n\n let s = zend_get_type_by_const(t as c_int);\n\n let mut s = CStr::from_ptr(s).to_str()?.to_string();\n\n\n\n // Compact with PHP7.\n\n if s == \"boolean\" {\n\n s = \"bool\".to_string();\n\n } else if s == \"integer\" {\n\n s = \"int\".to_string();\n\n }\n\n\n\n Ok(s)\n\n 
}\n\n}\n\n\n\nconst fn get_base_type_by_raw(t: u32) -> u32 {\n\n t & !(!0 << Z_TYPE_FLAGS_SHIFT)\n\n}\n", "file_path": "phper/src/types.rs", "rank": 37, "score": 148866.9785453635 }, { "content": "pub fn replace_and_get<T: Default>(t: &mut T) -> T {\n\n replace(t, Default::default())\n\n}\n", "file_path": "examples/http-server/src/utils.rs", "rank": 38, "score": 139480.399793859 }, { "content": "pub trait Classifiable {\n\n fn state_constructor(&self) -> Box<StateConstructor<Box<dyn Any>>>;\n\n fn state_type_id(&self) -> TypeId;\n\n fn class_name(&self) -> &str;\n\n fn methods(&mut self) -> &mut [FunctionEntity];\n\n fn properties(&mut self) -> &mut [PropertyEntity];\n\n fn parent(&self) -> Option<&str>;\n\n}\n\n\n\npub type StateConstructor<T> = dyn Fn() -> T + Send + Sync;\n\n\n\npub struct DynamicClass<T: Send + 'static> {\n\n class_name: String,\n\n state_constructor: Arc<StateConstructor<T>>,\n\n pub(crate) method_entities: Vec<FunctionEntity>,\n\n pub(crate) property_entities: Vec<PropertyEntity>,\n\n pub(crate) parent: Option<String>,\n\n _p: PhantomData<T>,\n\n}\n\n\n", "file_path": "phper/src/classes.rs", "rank": 39, "score": 130285.22758806567 }, { "content": "/// The item which can be placed into container [EBox].\n\npub trait EAllocatable {\n\n /// The method to free the heap allocated by `emalloc`, should call `efree` at the end.\n\n fn free(ptr: *mut Self) {\n\n unsafe {\n\n _efree(ptr.cast());\n\n }\n\n }\n\n}\n\n\n\n/// The Box which use php `emalloc` and `efree` to manage memory.\n\n///\n\n/// TODO now feature `allocator_api` is still unstable, implement myself, use Box<T, Alloc> later.\n\npub struct EBox<T: EAllocatable> {\n\n ptr: *mut T,\n\n}\n\n\n\nimpl<T: EAllocatable> EBox<T> {\n\n /// Allocates heap memory using `emalloc` then places `x` into it.\n\n ///\n\n /// # Panic\n", "file_path": "phper-alloc/src/lib.rs", "rank": 40, "score": 125933.2378771677 }, { "content": "/// Make.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// use 
phper::cmd::make;\n\n///\n\n/// fn main() {\n\n/// make();\n\n/// }\n\n/// ```\n\npub fn make() {\n\n try_make().expect(\"make failed\");\n\n}\n\n\n", "file_path": "phper/src/cmd.rs", "rank": 41, "score": 124999.67403780171 }, { "content": "/// Register useful rust cfg for project using phper.\n\npub fn register_configures() {\n\n // versions\n\n println!(\n\n \"cargo:rustc-cfg=phper_major_version=\\\"{}\\\"\",\n\n PHP_MAJOR_VERSION\n\n );\n\n println!(\n\n \"cargo:rustc-cfg=phper_minor_version=\\\"{}\\\"\",\n\n PHP_MINOR_VERSION\n\n );\n\n println!(\n\n \"cargo:rustc-cfg=phper_release_version=\\\"{}\\\"\",\n\n PHP_RELEASE_VERSION\n\n );\n\n println!(\n\n \"cargo:rustc-cfg=phper_php_version=\\\"{}.{}\\\"\",\n\n PHP_MAJOR_VERSION, PHP_MINOR_VERSION,\n\n );\n\n\n\n if PHP_DEBUG > 0 {\n\n println!(\"cargo:rustc-cfg=phper_debug\");\n\n }\n\n\n\n if USING_ZTS > 0 {\n\n println!(\"cargo:rustc-cfg=phper_zts\");\n\n }\n\n}\n", "file_path": "phper-build/src/lib.rs", "rank": 42, "score": 120718.7566432354 }, { "content": "/// PHP Throwable, can cause throwing an exception when setting to [crate::values::Val].\n\npub trait Throwable: error::Error {\n\n fn class_entry(&self) -> &StatelessClassEntry;\n\n\n\n fn code(&self) -> u64 {\n\n 0\n\n }\n\n\n\n fn message(&self) -> String {\n\n self.to_string()\n\n }\n\n}\n\n\n\nimpl Throwable for Infallible {\n\n fn class_entry(&self) -> &StatelessClassEntry {\n\n unreachable!()\n\n }\n\n}\n\n\n\n/// Type of [std::result::Result]<T, [crate::Error]>.\n\npub type Result<T> = std::result::Result<T, self::Error>;\n", "file_path": "phper/src/errors.rs", "rank": 43, "score": 117357.72759481476 }, { "content": "/// Check your extension by executing the php script, if the all your specified checkers are pass, than the test is pass.\n\n///\n\n/// - `exec_path` is the path of the make executable, which will be used to detect the path of\n\n/// extension lib.\n\n///\n\n/// - `scripts` is the slice of the tuple, format is `(path of your php test 
script, checker function or closure)`.\n\n///\n\n/// See [example logging integration test](https://github.com/jmjoy/phper/blob/master/examples/logging/tests/integration.rs).\n\npub fn test_php_scripts_with_condition(\n\n exe_path: impl AsRef<Path>,\n\n scripts: &[(&dyn AsRef<Path>, &dyn Fn(Output) -> bool)],\n\n) {\n\n let context = Context::get_global();\n\n let lib_path = utils::get_lib_path(exe_path);\n\n let tmp_php_ini_file = context.create_tmp_php_ini_file(&lib_path);\n\n\n\n for (script, condition) in scripts {\n\n let mut cmd = context.create_command_with_tmp_php_ini_args(&tmp_php_ini_file, script);\n\n\n\n let output = cmd.output().unwrap();\n\n let path = script.as_ref().to_str().unwrap();\n\n\n\n let mut stdout = String::from_utf8(output.stdout.clone()).unwrap();\n\n if stdout.is_empty() {\n\n stdout.push_str(\"<empty>\");\n\n }\n\n\n\n let mut stderr = String::from_utf8(output.stderr.clone()).unwrap();\n", "file_path": "phper-test/src/lib.rs", "rank": 44, "score": 116917.48398968455 }, { "content": "#[php_get_module]\n\npub fn get_module() -> Module {\n\n let mut module = Module::new(\n\n env!(\"CARGO_PKG_NAME\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"CARGO_PKG_AUTHORS\"),\n\n );\n\n\n\n // ...\n\n\n\n module\n\n}\n\n```\n\n\n\n6. 
Build and install, if your php isn't installed globally, you should specify the path of `php-config`.\n\n\n\n```bash\n\n# Specify if php isn't installed globally.\n\nexport PHP_CONFIG = <Your path of php-config>\n\n\n\n# Build libmyapp.so.\n", "file_path": "phper/src/lib.rs", "rank": 45, "score": 116402.25761263346 }, { "content": "#[php_get_module]\n\npub fn get_module() -> Module {\n\n let mut module = Module::new(\n\n env!(\"CARGO_PKG_NAME\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"CARGO_PKG_AUTHORS\"),\n\n );\n\n\n\n arguments::integrate(&mut module);\n\n arrays::integrate(&mut module);\n\n classes::integrate(&mut module);\n\n functions::integrate(&mut module);\n\n objects::integrate(&mut module);\n\n strings::integrate(&mut module);\n\n values::integrate(&mut module);\n\n\n\n module\n\n}\n", "file_path": "tests/integration/src/lib.rs", "rank": 46, "score": 114447.95421808666 }, { "content": "#[php_get_module]\n\npub fn get_module() -> Module {\n\n let mut module = Module::new(\n\n env!(\"CARGO_PKG_NAME\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"CARGO_PKG_AUTHORS\"),\n\n );\n\n\n\n module.add_function(\n\n \"log_say\",\n\n |params: &mut [Val]| -> phper::Result<()> {\n\n let message = params[0].as_string_value()?;\n\n echo!(\"Hello, {}!\", message);\n\n Ok(())\n\n },\n\n vec![Argument::by_val(\"message\")],\n\n );\n\n\n\n module.add_function(\n\n \"log_notice\",\n\n |params: &mut [Val]| -> phper::Result<()> {\n", "file_path": "examples/logging/src/lib.rs", "rank": 47, "score": 114447.95421808666 }, { "content": "#[php_get_module]\n\npub fn get_module() -> Module {\n\n let mut module = Module::new(\n\n env!(\"CARGO_PKG_NAME\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"CARGO_PKG_AUTHORS\"),\n\n );\n\n\n\n // register module ini\n\n Ini::add(\"hello.enable\", false, Policy::All);\n\n Ini::add(\"hello.num\", 100, Policy::All);\n\n Ini::add(\"hello.ratio\", 1.5, Policy::All);\n\n Ini::add(\"hello.description\", \"hello world.\".to_owned(), 
Policy::All);\n\n\n\n // register hook functions\n\n module.on_module_init(module_init);\n\n module.on_module_shutdown(|_| true);\n\n module.on_request_init(|_| true);\n\n module.on_request_shutdown(|_| true);\n\n\n\n // register functions\n", "file_path": "examples/hello/src/lib.rs", "rank": 48, "score": 114447.95421808666 }, { "content": "pub fn test_long_term_php_script_with_condition(\n\n exe_path: impl AsRef<Path>,\n\n script: impl AsRef<Path>,\n\n condition: impl FnOnce(&Child),\n\n) {\n\n let context = Context::get_global();\n\n let lib_path = utils::get_lib_path(exe_path);\n\n let tmp_php_ini_file = context.create_tmp_php_ini_file(lib_path);\n\n let mut command = context.create_command_with_tmp_php_ini_args(&tmp_php_ini_file, script);\n\n let mut child = command.spawn().unwrap();\n\n condition(&child);\n\n child.kill().unwrap();\n\n}\n", "file_path": "phper-test/src/lib.rs", "rank": 49, "score": 113518.05774299422 }, { "content": "#[php_get_module]\n\npub fn get_module() -> Module {\n\n let mut module = Module::new(\n\n env!(\"CARGO_PKG_NAME\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"CARGO_PKG_AUTHORS\"),\n\n );\n\n\n\n module.add_class(make_exception_class());\n\n module.add_class(make_client_class());\n\n module.add_class(make_client_builder_class());\n\n module.add_class(make_request_builder_class());\n\n module.add_class(make_response_class());\n\n\n\n module\n\n}\n", "file_path": "examples/http-client/src/lib.rs", "rank": 50, "score": 112602.95393750732 }, { "content": "#[php_get_module]\n\npub fn get_module() -> Module {\n\n let mut module = Module::new(\n\n env!(\"CARGO_PKG_NAME\"),\n\n env!(\"CARGO_PKG_VERSION\"),\n\n env!(\"CARGO_PKG_AUTHORS\"),\n\n );\n\n\n\n let rt = runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap();\n\n let rt = Arc::new(rt);\n\n let rt_ = rt.clone();\n\n\n\n module.on_module_init(move |_| {\n\n let guard = rt_.enter();\n\n forget(guard);\n\n true\n\n });\n\n module.on_module_shutdown(move 
|_| true);\n\n\n\n module.add_class(make_exception_class());\n\n module.add_class(make_server_class());\n\n module.add_class(make_request_class());\n\n module.add_class(make_response_class());\n\n\n\n module\n\n}\n", "file_path": "examples/http-server/src/lib.rs", "rank": 51, "score": 112602.95393750732 }, { "content": "fn integrate_a(module: &mut Module) {\n\n let mut class = DynamicClass::new(\"IntegrationTest\\\\A\");\n\n\n\n class.add_property(\"name\", Visibility::Private, \"default\");\n\n class.add_property(\"number\", Visibility::Private, 100);\n\n\n\n class.add_method(\n\n \"__construct\",\n\n Visibility::Public,\n\n |this, arguments| {\n\n let name = arguments[0].as_string()?;\n\n let number = arguments[1].as_long()?;\n\n this.set_property(\"name\", Val::new(name));\n\n this.set_property(\"number\", Val::new(number));\n\n Ok::<_, phper::Error>(())\n\n },\n\n vec![Argument::by_val(\"name\"), Argument::by_val(\"number\")],\n\n );\n\n\n\n class.add_method(\n", "file_path": "tests/integration/src/classes.rs", "rank": 52, "score": 111070.27103947647 }, { "content": "fn integrate_returns(module: &mut Module) {\n\n module.add_function(\n\n \"integration_values_return_null\",\n\n integration_values_return_null,\n\n vec![],\n\n );\n\n module.add_function(\n\n \"integration_values_return_true\",\n\n integration_values_return_true,\n\n vec![],\n\n );\n\n module.add_function(\n\n \"integration_values_return_false\",\n\n integration_values_return_false,\n\n vec![],\n\n );\n\n module.add_function(\n\n \"integration_values_return_i32\",\n\n integration_values_return_i32,\n\n vec![],\n", "file_path": "tests/integration/src/values.rs", "rank": 53, "score": 109223.18077028435 }, { "content": "pub fn make_request_class() -> DynamicClass<()> {\n\n let mut class = DynamicClass::new(HTTP_REQUEST_CLASS_NAME);\n\n\n\n class.add_property(\"header\", Visibility::Public, ());\n\n class.add_property(\"server\", Visibility::Public, ());\n\n class.add_property(\"data\", 
Visibility::Private, ());\n\n\n\n class\n\n}\n", "file_path": "examples/http-server/src/request.rs", "rank": 54, "score": 109206.12594840553 }, { "content": "pub fn make_exception_class() -> DynamicClass<()> {\n\n let mut exception_class = DynamicClass::new(EXCEPTION_CLASS_NAME);\n\n exception_class.extends(\"Exception\");\n\n exception_class\n\n}\n", "file_path": "examples/http-client/src/errors.rs", "rank": 55, "score": 109206.12594840553 }, { "content": "pub fn make_exception_class() -> DynamicClass<()> {\n\n let mut exception_class = DynamicClass::new(EXCEPTION_CLASS_NAME);\n\n exception_class.extends(\"Exception\");\n\n exception_class\n\n}\n", "file_path": "examples/http-server/src/errors.rs", "rank": 56, "score": 109206.12594840553 }, { "content": "/// The Type which can transform to an ini value.\n\n///\n\n/// Be careful that the size of `arg2` must litter than size of `usize`.\n\n///\n\n/// TODO Add a size compare with usize trait bound, after const generic supports.\n\npub trait TransformIniValue: Sized + ToString + 'static {\n\n fn on_modify() -> OnModify;\n\n\n\n unsafe fn transform(data: usize) -> Option<Self>;\n\n\n\n fn arg2_type() -> TypeId;\n\n\n\n fn arg2_size() -> usize;\n\n\n\n fn to_text(&self) -> String {\n\n self.to_string()\n\n }\n\n}\n\n\n\nimpl TransformIniValue for bool {\n\n fn on_modify() -> OnModify {\n\n Some(OnUpdateBool)\n\n }\n\n\n\n unsafe fn transform(data: usize) -> Option<Self> {\n", "file_path": "phper/src/ini.rs", "rank": 57, "score": 108326.21601922894 }, { "content": "pub fn echo(message: impl ToString) {\n\n let message = ensure_end_with_zero(message);\n\n unsafe {\n\n zend_write.expect(\"function zend_write can't be null\")(\n\n message.as_ptr().cast(),\n\n message.len() - 1,\n\n );\n\n }\n\n}\n", "file_path": "phper/src/output.rs", "rank": 58, "score": 107640.74257478947 }, { "content": "#[proc_macro]\n\npub fn c_str(input: TokenStream) -> TokenStream {\n\n utils::c_str(input)\n\n}\n\n\n\n/// C style string end with 
'\\0'.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_test\n\n/// assert_eq!(c_str_ptr!(\"foo\"), \"foo\\0\".as_ptr().cast());\n\n/// ```\n", "file_path": "phper-macros/src/lib.rs", "rank": 59, "score": 102398.91430510835 }, { "content": "pub fn make_client_builder_class() -> DynamicClass<ClientBuilder> {\n\n let mut class = DynamicClass::new_with_default(HTTP_CLIENT_BUILDER_CLASS_NAME);\n\n\n\n class.add_method(\n\n \"timeout\",\n\n Visibility::Public,\n\n |this, arguments| {\n\n let ms = arguments[0].as_long()?;\n\n let state: &mut ClientBuilder = this.as_mut_state();\n\n replace_and_set(state, |builder| {\n\n builder.timeout(Duration::from_millis(ms as u64))\n\n });\n\n Ok::<_, HttpClientError>(this.duplicate())\n\n },\n\n vec![Argument::by_val(\"ms\")],\n\n );\n\n\n\n class.add_method(\n\n \"cookie_store\",\n\n Visibility::Public,\n", "file_path": "examples/http-client/src/client.rs", "rank": 60, "score": 101075.27444654919 }, { "content": "#[proc_macro_derive(Throwable, attributes(throwable, throwable_class, throwable_crate))]\n\npub fn derive_throwable(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n derives::derive_throwable(input).unwrap_or_else(|e| e.into_compile_error().into())\n\n}\n", "file_path": "phper-macros/src/lib.rs", "rank": 61, "score": 100835.68550392828 }, { "content": "#[proc_macro]\n\npub fn c_str_ptr(input: TokenStream) -> TokenStream {\n\n utils::c_str_ptr(input)\n\n}\n\n\n\n/// PHP module entry, wrap the `phper::modules::Module` write operation.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_test\n\n/// use phper::{php_get_module, modules::Module};\n\n///\n\n/// #[php_get_module]\n\n/// pub fn get_module() -> Module {\n\n/// let mut module = Module::new(\n\n/// env!(\"CARGO_PKG_NAME\"),\n\n/// env!(\"CARGO_PKG_VERSION\"),\n\n/// env!(\"CARGO_PKG_AUTHORS\"),\n\n/// );\n\n///\n\n/// // ...\n\n///\n\n/// module\n\n/// }\n\n///\n\n/// ```\n", "file_path": "phper-macros/src/lib.rs", "rank": 62, 
"score": 100831.95875377128 }, { "content": "fn get_lib_path_and_ext_name() -> crate::Result<(PathBuf, OsString)> {\n\n let exe_path = env::current_exe()?;\n\n let exe_stem = exe_path\n\n .file_stem()\n\n .context(\"failed to get current exe stem\")?;\n\n let target_dir = exe_path\n\n .parent()\n\n .context(\"failed to get current exe directory\")?;\n\n\n\n let mut exe_name = OsString::new();\n\n exe_name.push(\"lib\");\n\n let lib_stem = exe_stem\n\n .to_str()\n\n .context(\"failed to generate target lib name\")?\n\n .replace(\"-\", \"_\");\n\n exe_name.push(lib_stem);\n\n exe_name.push(\".so\");\n\n\n\n let mut ext_name = OsString::new();\n\n ext_name.push(exe_stem);\n\n ext_name.push(\".so\");\n\n\n\n Ok((target_dir.join(exe_name), ext_name))\n\n}\n", "file_path": "phper/src/cmd.rs", "rank": 63, "score": 99592.57192948091 }, { "content": "pub fn make_response_class() -> DynamicClass<Response<Body>> {\n\n let mut class = DynamicClass::new_with_default(HTTP_RESPONSE_CLASS_NAME);\n\n\n\n class.add_method(\n\n \"header\",\n\n Visibility::Public,\n\n |this, arguments| {\n\n let response: &mut Response<Body> = this.as_mut_state();\n\n response.headers_mut().insert(\n\n HeaderName::from_bytes(arguments[0].as_string()?.as_bytes())?,\n\n HeaderValue::from_bytes(arguments[1].as_string()?.as_bytes())?,\n\n );\n\n Ok::<_, HttpServerError>(())\n\n },\n\n vec![Argument::by_val(\"data\")],\n\n );\n\n\n\n class.add_method(\n\n \"end\",\n\n Visibility::Public,\n", "file_path": "examples/http-server/src/response.rs", "rank": 64, "score": 99343.82522035787 }, { "content": "pub fn make_client_class() -> DynamicClass<Option<Client>> {\n\n let mut class = DynamicClass::new_with_default(HTTP_CLIENT_CLASS_NAME);\n\n\n\n class.add_method(\n\n \"__construct\",\n\n Visibility::Private,\n\n |_: &mut Object<Option<Client>>, _| {},\n\n vec![],\n\n );\n\n\n\n class.add_method(\n\n \"get\",\n\n Visibility::Public,\n\n |this, arguments| {\n\n let url = arguments[0].as_string()?;\n\n let client 
= this.as_state().as_ref().unwrap();\n\n let request_builder = client.get(url);\n\n let mut object =\n\n ClassEntry::<Option<RequestBuilder>>::from_globals(REQUEST_BUILDER_CLASS_NAME)?\n\n .init_object()?;\n", "file_path": "examples/http-client/src/client.rs", "rank": 65, "score": 99343.82522035787 }, { "content": "pub fn make_response_class() -> DynamicClass<Option<Response>> {\n\n let mut class = DynamicClass::new_with_default(RESPONSE_CLASS_NAME);\n\n\n\n class.add_method(\n\n \"body\",\n\n Visibility::Public,\n\n |this: &mut Object<Option<Response>>, _arguments| {\n\n let response = this.as_mut_state();\n\n let body = replace_and_get(response, |response| {\n\n response\n\n .ok_or(HttpClientError::ResponseHadRead)\n\n .and_then(|response| response.bytes().map_err(Into::into))\n\n })?;\n\n Ok::<_, HttpClientError>((&body).to_vec())\n\n },\n\n vec![],\n\n );\n\n\n\n class.add_method(\n\n \"status\",\n", "file_path": "examples/http-client/src/response.rs", "rank": 66, "score": 99343.82522035787 }, { "content": "pub fn log(level: LogLevel, message: impl ToString) {\n\n let message = ensure_end_with_zero(message);\n\n unsafe {\n\n php_error_docref1(\n\n null(),\n\n \"\\0\".as_ptr().cast(),\n\n level as i32,\n\n message.as_ptr().cast(),\n\n );\n\n }\n\n}\n\n\n", "file_path": "phper/src/output.rs", "rank": 67, "score": 97268.21558220818 }, { "content": "pub fn make_request_builder_class() -> DynamicClass<Option<RequestBuilder>> {\n\n let mut class = DynamicClass::new_with_default(REQUEST_BUILDER_CLASS_NAME);\n\n\n\n class.add_method(\n\n \"__construct\",\n\n Visibility::Private,\n\n |_: &mut Object<Option<RequestBuilder>>, _| {},\n\n vec![],\n\n );\n\n\n\n class.add_method(\n\n \"send\",\n\n Visibility::Public,\n\n |this, _arguments| {\n\n let state = this.as_mut_state();\n\n let response = replace_and_get(state, |builder| builder.unwrap().send())?;\n\n let mut object =\n\n ClassEntry::<Option<Response>>::from_globals(RESPONSE_CLASS_NAME)?.init_object()?;\n\n 
*object.as_mut_state() = Some(response);\n\n Ok::<_, HttpClientError>(object)\n\n },\n\n vec![],\n\n );\n\n\n\n class\n\n}\n", "file_path": "examples/http-client/src/request.rs", "rank": 68, "score": 96581.37384205607 }, { "content": "pub fn make_server_class() -> DynamicClass<Option<Builder<AddrIncoming>>> {\n\n let mut class = DynamicClass::new_with_default(HTTP_SERVER_CLASS_NAME);\n\n\n\n class.add_property(\"host\", Visibility::Private, \"127.0.0.1\");\n\n class.add_property(\"port\", Visibility::Private, 8080);\n\n class.add_property(\"onRequestHandle\", Visibility::Private, ());\n\n\n\n class.add_method(\n\n \"__construct\",\n\n Visibility::Public,\n\n |this, arguments| {\n\n let host = arguments[0].as_string()?;\n\n let port = arguments[1].as_long()?;\n\n this.set_property(\"host\", Val::new(&*host));\n\n this.set_property(\"port\", Val::new(port));\n\n let addr = format!(\"{}:{}\", host, port).parse::<SocketAddr>()?;\n\n let builder = Server::bind(&addr);\n\n *this.as_mut_state() = Some(builder);\n\n Ok::<_, HttpServerError>(())\n\n },\n", "file_path": "examples/http-server/src/server.rs", "rank": 69, "score": 93847.70946843358 }, { "content": "pub fn get_lib_path(exe_path: impl AsRef<Path>) -> PathBuf {\n\n let exe_path = exe_path.as_ref();\n\n let exe_stem = exe_path\n\n .file_stem()\n\n .expect(\"failed to get current exe stem\")\n\n .to_str()\n\n .expect(\"failed to convert to utf-8 str\");\n\n let target_dir = exe_path\n\n .parent()\n\n .expect(\"failed to get current exe directory\");\n\n\n\n let mut ext_name = OsString::new();\n\n ext_name.push(\"lib\");\n\n ext_name.push(exe_stem.replace('-', \"_\"));\n\n #[cfg(target_os = \"linux\")]\n\n ext_name.push(\".so\");\n\n #[cfg(target_os = \"macos\")]\n\n ext_name.push(\".dylib\");\n\n #[cfg(target_os = \"windows\")]\n\n ext_name.push(\".dll\");\n\n\n\n target_dir.join(ext_name)\n\n}\n", "file_path": "phper-test/src/utils.rs", "rank": 70, "score": 91450.6751191559 }, { "content": "fn 
find_global_class_entry_ptr(name: impl AsRef<str>) -> *mut zend_class_entry {\n\n let name = name.as_ref();\n\n let name = name.to_lowercase();\n\n unsafe {\n\n phper_zend_hash_str_find_ptr(\n\n compiler_globals.class_table,\n\n name.as_ptr().cast(),\n\n name.len(),\n\n )\n\n .cast()\n\n }\n\n}\n\n\n\npub struct ClassEntity {\n\n pub(crate) name: String,\n\n pub(crate) entry: AtomicPtr<ClassEntry<Box<dyn Any>>>,\n\n pub(crate) classifiable: Box<dyn Classifiable>,\n\n pub(crate) function_entries: OnceCell<AtomicPtr<FunctionEntry>>,\n\n}\n\n\n", "file_path": "phper/src/classes.rs", "rank": 71, "score": 90396.87700304225 }, { "content": "#[proc_macro_attribute]\n\npub fn php_get_module(attr: TokenStream, input: TokenStream) -> TokenStream {\n\n inner::php_get_module(attr, input)\n\n}\n\n\n\n/// Auto derive for `phper::errors::Throwable`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_test\n\n/// #[derive(thiserror::Error, crate::Throwable, Debug)]\n\n/// #[throwable(class = \"Exception\")]\n\n/// pub enum Error {\n\n/// #[error(transparent)]\n\n/// Io(#[from] std::io::Error),\n\n///\n\n/// #[error(transparent)]\n\n/// #[throwable(transparent)]\n\n/// My(#[from] MyError),\n\n/// }\n\n/// ```\n\n///\n\n/// TODO Support attribute `throwable` with `code` and `message`, integration tests.\n", "file_path": "phper-macros/src/lib.rs", "rank": 72, "score": 90166.35638519723 }, { "content": "pub fn execute_command<S: AsRef<OsStr> + Debug>(argv: &[S]) -> String {\n\n let mut command = Command::new(&argv[0]);\n\n command.args(&argv[1..]);\n\n let output = command\n\n .output()\n\n .expect(&format!(\"Execute command {:?} failed\", &argv))\n\n .stdout;\n\n String::from_utf8(output).unwrap().trim().to_owned()\n\n}\n\n\n", "file_path": "phper-test/src/utils.rs", "rank": 73, "score": 87445.04586004354 }, { "content": "/// Check your extension by executing the php script, if the all executing return success, than the test is pass.\n\n///\n\n/// - `exec_path` is the path of the make 
executable, which will be used to detect the path of\n\n/// extension lib.\n\n///\n\n/// - `scripts` is the path of your php test scripts.\n\n///\n\n/// See [example hello integration test](https://github.com/jmjoy/phper/blob/master/examples/hello/tests/integration.rs).\n\npub fn test_php_scripts(exe_path: impl AsRef<Path>, scripts: &[&dyn AsRef<Path>]) {\n\n let condition = |output: Output| output.status.success();\n\n let scripts = scripts\n\n .into_iter()\n\n .map(|s| (*s, &condition as _))\n\n .collect::<Vec<_>>();\n\n test_php_scripts_with_condition(exe_path, &*scripts);\n\n}\n\n\n", "file_path": "phper-test/src/lib.rs", "rank": 74, "score": 83247.28804099193 }, { "content": "fn parse_throwable_crate_ident(input: &DeriveInput) -> TokenStream2 {\n\n let has_throwable_crate = attributes_find_ident(&input.attrs, \"throwable_crate\");\n\n let crate_ident = if has_throwable_crate.is_some() {\n\n quote! { crate }\n\n } else {\n\n quote! { phper }\n\n };\n\n crate_ident\n\n}\n\n\n", "file_path": "phper-macros/src/derives.rs", "rank": 75, "score": 79445.82025056207 }, { "content": "#[derive(Clap)]\n\nstruct Make {\n\n #[clap(subcommand)]\n\n sub: SubCommand,\n\n}\n\n\n", "file_path": "phper/src/cmd.rs", "rank": 76, "score": 70359.82969687085 }, { "content": "#[derive(Clap)]\n\nstruct InstallCommand {}\n\n\n", "file_path": "phper/src/cmd.rs", "rank": 77, "score": 69155.69670333454 }, { "content": "fn main() {\n\n phper_build::register_configures();\n\n}\n", "file_path": "phper/build.rs", "rank": 78, "score": 65723.10131277653 }, { "content": "fn main() {\n\n phper_build::register_configures();\n\n}\n", "file_path": "examples/hello/build.rs", "rank": 79, "score": 64482.83216364047 }, { "content": "fn main() {\n\n make();\n\n}\n\n```\n\n\n\n5. 
Write you owned extension logic in `lib.rs`.\n\n\n\n```no_run\n\nuse phper::{php_get_module, modules::Module};\n\n\n", "file_path": "phper/src/lib.rs", "rank": 80, "score": 64482.83216364047 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=php_wrapper.h\");\n\n println!(\"cargo:rerun-if-changed=php_wrapper.c\");\n\n println!(\"cargo:rerun-if-env-changed=PHP_CONFIG\");\n\n\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let php_config = env::var(\"PHP_CONFIG\").unwrap_or(\"php-config\".to_string());\n\n\n\n let includes = execute_command(&[php_config.as_str(), \"--includes\"]);\n\n let includes = includes.split(' ').collect::<Vec<_>>();\n\n\n\n // Generate php const.\n\n\n\n let php_bin = execute_command(&[php_config.as_str(), \"--php-binary\"]);\n\n let php_info = execute_command(&[php_bin.as_str(), \"-i\"]);\n\n\n\n println!(\n\n \"cargo:rustc-env=ZEND_MODULE_BUILD_ID={}\",\n\n php_info\n\n .lines()\n", "file_path": "phper-sys/build.rs", "rank": 81, "score": 64482.83216364047 }, { "content": "fn main() {\n\n phper_build::register_configures();\n\n}\n", "file_path": "phper-alloc/build.rs", "rank": 82, "score": 64482.83216364047 }, { "content": "fn main() {\n\n make();\n\n}\n", "file_path": "tests/integration/src/main.rs", "rank": 83, "score": 63320.6672384244 }, { "content": "fn main() {\n\n make();\n\n}\n", "file_path": "examples/hello/src/main.rs", "rank": 84, "score": 63320.6672384244 }, { "content": "fn main() {\n\n make();\n\n}\n", "file_path": "examples/logging/src/main.rs", "rank": 85, "score": 63320.6672384244 }, { "content": "#[test]\n\nfn test_c_str() {\n\n assert_eq!(c_str!(\"foo\"), unsafe {\n\n CStr::from_ptr(\"foo\\0\".as_ptr().cast())\n\n });\n\n assert_eq!(unsafe { c_str!(\"bar\") }, unsafe {\n\n CStr::from_ptr(\"bar\\0\".as_ptr().cast())\n\n });\n\n}\n\n\n", "file_path": "phper-macros/tests/integration.rs", "rank": 86, "score": 62229.453990719936 }, { "content": "#[test]\n\nfn test_php() {\n\n let base_dir = 
Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"php\");\n\n\n\n test_php_scripts_with_condition(\n\n env!(\"CARGO_BIN_EXE_logging\"),\n\n &[\n\n (&base_dir.join(\"test_php_say.php\"), &|output| {\n\n let stdout = str::from_utf8(&output.stdout).unwrap();\n\n stdout == \"Hello, world!\" && output.status.success()\n\n }),\n\n (&base_dir.join(\"test_php_notice.php\"), &|output| {\n\n let stdout = str::from_utf8(&output.stdout).unwrap();\n\n stdout.contains(\"Notice:\")\n\n && stdout.contains(\"Something happened: just for test\")\n\n && output.status.success()\n\n }),\n\n (&base_dir.join(\"test_php_warning.php\"), &|output| {\n\n let stdout = str::from_utf8(&output.stdout).unwrap();\n", "file_path": "examples/logging/tests/integration.rs", "rank": 87, "score": 62229.453990719936 }, { "content": "#[test]\n\nfn test_php() {\n\n test_php_scripts(\n\n env!(\"CARGO_BIN_EXE_hello\"),\n\n &[&Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"php\")\n\n .join(\"test.php\")],\n\n );\n\n}\n", "file_path": "examples/hello/tests/integration.rs", "rank": 88, "score": 62229.453990719936 }, { "content": "fn main() {\n\n make();\n\n}\n", "file_path": "examples/http-client/src/main.rs", "rank": 89, "score": 62229.453990719936 }, { "content": "fn main() {\n\n make();\n\n}\n", "file_path": "examples/http-server/src/main.rs", "rank": 90, "score": 62229.453990719936 }, { "content": "#[test]\n\nfn test_php() {\n\n let tests_php_dir = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"php\");\n\n\n\n test_php_scripts(\n\n env!(\"CARGO_BIN_EXE_integration\"),\n\n &[\n\n &tests_php_dir.join(\"arguments.php\"),\n\n &tests_php_dir.join(\"arrays.php\"),\n\n &tests_php_dir.join(\"classes.php\"),\n\n &tests_php_dir.join(\"functions.php\"),\n\n &tests_php_dir.join(\"objects.php\"),\n\n &tests_php_dir.join(\"strings.php\"),\n\n &tests_php_dir.join(\"values.php\"),\n\n ],\n\n );\n\n}\n", "file_path": 
"tests/integration/tests/integration.rs", "rank": 91, "score": 62229.453990719936 }, { "content": "fn parse_throwable_input(\n\n input: &DeriveInput,\n\n crate_ident: TokenStream2,\n\n exception: TokenStream2,\n\n) -> syn::Result<TokenStream> {\n\n let input_ident = &input.ident;\n\n\n\n match &input.data {\n\n Data::Enum(e) => {\n\n let mut transparent_idents = Vec::new();\n\n\n\n for variant in &e.variants {\n\n let attr = attributes_find_ident(&variant.attrs, \"throwable\");\n\n match attr {\n\n Some(attr) => {\n\n if attr.tokens.to_string() != \"(transparent)\" {\n\n return Err(syn::Error::new_spanned(\n\n &attr,\n\n \"now only support #[throwable(transparent)] for variant\",\n\n ));\n", "file_path": "phper-macros/src/derives.rs", "rank": 92, "score": 61202.88734888981 }, { "content": "#[test]\n\nfn test_c_str_ptr() {\n\n assert_eq!(c_str_ptr!(\"foo\"), \"foo\\0\".as_ptr().cast());\n\n}\n", "file_path": "phper-macros/tests/integration.rs", "rank": 93, "score": 61202.88734888981 }, { "content": "#[test]\n\nfn test_php() {\n\n test_long_term_php_script_with_condition(\n\n env!(\"CARGO_BIN_EXE_http-server\"),\n\n Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"php\")\n\n .join(\"test.php\"),\n\n |_| {\n\n // wait for server startup.\n\n sleep(Duration::from_secs(3));\n\n\n\n runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(async {\n\n let client = Client::new();\n\n let response = client.get(\"http://127.0.0.1:9000/\").send().await.unwrap();\n\n let content_type = response.headers().get(CONTENT_TYPE).unwrap();\n\n assert_eq!(content_type, \"text/plain\");\n\n let body = response.text().await.unwrap();\n\n assert_eq!(body, \"Hello World\\n\");\n\n });\n\n },\n\n );\n\n}\n", "file_path": "examples/http-server/tests/integration.rs", "rank": 94, "score": 61202.88734888981 }, { "content": "#[test]\n\nfn test_php() {\n\n test_php_scripts(\n\n env!(\"CARGO_BIN_EXE_http-client\"),\n\n 
&[&Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"tests\")\n\n .join(\"php\")\n\n .join(\"test.php\")],\n\n );\n\n}\n", "file_path": "examples/http-client/tests/integration.rs", "rank": 95, "score": 61202.88734888981 }, { "content": "fn get_object_handlers() -> &'static zend_object_handlers {\n\n static HANDLERS: OnceCell<zend_object_handlers> = OnceCell::new();\n\n HANDLERS.get_or_init(|| unsafe {\n\n let mut handlers = std_object_handlers;\n\n handlers.offset = ExtendObject::offset() as c_int;\n\n handlers.free_obj = Some(free_object);\n\n handlers\n\n })\n\n}\n\n\n\nunsafe extern \"C\" fn create_object(ce: *mut zend_class_entry) -> *mut zend_object {\n\n // Alloc more memory size to store state data.\n\n let extend_object: *mut ExtendObject =\n\n phper_zend_object_alloc(size_of::<ExtendObject>(), ce).cast();\n\n\n\n // Common initialize process.\n\n let object = ExtendObject::as_mut_object(extend_object);\n\n zend_object_std_init(object, ce);\n\n object_properties_init(object, ce);\n\n rebuild_object_properties(object);\n", "file_path": "phper/src/classes.rs", "rank": 96, "score": 53953.3752393531 }, { "content": "fn module_init(_args: ModuleContext) -> bool {\n\n true\n\n}\n\n\n", "file_path": "examples/hello/src/lib.rs", "rank": 97, "score": 52211.23420105723 }, { "content": "fn get_registered_class_type_map() -> &'static DashMap<usize, TypeId> {\n\n static MAP: OnceCell<DashMap<usize, TypeId>> = OnceCell::new();\n\n MAP.get_or_init(DashMap::new)\n\n}\n\n\n", "file_path": "phper/src/classes.rs", "rank": 98, "score": 47682.78444592445 }, { "content": "fn parse_throwable_attrs(input: &DeriveInput) -> syn::Result<TokenStream2> {\n\n let attr = attributes_find_ident(&input.attrs, \"throwable_class\");\n\n attr.map(|attr| attr.parse_args::<Expr>().map(|expr| quote! { #expr }))\n\n .unwrap_or_else(|| Ok(quote! { \"Exception\" }))\n\n}\n\n\n", "file_path": "phper-macros/src/derives.rs", "rank": 99, "score": 46381.805195482506 } ]
Rust
ezgui/src/event_ctx.rs
jinzhong2/abstreet
e1c5edc76d636af4f3e4593efc25055bdd637dd7
use crate::widgets::ContextMenu; use crate::{ Canvas, Color, GfxCtx, HorizontalAlignment, Line, Prerender, Text, UserInput, VerticalAlignment, }; use abstutil::{elapsed_seconds, Timer, TimerSink}; use geom::Angle; use glium_glyph::glyph_brush::rusttype::Font; use glium_glyph::GlyphBrush; use std::collections::VecDeque; use std::time::Instant; pub struct EventCtx<'a> { pub input: &'a mut UserInput, pub canvas: &'a mut Canvas, pub prerender: &'a Prerender<'a>, pub(crate) program: &'a glium::Program, } impl<'a> EventCtx<'a> { pub fn loading_screen<O, F: FnOnce(&mut EventCtx, &mut Timer) -> O>( &mut self, timer_name: &str, f: F, ) -> O { let mut timer = Timer::new_with_sink( timer_name, Box::new(LoadingScreen::new( self.prerender, self.program, self.canvas.window_width, self.canvas.window_height, self.canvas.font_size, timer_name.to_string(), )), ); f(self, &mut timer) } pub fn redo_mouseover(&self) -> bool { self.input.window_lost_cursor() || (!self.canvas.is_dragging() && self.input.get_moved_mouse().is_some()) || self.input.get_mouse_scroll().is_some() } pub fn set_textures( &mut self, skip_textures: Vec<(&str, Color)>, textures: Vec<(&str, TextureType)>, timer: &mut Timer, ) { self.canvas.textures.clear(); self.canvas.texture_lookups.clear(); for (filename, fallback) in skip_textures { self.canvas .texture_lookups .insert(filename.to_string(), fallback); } if textures.len() > 15 { panic!("Due to lovely hacks, only 15 textures supported"); } timer.start_iter("upload textures", textures.len()); for (idx, (filename, tex_type)) in textures.into_iter().enumerate() { timer.next(); let img = image::open(filename).unwrap().to_rgba(); let dims = img.dimensions(); let tex = glium::texture::Texture2d::new( self.prerender.display, glium::texture::RawImage2d::from_raw_rgba_reversed(&img.into_raw(), dims), ) .unwrap(); self.canvas.textures.push((filename.to_string(), tex)); self.canvas.texture_lookups.insert( filename.to_string(), match tex_type { TextureType::Stretch => 
Color::StretchTexture(idx as f32, Angle::ZERO), TextureType::Tile => { Color::TileTexture(idx as f32, (f64::from(dims.0), f64::from(dims.1))) } TextureType::CustomUV => Color::CustomUVTexture(idx as f32), }, ); } } } pub struct LoadingScreen<'a> { canvas: Canvas, prerender: &'a Prerender<'a>, program: &'a glium::Program, lines: VecDeque<String>, max_capacity: usize, last_drawn: Option<Instant>, title: String, } impl<'a> LoadingScreen<'a> { pub fn new( prerender: &'a Prerender<'a>, program: &'a glium::Program, initial_width: f64, initial_height: f64, font_size: usize, title: String, ) -> LoadingScreen<'a> { let dejavu: &[u8] = include_bytes!("assets/DejaVuSans.ttf"); let screenspace_glyphs = GlyphBrush::new(prerender.display, vec![Font::from_bytes(dejavu).unwrap()]); let mapspace_glyphs = GlyphBrush::new(prerender.display, vec![Font::from_bytes(dejavu).unwrap()]); let canvas = Canvas::new( initial_width, initial_height, screenspace_glyphs, mapspace_glyphs, font_size, ); LoadingScreen { prerender, program, lines: VecDeque::new(), max_capacity: (0.8 * initial_height / canvas.line_height) as usize, last_drawn: None, title, canvas, } } fn redraw(&mut self) { if let Some(t) = self.last_drawn { if elapsed_seconds(t) < 0.2 { return; } } self.last_drawn = Some(Instant::now()); let mut txt = Text::prompt(&self.title); txt.override_width = Some(self.canvas.window_width * 0.8); txt.override_height = Some(self.canvas.window_height * 0.8); for l in &self.lines { txt.add(Line(l)); } let mut target = self.prerender.display.draw(); let context_menu = ContextMenu::new(); let mut g = GfxCtx::new( &self.canvas, self.prerender, &mut target, self.program, &context_menu, false, ); g.clear(Color::BLACK); g.draw_blocking_text( &txt, (HorizontalAlignment::Center, VerticalAlignment::Center), ); self.canvas .screenspace_glyphs .borrow_mut() .draw_queued(self.prerender.display, &mut target); target.finish().unwrap(); } } impl<'a> TimerSink for LoadingScreen<'a> { fn println(&mut self, line: 
String) { if self.lines.len() == self.max_capacity { self.lines.pop_front(); } self.lines.push_back(line); self.redraw(); } fn reprintln(&mut self, line: String) { self.lines.pop_back(); self.lines.push_back(line); self.redraw(); } } pub enum TextureType { Stretch, Tile, CustomUV, }
use crate::widgets::ContextMenu; use crate::{ Canvas, Color, GfxCtx, HorizontalAlignment, Line, Prerender, Text, UserInput, VerticalAlignment, }; use abstutil::{elapsed_seconds, Timer, TimerSink}; use geom::Angle; use glium_glyph::glyph_brush::rusttype::Font; use glium_glyph::GlyphBrush; use std::collections::VecDeque; use std::time::Instant; pub struct EventCtx<'a> { pub input: &'a mut UserInput, pub canvas: &'a mut Canvas, pub prerender: &'a Prerender<'a>, pub(crate) program: &'a glium::Program, } impl<'a> EventCtx<'a> { pub fn loading_screen<O, F: FnOnce(&mut EventCtx, &mut Timer) -> O>( &mut self, timer_name: &str, f: F, ) -> O { let mut timer = Timer::new_with_sink( timer_na
pub fn redo_mouseover(&self) -> bool { self.input.window_lost_cursor() || (!self.canvas.is_dragging() && self.input.get_moved_mouse().is_some()) || self.input.get_mouse_scroll().is_some() } pub fn set_textures( &mut self, skip_textures: Vec<(&str, Color)>, textures: Vec<(&str, TextureType)>, timer: &mut Timer, ) { self.canvas.textures.clear(); self.canvas.texture_lookups.clear(); for (filename, fallback) in skip_textures { self.canvas .texture_lookups .insert(filename.to_string(), fallback); } if textures.len() > 15 { panic!("Due to lovely hacks, only 15 textures supported"); } timer.start_iter("upload textures", textures.len()); for (idx, (filename, tex_type)) in textures.into_iter().enumerate() { timer.next(); let img = image::open(filename).unwrap().to_rgba(); let dims = img.dimensions(); let tex = glium::texture::Texture2d::new( self.prerender.display, glium::texture::RawImage2d::from_raw_rgba_reversed(&img.into_raw(), dims), ) .unwrap(); self.canvas.textures.push((filename.to_string(), tex)); self.canvas.texture_lookups.insert( filename.to_string(), match tex_type { TextureType::Stretch => Color::StretchTexture(idx as f32, Angle::ZERO), TextureType::Tile => { Color::TileTexture(idx as f32, (f64::from(dims.0), f64::from(dims.1))) } TextureType::CustomUV => Color::CustomUVTexture(idx as f32), }, ); } } } pub struct LoadingScreen<'a> { canvas: Canvas, prerender: &'a Prerender<'a>, program: &'a glium::Program, lines: VecDeque<String>, max_capacity: usize, last_drawn: Option<Instant>, title: String, } impl<'a> LoadingScreen<'a> { pub fn new( prerender: &'a Prerender<'a>, program: &'a glium::Program, initial_width: f64, initial_height: f64, font_size: usize, title: String, ) -> LoadingScreen<'a> { let dejavu: &[u8] = include_bytes!("assets/DejaVuSans.ttf"); let screenspace_glyphs = GlyphBrush::new(prerender.display, vec![Font::from_bytes(dejavu).unwrap()]); let mapspace_glyphs = GlyphBrush::new(prerender.display, vec![Font::from_bytes(dejavu).unwrap()]); let canvas 
= Canvas::new( initial_width, initial_height, screenspace_glyphs, mapspace_glyphs, font_size, ); LoadingScreen { prerender, program, lines: VecDeque::new(), max_capacity: (0.8 * initial_height / canvas.line_height) as usize, last_drawn: None, title, canvas, } } fn redraw(&mut self) { if let Some(t) = self.last_drawn { if elapsed_seconds(t) < 0.2 { return; } } self.last_drawn = Some(Instant::now()); let mut txt = Text::prompt(&self.title); txt.override_width = Some(self.canvas.window_width * 0.8); txt.override_height = Some(self.canvas.window_height * 0.8); for l in &self.lines { txt.add(Line(l)); } let mut target = self.prerender.display.draw(); let context_menu = ContextMenu::new(); let mut g = GfxCtx::new( &self.canvas, self.prerender, &mut target, self.program, &context_menu, false, ); g.clear(Color::BLACK); g.draw_blocking_text( &txt, (HorizontalAlignment::Center, VerticalAlignment::Center), ); self.canvas .screenspace_glyphs .borrow_mut() .draw_queued(self.prerender.display, &mut target); target.finish().unwrap(); } } impl<'a> TimerSink for LoadingScreen<'a> { fn println(&mut self, line: String) { if self.lines.len() == self.max_capacity { self.lines.pop_front(); } self.lines.push_back(line); self.redraw(); } fn reprintln(&mut self, line: String) { self.lines.pop_back(); self.lines.push_back(line); self.redraw(); } } pub enum TextureType { Stretch, Tile, CustomUV, }
me, Box::new(LoadingScreen::new( self.prerender, self.program, self.canvas.window_width, self.canvas.window_height, self.canvas.font_size, timer_name.to_string(), )), ); f(self, &mut timer) }
function_block-function_prefixed
[ { "content": "fn use_parking_hints(map: &mut RawMap, path: &str, timer: &mut Timer) {\n\n timer.start(\"apply parking hints\");\n\n let shapes: ExtraShapes = abstutil::read_binary(path, timer).expect(\"loading blockface failed\");\n\n\n\n // Match shapes with the nearest road + direction (true for forwards)\n\n let mut closest: FindClosest<(OriginalRoad, bool)> =\n\n FindClosest::new(&map.gps_bounds.to_bounds());\n\n for (id, r) in &map.roads {\n\n let center = PolyLine::new(r.center_points.clone());\n\n closest.add(\n\n (*id, true),\n\n center.shift_right(LANE_THICKNESS).get(timer).points(),\n\n );\n\n closest.add(\n\n (*id, false),\n\n center.shift_left(LANE_THICKNESS).get(timer).points(),\n\n );\n\n }\n\n\n\n for s in shapes.shapes.into_iter() {\n", "file_path": "convert_osm/src/lib.rs", "rank": 0, "score": 332104.5907812335 }, { "content": "fn use_offstreet_parking(map: &mut RawMap, path: &str, timer: &mut Timer) {\n\n timer.start(\"match offstreet parking points\");\n\n let shapes = kml::load(path, &map.gps_bounds, timer).expect(\"loading offstreet_parking failed\");\n\n\n\n let mut closest: FindClosest<OriginalBuilding> = FindClosest::new(&map.gps_bounds.to_bounds());\n\n for (id, b) in &map.buildings {\n\n closest.add(*id, b.polygon.points());\n\n }\n\n\n\n // TODO Another function just to use ?. 
Try blocks would rock.\n\n let mut handle_shape: Box<dyn FnMut(kml::ExtraShape) -> Option<()>> = Box::new(|s| {\n\n assert_eq!(s.points.len(), 1);\n\n let pt = Pt2D::from_gps(s.points[0], &map.gps_bounds)?;\n\n let (id, _) = closest.closest_pt(pt, Distance::meters(50.0))?;\n\n // TODO Handle parking lots.\n\n if !map.buildings[&id].polygon.contains_pt(pt) {\n\n return None;\n\n }\n\n let name = s.attributes.get(\"DEA_FACILITY_NAME\")?.to_string();\n\n let num_stalls = s.attributes.get(\"DEA_STALLS\")?.parse::<usize>().ok()?;\n", "file_path": "convert_osm/src/lib.rs", "rank": 1, "score": 332104.5907812335 }, { "content": "fn use_sidewalk_hints(map: &mut RawMap, path: &str, timer: &mut Timer) {\n\n timer.start(\"apply sidewalk hints\");\n\n let shapes: ExtraShapes = abstutil::read_binary(path, timer).unwrap();\n\n\n\n // Match shapes with the nearest road + direction (true for forwards)\n\n let mut closest: FindClosest<(OriginalRoad, bool)> =\n\n FindClosest::new(&map.gps_bounds.to_bounds());\n\n for (id, r) in &map.roads {\n\n let center = PolyLine::new(r.center_points.clone());\n\n closest.add(\n\n (*id, true),\n\n center.shift_right(LANE_THICKNESS).get(timer).points(),\n\n );\n\n closest.add(\n\n (*id, false),\n\n center.shift_left(LANE_THICKNESS).get(timer).points(),\n\n );\n\n }\n\n\n\n for s in shapes.shapes.into_iter() {\n", "file_path": "convert_osm/src/lib.rs", "rank": 2, "score": 332104.5907812335 }, { "content": "pub fn read_binary<T: DeserializeOwned>(path: &str, timer: &mut Timer) -> Result<T, Error> {\n\n if !path.ends_with(\".bin\") {\n\n panic!(\"read_binary needs {} to end with .bin\", path);\n\n }\n\n\n\n timer.read_file(path)?;\n\n let obj: T =\n\n bincode::deserialize_from(timer).map_err(|err| Error::new(ErrorKind::Other, err))?;\n\n Ok(obj)\n\n}\n\n\n\n// For BTreeMaps with struct keys. 
See https://github.com/serde-rs/json/issues/402.\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 3, "score": 319422.4924746683 }, { "content": "pub fn read_json<T: DeserializeOwned>(path: &str, timer: &mut Timer) -> Result<T, Error> {\n\n if !path.ends_with(\".json\") && !path.ends_with(\".geojson\") {\n\n panic!(\"read_json needs {} to end with .json or .geojson\", path);\n\n }\n\n\n\n timer.start(&format!(\"parse {}\", path));\n\n // TODO timer.read_file isn't working here. And we need to call stop() if there's no file.\n\n match File::open(path) {\n\n Ok(mut file) => {\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n let obj: T = serde_json::from_str(&contents)?;\n\n timer.stop(&format!(\"parse {}\", path));\n\n Ok(obj)\n\n }\n\n Err(e) => {\n\n timer.stop(&format!(\"parse {}\", path));\n\n Err(e)\n\n }\n\n }\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 4, "score": 319422.49247466825 }, { "content": "// TODO This needs to update turn restrictions too\n\npub fn clip_map(map: &mut RawMap, timer: &mut Timer) {\n\n timer.start(\"clipping map to boundary\");\n\n\n\n // So we can use retain_btreemap without borrowing issues\n\n let boundary_polygon = map.boundary_polygon.clone();\n\n let boundary_lines: Vec<PolyLine> = map\n\n .boundary_polygon\n\n .points()\n\n .windows(2)\n\n .map(|pair| PolyLine::new(pair.to_vec()))\n\n .collect();\n\n\n\n // This is kind of indirect and slow, but first pass -- just remove roads that start or end\n\n // outside the boundary polygon.\n\n retain_btreemap(&mut map.roads, |_, r| {\n\n let first_in = boundary_polygon.contains_pt(r.center_points[0]);\n\n let last_in = boundary_polygon.contains_pt(*r.center_points.last().unwrap());\n\n first_in || last_in\n\n });\n\n\n", "file_path": "convert_osm/src/clip.rs", "rank": 5, "score": 303949.1163869528 }, { "content": "pub fn remove_disconnected_roads(map: &mut RawMap, timer: &mut Timer) {\n\n timer.start(\"removing disconnected roads\");\n\n // 
This is a simple floodfill, not Tarjan's. Assumes all roads bidirectional.\n\n // All the usizes are indices into the original list of roads\n\n\n\n let mut next_roads: MultiMap<OriginalIntersection, OriginalRoad> = MultiMap::new();\n\n for id in map.roads.keys() {\n\n next_roads.insert(id.i1, *id);\n\n next_roads.insert(id.i2, *id);\n\n }\n\n\n\n let mut partitions: Vec<Vec<OriginalRoad>> = Vec::new();\n\n let mut unvisited_roads: BTreeSet<OriginalRoad> = map.roads.keys().cloned().collect();\n\n\n\n while !unvisited_roads.is_empty() {\n\n let mut queue_roads: Vec<OriginalRoad> = vec![*unvisited_roads.iter().next().unwrap()];\n\n let mut current_partition: Vec<OriginalRoad> = Vec::new();\n\n while !queue_roads.is_empty() {\n\n let current = queue_roads.pop().unwrap();\n\n if !unvisited_roads.contains(&current) {\n", "file_path": "map_model/src/make/remove_disconnected.rs", "rank": 6, "score": 294629.281863822 }, { "content": "pub fn run<G: GUI, F: FnOnce(&mut EventCtx) -> G>(settings: Settings, make_gui: F) {\n\n let events_loop = glutin::EventsLoop::new();\n\n let window = glutin::WindowBuilder::new()\n\n .with_title(settings.window_title)\n\n .with_dimensions(glutin::dpi::LogicalSize::new(\n\n settings.initial_dims.0,\n\n settings.initial_dims.1,\n\n ));\n\n // multisampling: 2 looks bad, 4 looks fine\n\n //\n\n // The Z values are very simple:\n\n // 1.0: The buffer is reset every frame\n\n // 0.5: Map-space geometry and text\n\n // 0.1: Screen-space text\n\n // 0.0: Screen-space geometry\n\n // Had weird issues with Z buffering not working as intended, so this is slightly more\n\n // complicated than necessary to work.\n\n let context = glutin::ContextBuilder::new()\n\n .with_multisampling(4)\n\n .with_depth_buffer(2);\n", "file_path": "ezgui/src/runner.rs", "rank": 7, "score": 290581.1220007051 }, { "content": "#[allow(non_snake_case)]\n\npub fn Line<S: Into<String>>(text: S) -> TextSpan {\n\n TextSpan {\n\n text: text.into(),\n\n fg_color: FG_COLOR,\n\n size: 
None,\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Text {\n\n // The bg_color will cover the entire block, but some lines can have extra highlighting.\n\n lines: Vec<(Option<Color>, Vec<TextSpan>)>,\n\n bg_color: Option<Color>,\n\n pub override_width: Option<f64>,\n\n pub override_height: Option<f64>,\n\n}\n\n\n\nimpl Text {\n\n pub fn new() -> Text {\n\n Text {\n", "file_path": "ezgui/src/text.rs", "rank": 8, "score": 290318.6081865045 }, { "content": "pub fn trips_to_scenario(map: &Map, timer: &mut Timer) -> Scenario {\n\n let (trips, _) = clip_trips(map, timer);\n\n // TODO Don't clone trips for parallelize\n\n let individ_trips = timer\n\n .parallelize(\"turn PSRC trips into SpawnTrips\", trips.clone(), |trip| {\n\n trip.to_spawn_trip(map)\n\n })\n\n .into_iter()\n\n .flatten()\n\n .collect();\n\n\n\n // How many parked cars do we need to spawn near each building?\n\n // TODO This assumes trips are instantaneous. At runtime, somebody might try to use a parked\n\n // car from a building, but one hasn't been delivered yet.\n\n let mut individ_parked_cars = BTreeMap::new();\n\n let mut avail_per_bldg = BTreeMap::new();\n\n for b in map.all_buildings() {\n\n individ_parked_cars.insert(b.id, 0);\n\n avail_per_bldg.insert(b.id, 0);\n\n }\n", "file_path": "popdat/src/trips.rs", "rank": 9, "score": 288680.99457022524 }, { "content": "pub fn convert(flags: &Flags, timer: &mut abstutil::Timer) -> RawMap {\n\n let mut map = split_ways::split_up_roads(\n\n osm_reader::extract_osm(&flags.osm, &flags.clip, timer),\n\n timer,\n\n );\n\n clip::clip_map(&mut map, timer);\n\n\n\n // Need to do a first pass of removing cul-de-sacs here, or we wind up with loop PolyLines when doing the parking hint matching.\n\n abstutil::retain_btreemap(&mut map.roads, |r, _| r.i1 != r.i2);\n\n\n\n if let Some(ref path) = flags.parking_shapes {\n\n use_parking_hints(&mut map, path, timer);\n\n }\n\n if let Some(ref path) = flags.offstreet_parking {\n\n use_offstreet_parking(&mut map, 
path, timer);\n\n }\n\n if let Some(ref path) = flags.sidewalks {\n\n use_sidewalk_hints(&mut map, path, timer);\n\n }\n\n if let Some(ref path) = flags.gtfs {\n", "file_path": "convert_osm/src/lib.rs", "rank": 10, "score": 277910.5550625431 }, { "content": "pub fn retain_btreeset<K: Ord + Clone, F: FnMut(&K) -> bool>(set: &mut BTreeSet<K>, mut keep: F) {\n\n let mut remove: Vec<K> = Vec::new();\n\n for k in set.iter() {\n\n if !keep(k) {\n\n remove.push(k.clone());\n\n }\n\n }\n\n for k in remove {\n\n set.remove(&k);\n\n }\n\n}\n\n\n", "file_path": "abstutil/src/collections.rs", "rank": 11, "score": 275620.24788454827 }, { "content": "// TODO Temporarily public for debugging.\n\n// TODO This should just draw the turn geometry thickened, once that's stable.\n\npub fn calculate_corners(i: &Intersection, map: &Map, timer: &mut Timer) -> Vec<Polygon> {\n\n let mut corners = Vec::new();\n\n\n\n for turn in &map.get_turns_in_intersection(i.id) {\n\n if turn.turn_type == TurnType::SharedSidewalkCorner {\n\n // Avoid double-rendering\n\n if map.get_l(turn.id.src).dst_i != i.id {\n\n continue;\n\n }\n\n\n\n // Special case for dead-ends: just thicken the geometry.\n\n if i.roads.len() == 1 {\n\n corners.push(turn.geom.make_polygons(LANE_THICKNESS));\n\n continue;\n\n }\n\n\n\n let l1 = map.get_l(turn.id.src);\n\n let l2 = map.get_l(turn.id.dst);\n\n\n\n let src_line = l1.last_line().shift_left(LANE_THICKNESS / 2.0);\n", "file_path": "game/src/render/intersection.rs", "rank": 12, "score": 267837.06446180464 }, { "content": "pub fn clip_trips(map: &Map, timer: &mut Timer) -> (Vec<Trip>, HashMap<BuildingID, Parcel>) {\n\n let popdat: PopDat = abstutil::read_binary(\"../data/shapes/popdat.bin\", timer)\n\n .expect(\"Couldn't load popdat.bin\");\n\n\n\n let mut osm_id_to_bldg = HashMap::new();\n\n for b in map.all_buildings() {\n\n osm_id_to_bldg.insert(b.osm_way_id, b.id);\n\n }\n\n let bounds = map.get_gps_bounds();\n\n // TODO Figure out why some polygon centers are 
broken\n\n let incoming_borders_walking: Vec<(IntersectionID, LonLat)> = map\n\n .all_incoming_borders()\n\n .into_iter()\n\n .filter(|i| {\n\n !i.get_outgoing_lanes(map, PathConstraints::Pedestrian)\n\n .is_empty()\n\n })\n\n .filter_map(|i| i.polygon.center().to_gps(bounds).map(|pt| (i.id, pt)))\n\n .collect();\n\n let incoming_borders_driving: Vec<(IntersectionID, LonLat)> = map\n", "file_path": "popdat/src/trips.rs", "rank": 13, "score": 255614.61401109537 }, { "content": "fn calculate_driving_lines(lane: &Lane, parent: &Road, timer: &mut Timer) -> Vec<Polygon> {\n\n // The leftmost lanes don't have dashed lines.\n\n let (dir, idx) = parent.dir_and_offset(lane.id);\n\n if idx == 0 || (dir && parent.children_forwards[idx - 1].1 == LaneType::SharedLeftTurn) {\n\n return Vec::new();\n\n }\n\n let lane_edge_pts = lane\n\n .lane_center_pts\n\n .shift_left(LANE_THICKNESS / 2.0)\n\n .get(timer);\n\n dashed_lines(\n\n &lane_edge_pts,\n\n Distance::meters(0.25),\n\n Distance::meters(1.0),\n\n Distance::meters(1.5),\n\n )\n\n}\n\n\n", "file_path": "game/src/render/lane.rs", "rank": 14, "score": 242559.86981812812 }, { "content": "pub fn rotating_color(idx: usize) -> Color {\n\n rotating_color_total(idx, 9)\n\n}\n\n\n", "file_path": "game/src/helpers.rs", "rank": 15, "score": 229460.189631555 }, { "content": "pub fn path1(map_name: &str, category: &str, dir: &str) -> String {\n\n format!(\"../data/{}/{}/{}\", category, map_name, dir)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 16, "score": 225499.83224234785 }, { "content": "pub fn path1_json(map_name: &str, category: &str, instance: &str) -> String {\n\n format!(\"../data/{}/{}/{}.json\", category, map_name, instance)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 17, "score": 223066.99205946553 }, { "content": "pub fn path1_bin(map_name: &str, category: &str, instance: &str) -> String {\n\n format!(\"../data/{}/{}/{}.bin\", category, map_name, instance)\n\n}\n\n\n", "file_path": 
"abstutil/src/lib.rs", "rank": 18, "score": 223066.99205946553 }, { "content": "pub fn path2_dir(map_name: &str, category: &str, dir: &str) -> String {\n\n format!(\"../data/{}/{}/{}/\", category, map_name, dir)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 19, "score": 223066.99205946553 }, { "content": "fn input_weighted_usize(wizard: &mut WrappedWizard, query: &str) -> Option<WeightedUsizeChoice> {\n\n wizard.input_something(\n\n query,\n\n None,\n\n Box::new(|line| WeightedUsizeChoice::parse(&line)),\n\n )\n\n}\n\n\n", "file_path": "game/src/mission/scenario.rs", "rank": 20, "score": 221528.79838965074 }, { "content": "pub fn path2_bin(map_name: &str, category: &str, dir: &str, instance: &str) -> String {\n\n format!(\"../data/{}/{}/{}/{}.bin\", category, map_name, dir, instance)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 21, "score": 221072.80686445232 }, { "content": "pub fn basename(path: &str) -> String {\n\n Path::new(path)\n\n .file_stem()\n\n .unwrap()\n\n .to_os_string()\n\n .into_string()\n\n .unwrap()\n\n}\n", "file_path": "abstutil/src/io.rs", "rank": 22, "score": 219751.10424720187 }, { "content": "fn pick_color(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let name = wiz\n\n .wrap(ctx)\n\n .choose_string(\"Change which color?\", || ui.cs.color_names())?;\n\n Some(Transition::Replace(Box::new(ColorChanger {\n\n name: name.clone(),\n\n original: ui.cs.get_modified(&name),\n\n menu: ModalMenu::new(\n\n &format!(\"Color Picker for {}\", name),\n\n vec![\n\n (hotkey(Key::Backspace), \"revert\"),\n\n (hotkey(Key::Escape), \"finalize\"),\n\n ],\n\n ctx,\n\n ),\n\n })))\n\n}\n\n\n", "file_path": "game/src/debug/color_picker.rs", "rank": 23, "score": 219634.42279758648 }, { "content": "#[allow(clippy::unreadable_literal)]\n\npub fn run(t: &mut TestRunner) {\n\n t.run_fast(\"dist_along_horiz_line\", |_| {\n\n let l = Line::new(\n\n Pt2D::new(147.17832753158294, 1651.034235433578),\n\n 
Pt2D::new(185.9754103560146, 1651.0342354335778),\n\n );\n\n let pt = Pt2D::new(179.1628455160347, 1651.0342354335778);\n\n\n\n assert!(l.contains_pt(pt));\n\n assert!(l.dist_along_of_point(pt).is_some());\n\n });\n\n\n\n t.run_fast(\"trim_with_epsilon\", |_| {\n\n /*\n\n // EPSILON_DIST needs to be tuned correctly, or this point seems like it's not on the line.\n\n let mut pl = PolyLine::new(vec![\n\n Pt2D::new(1130.2653468611902, 2124.099702776818),\n\n Pt2D::new(1175.9652436108408, 2124.1094748373457),\n\n Pt2D::new(1225.8319649025132, 2124.120594334445),\n\n ]);\n", "file_path": "tests/src/geom.rs", "rank": 24, "score": 219237.63469927508 }, { "content": "pub fn run(t: &mut TestRunner) {\n\n t.run_slow(\"bike_from_border\", |h| {\n\n let mut flags = SimFlags::for_test(\"bike_from_border\");\n\n flags.opts.savestate_every = Some(Duration::seconds(30.0));\n\n let (map, mut sim, mut rng) = flags.load(&mut Timer::throwaway());\n\n // TODO Hardcoding IDs is fragile\n\n let goal_bldg = BuildingID(319);\n\n let (ped, bike) = sim.schedule_trip(\n\n Duration::ZERO,\n\n TripSpec::UsingBike {\n\n start: SidewalkSpot::start_at_border(IntersectionID(186), &map).unwrap(),\n\n vehicle: Scenario::rand_bike(&mut rng),\n\n goal: DrivingGoal::ParkNear(goal_bldg),\n\n ped_speed: Scenario::rand_ped_speed(&mut rng),\n\n },\n\n &map,\n\n );\n\n sim.spawn_all_trips(&map, &mut Timer::throwaway(), false);\n\n h.setup_done(&sim);\n\n\n", "file_path": "tests/src/trips.rs", "rank": 25, "score": 219237.63469927508 }, { "content": "pub fn run(t: &mut TestRunner) {\n\n t.run_slow(\"bus_reaches_stops\", |h| {\n\n let mut flags = SimFlags::for_test(\"bus_reaches_stops\");\n\n flags.opts.savestate_every = Some(Duration::seconds(30.0));\n\n let (map, mut sim, _) = flags.load(&mut Timer::throwaway());\n\n let route = map.get_bus_route(\"49\").unwrap();\n\n let buses = sim.seed_bus_route(route, &map, &mut Timer::throwaway());\n\n let bus = buses[0];\n\n h.setup_done(&sim);\n\n\n\n let mut 
expectations: Vec<Event> = Vec::new();\n\n // TODO assert stuff about other buses as well, although the timing is a little unclear\n\n for stop in route.stops.iter().skip(1) {\n\n expectations.push(Event::BusArrivedAtStop(bus, route.id, *stop));\n\n expectations.push(Event::BusDepartedFromStop(bus, route.id, *stop));\n\n }\n\n\n\n sim.run_until_expectations_met(&map, expectations, Duration::minutes(10));\n\n // Make sure buses don't block a sim from being considered done\n\n sim.just_run_until_done(&map, Some(Duration::minutes(11)));\n", "file_path": "tests/src/transit.rs", "rank": 26, "score": 219237.63469927508 }, { "content": "pub fn run(t: &mut TestRunner) {\n\n // TODO Lots of boilerplate between these two. Can we do better?\n\n\n\n /*t.run_slow(\"park_on_goal_st\", |h| {\n\n let (map, mut sim, mut rng) = SimFlags::synthetic_test(\"parking_test\", \"park_on_goal_st\")\n\n .load(&mut Timer::throwaway());\n\n let north_bldg = map.bldg(\"north\").id;\n\n let south_bldg = map.bldg(\"south\").id;\n\n let north_parking = map.parking_lane(\"north\", 23).id;\n\n let south_parking = map.parking_lane(\"south\", 23).id;\n\n\n\n let (spot, car) =\n\n h.seed_parked_cars(&mut sim, &mut rng, south_parking, Some(south_bldg), vec![2])[0];\n\n // Fill up some of the first spots, forcing parking to happen at spot 4\n\n h.seed_parked_cars(&mut sim, &mut rng, north_parking, None, (0..4).collect());\n\n h.seed_parked_cars(&mut sim, &mut rng, north_parking, None, (5..10).collect());\n\n sim.schedule_trip(\n\n Duration::ZERO,\n\n TripSpec::UsingParkedCar {\n\n start: SidewalkSpot::building(south_bldg, &map),\n", "file_path": "tests/src/parking.rs", "rank": 27, "score": 219237.63469927508 }, { "content": "// Just list all things from a directory, return sorted by name, with file extension removed.\n\n// Hacky that map_name can be blank. 
;)\n\npub fn list_all_objects(dir: &str, map_name: &str) -> Vec<String> {\n\n let mut results: BTreeSet<String> = BTreeSet::new();\n\n match std::fs::read_dir(format!(\"../data/{}/{}\", dir, map_name)) {\n\n Ok(iter) => {\n\n for entry in iter {\n\n let filename = entry.unwrap().file_name();\n\n let path = Path::new(&filename);\n\n if path.to_string_lossy().ends_with(\".swp\") {\n\n continue;\n\n }\n\n let name = path\n\n .file_stem()\n\n .unwrap()\n\n .to_os_string()\n\n .into_string()\n\n .unwrap();\n\n results.insert(name);\n\n }\n\n }\n\n Err(ref e) if e.kind() == ErrorKind::NotFound => {}\n\n Err(e) => panic!(e),\n\n };\n\n results.into_iter().collect()\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 28, "score": 216923.8177735525 }, { "content": "pub fn path_fixes(name: &str) -> String {\n\n format!(\"../data/fixes/{}.json\", name)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 29, "score": 216371.67401630455 }, { "content": "pub fn path_shortcut(name: &str) -> String {\n\n format!(\"../data/shortcuts/{}.json\", name)\n\n}\n", "file_path": "abstutil/src/lib.rs", "rank": 30, "score": 216371.67401630455 }, { "content": "pub fn run(t: &mut TestRunner) {\n\n t.run_slow(\"small_spawn_completes\", |h| {\n\n let mut flags = SimFlags::for_test(\"aorta_model_completes\");\n\n flags.opts.savestate_every = Some(Duration::seconds(30.0));\n\n let (map, mut sim, mut rng) = flags.load(&mut Timer::throwaway());\n\n Scenario::small_run(&map).instantiate(&mut sim, &map, &mut rng, &mut Timer::throwaway());\n\n h.setup_done(&sim);\n\n sim.just_run_until_done(&map, Some(Duration::minutes(70)));\n\n });\n\n}\n", "file_path": "tests/src/sim_completion.rs", "rank": 31, "score": 215859.5019868964 }, { "content": "pub fn run(t: &mut TestRunner) {\n\n t.run_slow(\"convert_osm_twice\", |_| {\n\n let flags = convert_osm::Flags {\n\n osm: \"../data/input/montlake.osm\".to_string(),\n\n parking_shapes: Some(\"../data/shapes/blockface.bin\".to_string()),\n\n 
offstreet_parking: Some(\"../data/input/offstreet_parking.kml\".to_string()),\n\n sidewalks: Some(\"../data/shapes/sidewalks.bin\".to_string()),\n\n gtfs: Some(\"../data/input/google_transit_2018_18_08\".to_string()),\n\n neighborhoods: Some(\"../data/input/neighborhoods.geojson\".to_string()),\n\n clip: Some(abstutil::path_polygon(\"montlake\")),\n\n output: \"convert_osm_twice.bin\".to_string(),\n\n };\n\n\n\n let map1 = convert_osm::convert(&flags, &mut abstutil::Timer::throwaway());\n\n let map2 = convert_osm::convert(&flags, &mut abstutil::Timer::throwaway());\n\n\n\n if abstutil::to_json(&map1) != abstutil::to_json(&map2) {\n\n // TODO tmp files\n\n abstutil::write_json(\"map1.json\", &map1).unwrap();\n\n abstutil::write_json(\"map2.json\", &map2).unwrap();\n", "file_path": "tests/src/map_conversion.rs", "rank": 32, "score": 215859.5019868964 }, { "content": "pub fn run(t: &mut TestRunner) {\n\n t.run_slow(\"serialization\", |_| {\n\n let (map, mut sim, mut rng) =\n\n SimFlags::for_test(\"serialization\").load(&mut Timer::throwaway());\n\n Scenario::small_run(&map).instantiate(&mut sim, &map, &mut rng, &mut Timer::throwaway());\n\n\n\n // Does savestating produce the same string?\n\n let save1 = abstutil::to_json(&sim);\n\n let save2 = abstutil::to_json(&sim);\n\n assert_eq!(save1, save2);\n\n });\n\n\n\n t.run_slow(\"from_scratch\", |_| {\n\n println!(\"Creating two simulations\");\n\n let flags = SimFlags::for_test(\"from_scratch_1\");\n\n let (map, mut sim1, _) = flags.load(&mut Timer::throwaway());\n\n let mut sim2 = Sim::new(\n\n &map,\n\n SimOptions::new(\"from_scratch_2\"),\n\n &mut Timer::throwaway(),\n", "file_path": "tests/src/sim_determinism.rs", "rank": 33, "score": 215859.5019868964 }, { "content": "pub fn rotating_color_total(idx: usize, total: usize) -> Color {\n\n if total > 9 {\n\n return rotating_color_total(idx, 9);\n\n }\n\n if total < 3 {\n\n return rotating_color_total(idx, 3);\n\n }\n\n\n\n // TODO Cache this\n\n // TODO This palette 
doesn't contrast well with other stuff\n\n let colors: Vec<Color> =\n\n colorbrewer::get_color_ramp(colorbrewer::Palette::YlOrBr, total as u32)\n\n .unwrap()\n\n .into_iter()\n\n .map(Color::from_hex)\n\n .collect();\n\n\n\n colors[idx % total]\n\n}\n", "file_path": "game/src/helpers.rs", "rank": 34, "score": 213866.53502806032 }, { "content": "pub fn path_polygon(polygon_name: &str) -> String {\n\n format!(\"../data/polygons/{}.poly\", polygon_name)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 35, "score": 213160.51296445465 }, { "content": "pub fn path_map(map_name: &str) -> String {\n\n format!(\"../data/maps/{}.bin\", map_name)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 36, "score": 213160.51296445465 }, { "content": "pub fn draw_text_bubble(\n\n g: &mut GfxCtx,\n\n top_left: ScreenPt,\n\n txt: &Text,\n\n // Callers almost always calculate this anyway\n\n (total_width, total_height): (f64, f64),\n\n) -> ScreenRectangle {\n\n // TODO Is it expensive to constantly change uniforms and the shader program?\n\n g.fork_screenspace();\n\n\n\n if let Some(c) = txt.bg_color {\n\n g.draw_polygon(\n\n c,\n\n &Polygon::rectangle_topleft(\n\n Pt2D::new(top_left.x, top_left.y),\n\n Distance::meters(total_width),\n\n Distance::meters(total_height),\n\n ),\n\n );\n\n }\n", "file_path": "ezgui/src/text.rs", "rank": 37, "score": 210929.50665835926 }, { "content": "fn calculate_border_arrows(i: &Intersection, r: &Road, timer: &mut Timer) -> Vec<Polygon> {\n\n let mut result = Vec::new();\n\n\n\n // These arrows should point from the void to the road\n\n if !i.outgoing_lanes.is_empty() {\n\n // The line starts at the border and points down the road\n\n let (line, width) = if r.dst_i == i.id {\n\n let width = (r.children_forwards.len() as f64) * LANE_THICKNESS;\n\n (\n\n r.center_pts.last_line().shift_left(width / 2.0).reverse(),\n\n width,\n\n )\n\n } else {\n\n let width = (r.children_forwards.len() as f64) * LANE_THICKNESS;\n\n 
(r.center_pts.first_line().shift_right(width / 2.0), width)\n\n };\n\n result.push(\n\n // DEGENERATE_INTERSECTION_HALF_LENGTH is 5m...\n\n PolyLine::new(vec![\n\n line.unbounded_dist_along(Distance::meters(-9.5)),\n", "file_path": "game/src/render/intersection.rs", "rank": 38, "score": 210688.90944146545 }, { "content": "pub fn path_prebaked_results(map_name: &str) -> String {\n\n format!(\"../data/prebaked_results/{}.bin\", map_name)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 39, "score": 210105.35864612908 }, { "content": "pub fn path_raw_map(map_name: &str) -> String {\n\n format!(\"../data/raw_maps/{}.bin\", map_name)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 40, "score": 210105.35864612908 }, { "content": "pub fn path_pending_screenshots(map_name: &str) -> String {\n\n format!(\"../data/screenshots/pending_{}\", map_name)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 41, "score": 210105.35864612908 }, { "content": "pub fn path_camera_state(map_name: &str) -> String {\n\n format!(\"../data/camera_state/{}.json\", map_name)\n\n}\n\n\n", "file_path": "abstutil/src/lib.rs", "rank": 42, "score": 210105.35864612908 }, { "content": "// TODO Word wrap\n\npub fn msg<S: Into<String>>(title: &'static str, lines: Vec<S>) -> Box<dyn State> {\n\n let str_lines: Vec<String> = lines.into_iter().map(|l| l.into()).collect();\n\n WizardState::new(Box::new(move |wiz, ctx, _| {\n\n wiz.wrap(ctx).acknowledge(title, || str_lines.clone())?;\n\n Some(Transition::Pop)\n\n }))\n\n}\n", "file_path": "game/src/game.rs", "rank": 43, "score": 209380.45992290028 }, { "content": "pub fn spawn_agents_around(i: IntersectionID, ui: &mut UI, ctx: &EventCtx) {\n\n let map = &ui.primary.map;\n\n let sim = &mut ui.primary.sim;\n\n let mut rng = ui.primary.current_flags.sim_flags.make_rng();\n\n\n\n for l in &map.get_i(i).incoming_lanes {\n\n let lane = map.get_l(*l);\n\n if lane.is_driving() || lane.is_biking() {\n\n for _ in 0..10 {\n\n let vehicle_spec = if 
rng.gen_bool(0.7) && lane.is_driving() {\n\n Scenario::rand_car(&mut rng)\n\n } else {\n\n Scenario::rand_bike(&mut rng)\n\n };\n\n if vehicle_spec.length > lane.length() {\n\n continue;\n\n }\n\n sim.schedule_trip(\n\n sim.time(),\n\n TripSpec::CarAppearing {\n", "file_path": "game/src/sandbox/gameplay/spawner.rs", "rank": 44, "score": 209360.89728400065 }, { "content": "pub fn save_edits(wizard: &mut WrappedWizard, ui: &mut UI) -> Option<()> {\n\n let map = &mut ui.primary.map;\n\n\n\n let rename = if map.get_edits().edits_name == \"no_edits\" {\n\n Some(wizard.input_string(\"Name these map edits\")?)\n\n } else {\n\n None\n\n };\n\n // TODO Don't allow naming them no_edits!\n\n\n\n // TODO Do it this weird way to avoid saving edits on every event. :P\n\n // TODO Do some kind of versioning? Don't ask this if the file doesn't exist yet?\n\n let save = \"save edits\";\n\n let cancel = \"cancel\";\n\n if wizard\n\n .choose_string(\"Overwrite edits?\", || vec![save, cancel])?\n\n .as_str()\n\n == save\n\n {\n\n if let Some(name) = rename {\n\n let mut edits = map.get_edits().clone();\n\n edits.edits_name = name;\n\n map.apply_edits(edits, &mut Timer::new(\"name map edits\"));\n\n }\n\n map.save_edits();\n\n }\n\n Some(())\n\n}\n\n\n", "file_path": "game/src/edit/mod.rs", "rank": 45, "score": 208633.32291015954 }, { "content": "pub fn draw_text_bubble_mapspace(\n\n g: &mut GfxCtx,\n\n top_left: Pt2D,\n\n txt: &Text,\n\n // Callers almost always calculate this anyway\n\n (total_width, total_height): (f64, f64),\n\n) {\n\n if let Some(c) = txt.bg_color {\n\n g.draw_polygon(\n\n c,\n\n &Polygon::rectangle_topleft(\n\n Pt2D::new(top_left.x(), top_left.y()),\n\n Distance::meters(total_width / SCALE_DOWN),\n\n Distance::meters(total_height / SCALE_DOWN),\n\n ),\n\n );\n\n }\n\n\n\n let mut y = top_left.y();\n\n for (line_color, line) in &txt.lines {\n", "file_path": "ezgui/src/text.rs", "rank": 46, "score": 207599.37221824971 }, { "content": "fn calculate_turn_markings(map: 
&Map, lane: &Lane, timer: &mut Timer) -> Vec<Polygon> {\n\n let mut results = Vec::new();\n\n\n\n // Are there multiple driving lanes on this side of the road?\n\n if map\n\n .find_closest_lane(lane.id, vec![LaneType::Driving])\n\n .is_err()\n\n {\n\n return results;\n\n }\n\n if lane.length() < Distance::meters(7.0) {\n\n return results;\n\n }\n\n\n\n let thickness = Distance::meters(0.2);\n\n\n\n let common_base = lane.lane_center_pts.exact_slice(\n\n lane.length() - Distance::meters(7.0),\n\n lane.length() - Distance::meters(5.0),\n\n );\n", "file_path": "game/src/render/lane.rs", "rank": 47, "score": 205872.3621189049 }, { "content": "pub fn save_json_object<T: Serialize>(dir: &str, map_name: &str, obj_name: &str, obj: &T) {\n\n let path = format!(\"../data/{}/{}/{}.json\", dir, map_name, obj_name);\n\n write_json(&path, obj).expect(&format!(\"Saving {} failed\", path));\n\n println!(\"Saved {}\", path);\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 48, "score": 205404.84793849016 }, { "content": "pub fn save_binary_object<T: Serialize>(dir: &str, map_name: &str, obj_name: &str, obj: &T) {\n\n let path = format!(\"../data/{}/{}/{}.bin\", dir, map_name, obj_name);\n\n write_binary(&path, obj).expect(&format!(\"Saving {} failed\", path));\n\n println!(\"Saved {}\", path);\n\n}\n\n\n\n// TODO I'd like to get rid of this and just use Timer.read_file, but external libraries consume\n\n// the reader. :\\\n\npub struct FileWithProgress {\n\n inner: BufReader<File>,\n\n\n\n path: String,\n\n processed_bytes: usize,\n\n total_bytes: usize,\n\n started_at: Instant,\n\n last_printed_at: Instant,\n\n}\n\n\n\nimpl FileWithProgress {\n\n // Also hands back a callback that'll add the final result to the timer. 
The caller must run\n", "file_path": "abstutil/src/io.rs", "rank": 49, "score": 205404.84793849016 }, { "content": "fn choose_scenario(map_name: &str, wizard: &mut WrappedWizard, query: &str) -> Option<String> {\n\n wizard.choose_string(query, || {\n\n abstutil::list_all_objects(abstutil::SCENARIOS, map_name)\n\n })\n\n}\n\n\n", "file_path": "game/src/abtest/setup.rs", "rank": 50, "score": 202977.80951243767 }, { "content": "pub fn retain_btreemap<K: Ord + Clone, V, F: Fn(&K, &V) -> bool>(\n\n map: &mut BTreeMap<K, V>,\n\n keep: F,\n\n) {\n\n let mut remove_keys: Vec<K> = Vec::new();\n\n for (k, v) in map.iter() {\n\n if !keep(k, v) {\n\n remove_keys.push(k.clone());\n\n }\n\n }\n\n for k in remove_keys {\n\n map.remove(&k);\n\n }\n\n}\n\n\n", "file_path": "abstutil/src/collections.rs", "rank": 51, "score": 202331.3386811908 }, { "content": "// Load all serialized things from a directory, return sorted by name, with file extension removed.\n\n// Detects JSON or binary.\n\npub fn load_all_objects<T: DeserializeOwned>(dir: &str, map_name: &str) -> Vec<(String, T)> {\n\n let mut timer = Timer::new(&format!(\n\n \"load_all_objects from ../data/{}/{}/\",\n\n dir, map_name\n\n ));\n\n let mut tree: BTreeMap<String, T> = BTreeMap::new();\n\n match std::fs::read_dir(format!(\"../data/{}/{}/\", dir, map_name)) {\n\n Ok(iter) => {\n\n for entry in iter {\n\n let filename = entry.unwrap().file_name();\n\n let path = Path::new(&filename);\n\n let path_str = path.to_string_lossy();\n\n if path_str.ends_with(\".swp\") {\n\n continue;\n\n }\n\n let name = path\n\n .file_stem()\n\n .unwrap()\n\n .to_os_string()\n\n .into_string()\n", "file_path": "abstutil/src/io.rs", "rank": 52, "score": 199697.24033702357 }, { "content": "pub fn dashed_lines(\n\n pl: &PolyLine,\n\n width: Distance,\n\n dash_len: Distance,\n\n dash_separation: Distance,\n\n) -> Vec<Polygon> {\n\n if pl.length() < dash_separation * 2.0 + EPSILON_DIST {\n\n return vec![pl.make_polygons(width)];\n\n }\n\n // Don't 
draw the dashes too close to the ends.\n\n pl.exact_slice(dash_separation, pl.length() - dash_separation)\n\n .dashed_polygons(width, dash_len, dash_separation)\n\n}\n\n\n\npub struct DrawCtx<'a> {\n\n pub cs: &'a ColorScheme,\n\n pub map: &'a Map,\n\n pub draw_map: &'a DrawMap,\n\n pub sim: &'a Sim,\n\n}\n", "file_path": "game/src/render/mod.rs", "rank": 53, "score": 198941.3229740174 }, { "content": "pub fn load(dir_path: &str) -> Result<Vec<Route>, Error> {\n\n println!(\"Loading GTFS from {}\", dir_path);\n\n let timer = Instant::now();\n\n\n\n let mut route_id_to_name: HashMap<String, String> = HashMap::new();\n\n for rec in csv::Reader::from_reader(File::open(format!(\"{}/routes.txt\", dir_path))?).records() {\n\n let rec = rec?;\n\n route_id_to_name.insert(rec[0].to_string(), rec[2].to_string());\n\n }\n\n\n\n let mut stop_id_to_pt: HashMap<String, LonLat> = HashMap::new();\n\n for rec in csv::Reader::from_reader(File::open(format!(\"{}/stops.txt\", dir_path))?).records() {\n\n let rec = rec?;\n\n let lon: f64 = rec[5].parse()?;\n\n let lat: f64 = rec[4].parse()?;\n\n stop_id_to_pt.insert(rec[0].to_string(), LonLat::new(lon, lat));\n\n }\n\n\n\n let mut trip_id_to_route_id_and_direction: HashMap<String, (String, bool)> = HashMap::new();\n\n for rec in csv::Reader::from_reader(File::open(format!(\"{}/trips.txt\", dir_path))?).records() {\n", "file_path": "gtfs/src/lib.rs", "rank": 54, "score": 197341.15314152287 }, { "content": "fn parse_pt(input: &str, gps_bounds: &GPSBounds) -> Option<LonLat> {\n\n let coords: Vec<&str> = input.split(',').collect();\n\n if coords.len() != 2 {\n\n return None;\n\n }\n\n let pt = match (coords[0].parse::<f64>(), coords[1].parse::<f64>()) {\n\n (Ok(lon), Ok(lat)) => Some(LonLat::new(lon, lat)),\n\n _ => None,\n\n }?;\n\n if gps_bounds.contains(pt) {\n\n Some(pt)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "kml/src/lib.rs", "rank": 55, "score": 197144.834420822 }, { "content": "fn clip_tracts(popdat: &PopDat, ui: &UI, 
timer: &mut Timer) -> BTreeMap<String, Tract> {\n\n // TODO Partial clipping could be neat, except it'd be confusing to interpret totals.\n\n let mut results = BTreeMap::new();\n\n timer.start_iter(\"clip tracts\", popdat.tracts.len());\n\n for (name, tract) in &popdat.tracts {\n\n timer.next();\n\n if let Some(pts) = ui.primary.map.get_gps_bounds().try_convert(&tract.pts) {\n\n // TODO We should actually make sure the polygon is completely contained within the\n\n // map's boundary.\n\n let polygon = Polygon::new(&pts);\n\n\n\n // TODO Don't just use the center...\n\n let mut num_bldgs = 0;\n\n let mut num_parking_spots = 0;\n\n for id in ui\n\n .primary\n\n .draw_map\n\n .get_matching_objects(polygon.get_bounds())\n\n {\n\n match id {\n", "file_path": "game/src/mission/dataviz.rs", "rank": 56, "score": 196896.6080984808 }, { "content": "fn fix_field_names(orig_path: &str, shapes: &mut ExtraShapes) -> Option<()> {\n\n let new_path = orig_path.replace(\".kml\", \".xml\");\n\n if !path::Path::new(&new_path).exists() {\n\n return None;\n\n }\n\n println!(\"Loading extra metadata from {}\", new_path);\n\n let root = Element::parse(fs::read_to_string(new_path).ok()?.as_bytes()).ok()?;\n\n\n\n let mut rename = BTreeMap::new();\n\n for attr in &root.get_child(\"eainfo\")?.get_child(\"detailed\")?.children {\n\n if attr.name != \"attr\" {\n\n continue;\n\n }\n\n let key = attr.get_child(\"attrlabl\")?.text.clone()?;\n\n let value = attr.get_child(\"attrdef\")?.text.clone()?;\n\n rename.insert(key, value);\n\n }\n\n\n\n for shp in shapes.shapes.iter_mut() {\n\n let mut attribs = BTreeMap::new();\n", "file_path": "kml/src/lib.rs", "rank": 57, "score": 195953.15665830567 }, { "content": "// Need to explain this trick -- basically keeps consistency between two different simulations when\n\n// each one might make slightly different sequences of calls to the RNG.\n\npub fn fork_rng(base_rng: &mut XorShiftRng) -> XorShiftRng {\n\n XorShiftRng::from_seed([base_rng.next_u32() as 
u8; 16])\n\n}\n\n\n\n// Represents the probability of sampling 0, 1, 2, 3... The sum can be anything.\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct WeightedUsizeChoice {\n\n pub weights: Vec<usize>,\n\n}\n\n\n\nimpl WeightedUsizeChoice {\n\n pub fn parse(string: &str) -> Option<WeightedUsizeChoice> {\n\n let parts: Vec<&str> = string.split(',').collect();\n\n if parts.is_empty() {\n\n return None;\n\n }\n\n let mut weights: Vec<usize> = Vec::new();\n\n for x in parts.into_iter() {\n\n let x = x.parse::<usize>().ok()?;\n\n weights.push(x);\n\n }\n\n Some(WeightedUsizeChoice { weights })\n\n }\n\n\n\n pub fn sample(&self, rng: &mut XorShiftRng) -> usize {\n\n WeightedIndex::new(&self.weights).unwrap().sample(rng)\n\n }\n\n}\n", "file_path": "abstutil/src/random.rs", "rank": 58, "score": 195199.13747981816 }, { "content": "fn styled_kv(txt: &mut Text, tags: &BTreeMap<String, String>) {\n\n for (k, v) in tags {\n\n txt.add_appended(vec![\n\n Line(k).fg(Color::RED),\n\n Line(\" = \"),\n\n Line(v).fg(Color::CYAN),\n\n ]);\n\n }\n\n}\n", "file_path": "game/src/common/info.rs", "rank": 59, "score": 190931.84959844936 }, { "content": "pub fn faster_trips_panel(mode: TripMode, ui: &UI) -> Text {\n\n let time = ui.primary.sim.time();\n\n let now = ui.primary.sim.get_analytics().finished_trips(time, mode);\n\n let baseline = ui.prebaked.finished_trips(time, mode);\n\n\n\n // Enable to debug why sim results don't match prebaked.\n\n if false && !now.seems_eq(&baseline) {\n\n abstutil::write_json(\n\n \"../current_sim.json\",\n\n &ui.primary.sim.get_analytics().finished_trips,\n\n )\n\n .unwrap();\n\n let filtered = ui\n\n .prebaked\n\n .finished_trips\n\n .iter()\n\n .filter(|(t, _, _, _)| *t <= time)\n\n .cloned()\n\n .collect::<Vec<_>>();\n\n abstutil::write_json(\"../prebaked.json\", &filtered).unwrap();\n", "file_path": "game/src/sandbox/gameplay/faster_trips.rs", "rank": 60, "score": 190586.35361897445 }, { "content": "// TODO Validate the intersection 
exists? Let them pick it with the cursor?\n\nfn choose_intersection(wizard: &mut WrappedWizard, query: &str) -> Option<IntersectionID> {\n\n wizard.input_something(\n\n query,\n\n None,\n\n Box::new(|line| usize::from_str_radix(&line, 10).ok().map(IntersectionID)),\n\n )\n\n}\n\n\n", "file_path": "game/src/mission/scenario.rs", "rank": 61, "score": 190361.70981896567 }, { "content": "pub fn convert(geojson_path: &str, map_name: String, gps_bounds: &GPSBounds) {\n\n println!(\"Extracting neighborhoods from {}...\", geojson_path);\n\n let document: GeoJson = abstutil::read_json(geojson_path, &mut Timer::throwaway()).unwrap();\n\n match document {\n\n GeoJson::FeatureCollection(c) => {\n\n for f in c.features.into_iter() {\n\n let name = f.properties.unwrap()[\"name\"].as_str().unwrap().to_string();\n\n match f.geometry.unwrap().value {\n\n Value::Polygon(p) => {\n\n convert_polygon(p, name, map_name.clone(), gps_bounds);\n\n }\n\n Value::MultiPolygon(polygons) => {\n\n for (idx, p) in polygons.into_iter().enumerate() {\n\n convert_polygon(\n\n p,\n\n format!(\"{} portion #{}\", name, idx + 1),\n\n map_name.clone(),\n\n gps_bounds,\n\n );\n\n }\n\n }\n\n x => panic!(\"Unexpected GeoJson value {:?}\", x),\n\n }\n\n }\n\n }\n\n _ => panic!(\"Unexpected GeoJson root {:?}\", document),\n\n }\n\n}\n\n\n", "file_path": "convert_osm/src/neighborhoods.rs", "rank": 62, "score": 186960.91480882466 }, { "content": "pub fn fix_bus_route(map: &Map, r: &mut BusRoute) -> bool {\n\n // Trim out stops if needed; map borders sometimes mean some paths don't work.\n\n let mut stops = Vec::new();\n\n for stop in r.stops.drain(..) 
{\n\n if stops.is_empty() {\n\n stops.push(stop);\n\n } else {\n\n if check_stops(*stops.last().unwrap(), stop, map) {\n\n stops.push(stop);\n\n }\n\n }\n\n }\n\n // Don't forget the last and first\n\n while stops.len() >= 2 {\n\n if check_stops(*stops.last().unwrap(), stops[0], map) {\n\n break;\n\n }\n\n // TODO Or the front one\n\n stops.pop();\n\n }\n\n r.stops = stops;\n\n r.stops.len() >= 2\n\n}\n\n\n", "file_path": "map_model/src/make/bus_stops.rs", "rank": 63, "score": 186508.4197135209 }, { "content": "// TODO Idea: Have a wrapper type DotJSON(...) and DotBin(...) to distinguish raw path strings\n\npub fn write_json<T: Serialize>(path: &str, obj: &T) -> Result<(), Error> {\n\n if !path.ends_with(\".json\") {\n\n panic!(\"write_json needs {} to end with .json\", path);\n\n }\n\n std::fs::create_dir_all(std::path::Path::new(path).parent().unwrap())\n\n .expect(\"Creating parent dir failed\");\n\n\n\n let mut file = File::create(path)?;\n\n file.write_all(to_json(obj).as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 64, "score": 186494.9734400856 }, { "content": "pub fn write_binary<T: Serialize>(path: &str, obj: &T) -> Result<(), Error> {\n\n if !path.ends_with(\".bin\") {\n\n panic!(\"write_binary needs {} to end with .bin\", path);\n\n }\n\n\n\n std::fs::create_dir_all(std::path::Path::new(path).parent().unwrap())\n\n .expect(\"Creating parent dir failed\");\n\n\n\n let file = BufWriter::new(File::create(path)?);\n\n bincode::serialize_into(file, obj).map_err(|err| Error::new(ErrorKind::Other, err))\n\n}\n\n\n", "file_path": "abstutil/src/io.rs", "rank": 65, "score": 186494.9734400856 }, { "content": "fn choose_neighborhood(map: &Map, wizard: &mut WrappedWizard, query: &str) -> Option<String> {\n\n // Load the full object, since we usually visualize the neighborhood when menuing over it\n\n wizard\n\n .choose(query, || {\n\n Choice::from(Neighborhood::load_all(map.get_name(), map.get_gps_bounds()))\n\n })\n\n .map(|(n, _)| 
n)\n\n}\n\n\n", "file_path": "game/src/mission/scenario.rs", "rank": 66, "score": 182448.33321823215 }, { "content": "fn warp_point(line: &str, primary: &PerMapUI) -> Option<(Option<ID>, Pt2D, f64)> {\n\n if line.is_empty() {\n\n return None;\n\n }\n\n // TODO Weird magic shortcut to go to last spot. What should this be?\n\n if line == \"j\" {\n\n if let Some((pt, zoom)) = primary.last_warped_from {\n\n return Some((None, pt, zoom));\n\n }\n\n return None;\n\n }\n\n\n\n let id = match usize::from_str_radix(&line[1..line.len()], 10) {\n\n Ok(idx) => match line.chars().next().unwrap() {\n\n 'r' => {\n\n let r = primary.map.maybe_get_r(RoadID(idx))?;\n\n ID::Lane(r.children_forwards[0].0)\n\n }\n\n 'l' => ID::Lane(LaneID(idx)),\n\n 'i' => ID::Intersection(IntersectionID(idx)),\n", "file_path": "game/src/common/warp.rs", "rank": 67, "score": 178915.01694167958 }, { "content": "fn warp_to(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let mut wizard = wiz.wrap(ctx);\n\n let to = wizard.input_string(\"Warp to what?\")?;\n\n if let Some((id, pt, cam_zoom)) = warp_point(&to, &ui.primary) {\n\n return Some(Transition::ReplaceWithMode(\n\n Warping::new(ctx, pt, Some(cam_zoom), id, &mut ui.primary),\n\n EventLoopMode::Animation,\n\n ));\n\n }\n\n wizard.acknowledge(\"Bad warp ID\", || vec![format!(\"{} isn't a valid ID\", to)])?;\n\n Some(Transition::Pop)\n\n}\n\n\n\npub struct Warping {\n\n warper: Warper,\n\n id: Option<ID>,\n\n}\n\n\n\nimpl Warping {\n\n pub fn new(\n", "file_path": "game/src/common/warp.rs", "rank": 68, "score": 174477.6437928796 }, { "content": "fn jump_to_time(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let t = wiz.wrap(ctx).input_time_slider(\n\n \"Jump to what time?\",\n\n ui.primary.sim.time(),\n\n Duration::END_OF_DAY,\n\n )?;\n\n let dt = t - ui.primary.sim.time();\n\n ctx.loading_screen(&format!(\"step forwards {}\", dt), |_, mut timer| {\n\n 
ui.primary.sim.timed_step(&ui.primary.map, dt, &mut timer);\n\n if let Some(ref mut s) = ui.secondary {\n\n s.sim.timed_step(&s.map, dt, &mut timer);\n\n }\n\n });\n\n Some(Transition::Pop)\n\n}\n", "file_path": "game/src/common/time.rs", "rank": 69, "score": 173129.00857578445 }, { "content": "fn load_edits(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let mut wizard = wiz.wrap(ctx);\n\n\n\n if ui.primary.map.get_edits().dirty {\n\n let save = \"save edits\";\n\n let discard = \"discard\";\n\n if wizard\n\n .choose_string(\"Save current edits first?\", || vec![save, discard])?\n\n .as_str()\n\n == save\n\n {\n\n save_edits(&mut wizard, ui)?;\n\n wizard.reset();\n\n }\n\n }\n\n\n\n // TODO Exclude current\n\n let map_name = ui.primary.map.get_name().to_string();\n\n let (_, new_edits) = wizard.choose(\"Load which map edits?\", || {\n\n let mut list = Choice::from(abstutil::load_all_objects(abstutil::EDITS, &map_name));\n\n list.push(Choice::new(\"no_edits\", MapEdits::new(map_name.clone())));\n\n list\n\n })?;\n\n apply_map_edits(&mut ui.primary, &ui.cs, ctx, new_edits);\n\n ui.primary.map.mark_edits_fresh();\n\n Some(Transition::Pop)\n\n}\n\n\n", "file_path": "game/src/edit/mod.rs", "rank": 70, "score": 173129.00857578445 }, { "content": "fn load_scenario(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let map_name = ui.primary.map.get_name().to_string();\n\n let s = wiz.wrap(ctx).choose_string(\"Load which scenario?\", || {\n\n abstutil::list_all_objects(abstutil::SCENARIOS, &map_name)\n\n })?;\n\n let scenario = abstutil::read_binary(\n\n &abstutil::path1_bin(&map_name, abstutil::SCENARIOS, &s),\n\n &mut Timer::throwaway(),\n\n )\n\n .unwrap();\n\n Some(Transition::Replace(Box::new(\n\n scenario::ScenarioManager::new(scenario, ctx, ui),\n\n )))\n\n}\n\n\n", "file_path": "game/src/mission/mod.rs", "rank": 71, "score": 173129.00857578445 }, { "content": "fn browse_trips(wiz: &mut Wizard, ctx: &mut 
EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let mut wizard = wiz.wrap(ctx);\n\n let mode = wizard\n\n .choose(\"Browse which trips?\", || {\n\n let trips = CompareTrips::new(\n\n ui.primary.sim.get_finished_trips(),\n\n ui.secondary.as_ref().unwrap().sim.get_finished_trips(),\n\n );\n\n let modes = trips\n\n .finished_trips\n\n .iter()\n\n .map(|(_, m, _, _)| *m)\n\n .collect::<BTreeSet<TripMode>>();\n\n TripMode::all()\n\n .into_iter()\n\n .map(|m| Choice::new(m.to_string(), m).active(modes.contains(&m)))\n\n .collect()\n\n })?\n\n .1;\n\n wizard.choose(\"Examine which trip?\", || {\n", "file_path": "game/src/abtest/score.rs", "rank": 72, "score": 173129.00857578445 }, { "content": "fn choose_shortcut(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let center = ctx\n\n .canvas\n\n .center_to_map_pt()\n\n .forcibly_to_gps(&ui.primary.map.get_gps_bounds());\n\n let cam_zoom = ctx.canvas.cam_zoom;\n\n\n\n let mut wizard = wiz.wrap(ctx);\n\n let (_, mut s) = wizard.choose(\"Jump to which shortcut?\", || {\n\n // TODO Handle >9\n\n // TODO Allow deleting\n\n let keys = vec![\n\n Key::Num1,\n\n Key::Num2,\n\n Key::Num3,\n\n Key::Num4,\n\n Key::Num5,\n\n Key::Num6,\n\n Key::Num7,\n\n Key::Num8,\n", "file_path": "game/src/common/shortcuts.rs", "rank": 73, "score": 173129.00857578445 }, { "content": "fn browse_trips(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let mut wizard = wiz.wrap(ctx);\n\n let (_, mode) = wizard.choose(\"Browse which trips?\", || {\n\n let trips = ui.primary.sim.get_finished_trips();\n\n let modes = trips\n\n .finished_trips\n\n .iter()\n\n .map(|(_, m, _)| *m)\n\n .collect::<BTreeSet<TripMode>>();\n\n TripMode::all()\n\n .into_iter()\n\n .map(|m| Choice::new(m.to_string(), m).active(modes.contains(&m)))\n\n .collect()\n\n })?;\n\n wizard.choose(\"Examine which trip?\", || {\n\n let trips = ui.primary.sim.get_finished_trips();\n\n let mut filtered: Vec<&(TripID, TripMode, 
Duration)> = trips\n\n .finished_trips\n\n .iter()\n\n .filter(|(_, m, _)| *m == mode)\n", "file_path": "game/src/sandbox/score.rs", "rank": 74, "score": 173129.00857578445 }, { "content": "fn load_savestate(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let path = ui.primary.sim.save_dir();\n\n\n\n let ss = wiz.wrap(ctx).choose_string(\"Load which savestate?\", || {\n\n abstutil::list_dir(std::path::Path::new(&path))\n\n })?;\n\n\n\n ctx.loading_screen(\"load savestate\", |ctx, mut timer| {\n\n ui.primary.sim = Sim::load_savestate(ss, &mut timer).expect(\"Can't load savestate\");\n\n ui.recalculate_current_selection(ctx);\n\n });\n\n Some(Transition::Pop)\n\n}\n", "file_path": "game/src/sandbox/mod.rs", "rank": 75, "score": 173129.00857578445 }, { "content": "fn search_osm(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let filter = wiz.wrap(ctx).input_string(\"Search for what?\")?;\n\n let mut ids = HashSet::new();\n\n let mut batch = GeomBatch::new();\n\n\n\n let map = &ui.primary.map;\n\n let color = ui.cs.get_def(\"search result\", Color::RED);\n\n for r in map.all_roads() {\n\n if r.osm_tags\n\n .iter()\n\n .any(|(k, v)| format!(\"{} = {}\", k, v).contains(&filter))\n\n {\n\n for l in r.all_lanes() {\n\n ids.insert(ID::Lane(l));\n\n }\n\n batch.push(color, r.get_thick_polygon().unwrap());\n\n }\n\n }\n\n for b in map.all_buildings() {\n\n if b.osm_tags\n", "file_path": "game/src/debug/mod.rs", "rank": 76, "score": 173129.00857578445 }, { "content": "fn change_scenario(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let num_agents = ui.primary.current_flags.num_agents;\n\n let builtin = if let Some(n) = num_agents {\n\n format!(\"random scenario with {} agents\", n)\n\n } else {\n\n \"random scenario with some agents\".to_string()\n\n };\n\n let scenario_name = wiz\n\n .wrap(ctx)\n\n .choose_string(\"Instantiate which scenario?\", || {\n\n let mut list =\n\n 
abstutil::list_all_objects(abstutil::SCENARIOS, ui.primary.map.get_name());\n\n list.push(builtin.clone());\n\n list.push(\"just buses\".to_string());\n\n list\n\n })?;\n\n Some(Transition::PopThenReplace(Box::new(SandboxMode::new(\n\n ctx,\n\n ui,\n\n GameplayMode::PlayScenario(scenario_name),\n\n ))))\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/mod.rs", "rank": 77, "score": 171823.56146164925 }, { "content": "fn load_map(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n if let Some(name) = wiz.wrap(ctx).choose_string(\"Load which map?\", || {\n\n let current_map = ui.primary.map.get_name();\n\n abstutil::list_all_objects(\"maps\", \"\")\n\n .into_iter()\n\n .filter(|n| n != current_map)\n\n .collect()\n\n }) {\n\n ui.switch_map(ctx, &name);\n\n Some(Transition::PopThenReplace(Box::new(SandboxMode::new(\n\n ctx,\n\n ui,\n\n // TODO If we were playing a scenario, load that one...\n\n GameplayMode::Freeform,\n\n ))))\n\n } else if wiz.aborted() {\n\n Some(Transition::Pop)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/mod.rs", "rank": 78, "score": 171823.56146164925 }, { "content": "fn splash_screen(raw_wizard: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let mut wizard = raw_wizard.wrap(ctx);\n\n let sandbox = \"Sandbox mode\";\n\n let challenge = \"Challenge mode\";\n\n let abtest = \"A/B Test Mode (internal/unfinished)\";\n\n let tutorial = \"Tutorial (unfinished)\";\n\n let mission = \"Internal developer tools\";\n\n let about = \"About\";\n\n let quit = \"Quit\";\n\n\n\n let dev = ui.primary.current_flags.dev;\n\n\n\n match wizard\n\n .choose(\"Welcome to A/B Street!\", || {\n\n vec![\n\n Some(Choice::new(sandbox, ()).key(Key::S)),\n\n Some(Choice::new(challenge, ()).key(Key::C)),\n\n if dev {\n\n Some(Choice::new(abtest, ()).key(Key::A))\n\n } else {\n", "file_path": "game/src/splash_screen.rs", "rank": 79, "score": 171823.56146164925 }, { "content": "fn 
pick_ab_test(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let mut wizard = wiz.wrap(ctx);\n\n let load_existing = \"Load existing A/B test\";\n\n let create_new = \"Create new A/B test\";\n\n let ab_test = if wizard.choose_string(\"What A/B test to manage?\", || {\n\n vec![load_existing, create_new]\n\n })? == load_existing\n\n {\n\n wizard\n\n .choose(\"Load which A/B test?\", || {\n\n Choice::from(abstutil::load_all_objects(\n\n abstutil::AB_TESTS,\n\n ui.primary.map.get_name(),\n\n ))\n\n })?\n\n .1\n\n } else {\n\n let test_name = wizard.input_string(\"Name the A/B test\")?;\n\n let map_name = ui.primary.map.get_name();\n\n\n", "file_path": "game/src/abtest/setup.rs", "rank": 80, "score": 171823.56146164925 }, { "content": "fn create_new_scenario(wiz: &mut Wizard, ctx: &mut EventCtx, ui: &mut UI) -> Option<Transition> {\n\n let name = wiz.wrap(ctx).input_string(\"Name the scenario\")?;\n\n let mut s = Scenario::empty(&ui.primary.map);\n\n s.seed_buses = true;\n\n s.scenario_name = name;\n\n Some(Transition::Replace(Box::new(\n\n scenario::ScenarioManager::new(s, ctx, ui),\n\n )))\n\n}\n\n\n", "file_path": "game/src/mission/mod.rs", "rank": 81, "score": 171823.56146164925 }, { "content": "pub trait TimerSink {\n\n fn println(&mut self, line: String);\n\n fn reprintln(&mut self, line: String);\n\n}\n\n\n\n// Hierarchial magic\n\npub struct Timer<'a> {\n\n results: Vec<String>,\n\n stack: Vec<StackEntry>,\n\n\n\n outermost_name: String,\n\n\n\n notes: Vec<String>,\n\n pub(crate) warnings: Vec<String>,\n\n\n\n sink: Option<Box<dyn TimerSink + 'a>>,\n\n}\n\n\n", "file_path": "abstutil/src/time.rs", "rank": 82, "score": 166944.57884375838 }, { "content": "fn change_traffic_signal(signal: ControlTrafficSignal, ui: &mut UI, ctx: &mut EventCtx) {\n\n let mut edits = ui.primary.map.get_edits().clone();\n\n // TODO Only record one command for the entire session. 
Otherwise, we can exit this editor and\n\n // undo a few times, potentially ending at an invalid state!\n\n if edits\n\n .commands\n\n .last()\n\n .map(|cmd| match cmd {\n\n EditCmd::ChangeTrafficSignal(ref s) => s.id == signal.id,\n\n _ => false,\n\n })\n\n .unwrap_or(false)\n\n {\n\n edits.commands.pop();\n\n }\n\n edits.commands.push(EditCmd::ChangeTrafficSignal(signal));\n\n apply_map_edits(&mut ui.primary, &ui.cs, ctx, edits);\n\n}\n\n\n", "file_path": "game/src/edit/traffic_signals.rs", "rank": 83, "score": 166922.66929071106 }, { "content": "// TODO this always does it at pt1\n\nfn perp_line(l: Line, length: Distance) -> Line {\n\n let pt1 = l.shift_right(length / 2.0).pt1();\n\n let pt2 = l.shift_left(length / 2.0).pt1();\n\n Line::new(pt1, pt2)\n\n}\n\n\n", "file_path": "game/src/render/lane.rs", "rank": 84, "score": 166508.26794740497 }, { "content": "// TODO copied from DrawLane\n\nfn perp_line(l: Line, length: Distance) -> Line {\n\n let pt1 = l.shift_right(length / 2.0).pt1();\n\n let pt2 = l.shift_left(length / 2.0).pt1();\n\n Line::new(pt1, pt2)\n\n}\n", "file_path": "game/src/render/intersection.rs", "rank": 85, "score": 166508.26794740497 }, { "content": "// More is better\n\npub fn cmp_count_more(now: usize, baseline: usize) -> TextSpan {\n\n if now < baseline {\n\n Line(format!(\"{} fewer\", prettyprint_usize(baseline - now))).fg(Color::RED)\n\n } else if now > baseline {\n\n Line(format!(\"{} more\", prettyprint_usize(now - baseline))).fg(Color::GREEN)\n\n } else {\n\n Line(\"same as baseline\")\n\n }\n\n}\n", "file_path": "game/src/sandbox/gameplay/mod.rs", "rank": 86, "score": 165082.5706147028 }, { "content": "fn launch_test(test: &ABTest, ui: &mut UI, ctx: &mut EventCtx) -> ABTestMode {\n\n let secondary = ctx.loading_screen(\n\n &format!(\"Launching A/B test {}\", test.test_name),\n\n |ctx, mut timer| {\n\n let scenario: Scenario = abstutil::read_binary(\n\n &abstutil::path1_bin(&test.map_name, abstutil::SCENARIOS, &test.scenario_name),\n\n 
&mut timer,\n\n )\n\n .expect(\"loading scenario failed\");\n\n\n\n {\n\n timer.start(\"load primary\");\n\n if ui.primary.current_flags.sim_flags.rng_seed.is_none() {\n\n ui.primary.current_flags.sim_flags.rng_seed = Some(42);\n\n }\n\n ui.primary.current_flags.sim_flags.opts.run_name =\n\n format!(\"{} with {}\", test.test_name, test.edits1_name);\n\n ui.primary.current_flags.sim_flags.opts.savestate_every = None;\n\n\n\n apply_map_edits(\n", "file_path": "game/src/abtest/setup.rs", "rank": 87, "score": 164356.8791567829 }, { "content": "// Fewer is better\n\npub fn cmp_count_fewer(now: usize, baseline: usize) -> TextSpan {\n\n if now < baseline {\n\n Line(format!(\"{} fewer\", prettyprint_usize(baseline - now))).fg(Color::GREEN)\n\n } else if now > baseline {\n\n Line(format!(\"{} more\", prettyprint_usize(now - baseline))).fg(Color::RED)\n\n } else {\n\n Line(\"same as baseline\")\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/mod.rs", "rank": 88, "score": 162549.81780310927 }, { "content": "fn make_crosswalk(batch: &mut GeomBatch, turn: &Turn, cs: &ColorScheme) {\n\n // Start at least LANE_THICKNESS out to not hit sidewalk corners. Also account for the\n\n // thickness of the crosswalk line itself. Center the lines inside these two boundaries.\n\n let boundary = LANE_THICKNESS;\n\n let tile_every = LANE_THICKNESS * 0.6;\n\n let line = {\n\n // The middle line in the crosswalk geometry is the main crossing line.\n\n let pts = turn.geom.points();\n\n Line::new(pts[1], pts[2])\n\n };\n\n\n\n let available_length = line.length() - (boundary * 2.0);\n\n if available_length > Distance::ZERO {\n\n let num_markings = (available_length / tile_every).floor() as usize;\n\n let mut dist_along =\n\n boundary + (available_length - tile_every * (num_markings as f64)) / 2.0;\n\n // TODO Seems to be an off-by-one sometimes. Not enough of these.\n\n for _ in 0..=num_markings {\n\n let pt1 = line.dist_along(dist_along);\n\n // Reuse perp_line. 
Project away an arbitrary amount\n", "file_path": "game/src/render/intersection.rs", "rank": 89, "score": 161619.088766453 }, { "content": "// TODO Move to sim crate\n\npub fn prebake() {\n\n let mut timer = Timer::new(\"prebake all challenge results\");\n\n\n\n for map_name in vec![\"montlake\", \"23rd\"] {\n\n timer.start(&format!(\"run normal sim for {}\", map_name));\n\n let (map, mut sim, _) = SimFlags {\n\n load: abstutil::path1_bin(\n\n map_name,\n\n abstutil::SCENARIOS,\n\n \"weekday_typical_traffic_from_psrc\",\n\n ),\n\n use_map_fixes: true,\n\n rng_seed: Some(42),\n\n opts: SimOptions::new(\"prebaked\"),\n\n }\n\n .load(&mut timer);\n\n sim.timed_step(&map, Duration::END_OF_DAY, &mut timer);\n\n timer.stop(&format!(\"run normal sim for {}\", map_name));\n\n\n\n abstutil::write_binary(\n\n &abstutil::path_prebaked_results(map_name),\n\n sim.get_analytics(),\n\n )\n\n .unwrap();\n\n }\n\n}\n", "file_path": "game/src/challenges.rs", "rank": 90, "score": 158515.9391717252 }, { "content": "pub fn load(\n\n path: &str,\n\n gps_bounds: &GPSBounds,\n\n timer: &mut Timer,\n\n) -> Result<ExtraShapes, io::Error> {\n\n println!(\"Opening {}\", path);\n\n let (f, done) = FileWithProgress::new(path)?;\n\n // TODO FileWithProgress should implement BufRead, so we don't have to double wrap like this\n\n let mut reader = Reader::from_reader(io::BufReader::new(f));\n\n reader.trim_text(true);\n\n\n\n let mut buf = Vec::new();\n\n\n\n // TODO uncomfortably stateful\n\n let mut shapes = Vec::new();\n\n let mut scanned_schema = false;\n\n let mut attributes: BTreeMap<String, String> = BTreeMap::new();\n\n let mut attrib_key: Option<String> = None;\n\n\n\n let mut skipped_count = 0;\n", "file_path": "kml/src/lib.rs", "rank": 91, "score": 158510.24151731416 }, { "content": "// Shorter is better\n\npub fn cmp_duration_shorter(now: Duration, baseline: Duration) -> Vec<TextSpan> {\n\n if now.epsilon_eq(baseline) {\n\n vec![Line(\" (same as baseline)\")]\n\n } else if now < 
baseline {\n\n vec![\n\n Line(\" (\"),\n\n Line((baseline - now).minimal_tostring()).fg(Color::GREEN),\n\n Line(\" faster)\"),\n\n ]\n\n } else if now > baseline {\n\n vec![\n\n Line(\" (\"),\n\n Line((now - baseline).minimal_tostring()).fg(Color::RED),\n\n Line(\" slower)\"),\n\n ]\n\n } else {\n\n unreachable!()\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/mod.rs", "rank": 92, "score": 158221.68200297465 }, { "content": "fn percent_color(percent: f64) -> Color {\n\n rotating_color((percent * 10.0).round() as usize)\n\n}\n", "file_path": "game/src/render/map.rs", "rank": 93, "score": 157973.5802564834 }, { "content": "fn delay_color(delay: Duration) -> Color {\n\n // TODO Better gradient\n\n if delay <= Duration::minutes(1) {\n\n return Color::BLUE.alpha(0.3);\n\n }\n\n if delay <= Duration::minutes(5) {\n\n return Color::ORANGE.alpha(0.5);\n\n }\n\n Color::RED.alpha(0.8)\n\n}\n\n\n", "file_path": "game/src/render/map.rs", "rank": 94, "score": 157973.5802564834 }, { "content": "fn info_for(id: ID, ui: &UI, ctx: &EventCtx) -> Text {\n\n let (map, sim, draw_map) = (&ui.primary.map, &ui.primary.sim, &ui.primary.draw_map);\n\n let mut txt = Text::new();\n\n // TODO Technically we should recalculate all of this as the window resizes, then.\n\n txt.override_width = Some(0.7 * ctx.canvas.window_width);\n\n txt.override_height = Some(0.7 * ctx.canvas.window_height);\n\n\n\n txt.extend(&CommonState::default_osd(id.clone(), ui));\n\n txt.highlight_last_line(Color::BLUE);\n\n let id_color = ui.cs.get(\"OSD ID color\");\n\n let name_color = ui.cs.get(\"OSD name color\");\n\n\n\n match id {\n\n ID::Road(_) => unreachable!(),\n\n ID::Lane(id) => {\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n txt.add_appended(vec![\n\n Line(\"Parent \"),\n", "file_path": "game/src/common/info.rs", "rank": 95, "score": 157627.24912480777 }, { "content": "fn get_color(x: f32, y: f32) -> Color {\n\n assert!(x >= 0.0 && x <= 1.0);\n\n assert!(y >= 0.0 && y <= 
1.0);\n\n Color::rgb_f(x, y, (x + y) / 2.0)\n\n}\n", "file_path": "game/src/debug/color_picker.rs", "rank": 96, "score": 157232.80659776018 }, { "content": "fn get_screen_offset(canvas: &Canvas) -> (f64, f64) {\n\n let total_width = TILE_DIMS * f64::from(WIDTH);\n\n let total_height = TILE_DIMS * f64::from(HEIGHT);\n\n let start_x = (canvas.window_width - total_width) / 2.0;\n\n let start_y = (canvas.window_height - total_height) / 2.0;\n\n (start_x, start_y)\n\n}\n\n\n", "file_path": "game/src/debug/color_picker.rs", "rank": 97, "score": 156960.50723351171 }, { "content": "// Adjust the path to start on the building's border, not center\n\nfn trim_path(poly: &Polygon, path: Line) -> Line {\n\n for bldg_line in poly.points().windows(2) {\n\n let l = Line::new(bldg_line[0], bldg_line[1]);\n\n if let Some(hit) = l.intersection(&path) {\n\n if let Some(l) = Line::maybe_new(hit, path.pt2()) {\n\n return l;\n\n }\n\n }\n\n }\n\n // Just give up\n\n path\n\n}\n", "file_path": "map_model/src/make/buildings.rs", "rank": 98, "score": 156748.02701671544 }, { "content": "pub fn import_trips(\n\n parcels_path: &str,\n\n trips_path: &str,\n\n timer: &mut Timer,\n\n) -> Result<(Vec<Trip>, BTreeMap<i64, Parcel>), failure::Error> {\n\n let (parcels, metadata) = import_parcels(parcels_path, timer)?;\n\n\n\n let mut trips = Vec::new();\n\n let (reader, done) = FileWithProgress::new(trips_path)?;\n\n for rec in csv::Reader::from_reader(reader).records() {\n\n let rec = rec?;\n\n\n\n // opcl\n\n let from = skip_fail!(parcels.get(rec[15].trim_end_matches(\".0\"))).clone();\n\n // dpcl\n\n let to = skip_fail!(parcels.get(rec[6].trim_end_matches(\".0\"))).clone();\n\n\n\n if from.osm_building == to.osm_building {\n\n // TODO Plumb along pass-through trips later\n\n if from.osm_building.is_some() {\n", "file_path": "popdat/src/psrc.rs", "rank": 99, "score": 155869.01368587927 } ]
Rust
vrp-core/src/solver/telemetry.rs
valerivp/vrp
27ee30e5f4c44e051e5cec1248e606305b52fc00
#[cfg(test)] #[path = "../../tests/unit/solver/telemetry_test.rs"] mod telemetry_test; use crate::algorithms::nsga2::Objective; use crate::construction::heuristics::InsertionContext; use crate::solver::population::SelectionPhase; use crate::solver::{RefinementContext, Statistics}; use crate::utils::Timer; use std::fmt::Write; use std::ops::Deref; use std::sync::Arc; pub type InfoLogger = Arc<dyn Fn(&str)>; pub struct Metrics { pub duration: usize, pub generations: usize, pub speed: f64, pub evolution: Vec<Generation>, } pub struct Generation { pub number: usize, pub timestamp: f64, pub i_all_ratio: f64, pub i_1000_ratio: f64, pub is_improvement: bool, pub population: Population, } pub struct Individual { pub rank: usize, pub tours: usize, pub unassigned: usize, pub cost: f64, pub improvement: f64, pub fitness: Vec<f64>, } pub struct Population { pub individuals: Vec<Individual>, } pub enum TelemetryMode { None, OnlyLogging { logger: InfoLogger, log_best: usize, log_population: usize, dump_population: bool, }, OnlyMetrics { track_population: usize, }, All { logger: InfoLogger, log_best: usize, log_population: usize, track_population: usize, dump_population: bool, }, } pub struct Telemetry { metrics: Metrics, time: Timer, mode: TelemetryMode, improvement_tracker: ImprovementTracker, next_generation: Option<usize>, } impl Telemetry { pub fn new(mode: TelemetryMode) -> Self { Self { time: Timer::start(), metrics: Metrics { duration: 0, generations: 0, speed: 0.0, evolution: vec![] }, mode, improvement_tracker: ImprovementTracker::new(1000), next_generation: None, } } pub fn start(&mut self) { self.time = Timer::start(); } pub fn on_initial(&mut self, item_idx: usize, total_items: usize, item_time: Timer, termination_estimate: f64) { match &self.mode { TelemetryMode::OnlyLogging { .. } | TelemetryMode::All { .. 
} => self.log( format!( "[{}s] created {} of {} initial solutions in {}ms (ts: {})", self.time.elapsed_secs(), item_idx + 1, total_items, item_time.elapsed_millis(), termination_estimate, ) .as_str(), ), _ => {} }; } pub fn on_generation( &mut self, refinement_ctx: &mut RefinementContext, termination_estimate: f64, generation_time: Timer, is_improved: bool, ) { let generation = self.next_generation.unwrap_or(0); self.metrics.generations = generation; self.improvement_tracker.track(generation, is_improved); refinement_ctx.statistics = Statistics { generation, improvement_all_ratio: self.improvement_tracker.i_all_ratio, improvement_1000_ratio: self.improvement_tracker.i_1000_ratio, termination_estimate, }; self.next_generation = Some(generation + 1); let (log_best, log_population, track_population, should_dump_population) = match &self.mode { TelemetryMode::None => return, TelemetryMode::OnlyLogging { log_best, log_population, dump_population, .. } => { (Some(log_best), Some(log_population), None, *dump_population) } TelemetryMode::OnlyMetrics { track_population, .. } => (None, None, Some(track_population), false), TelemetryMode::All { log_best, log_population, track_population, dump_population, .. 
} => { (Some(log_best), Some(log_population), Some(track_population), *dump_population) } }; if let Some((best_individual, rank)) = refinement_ctx.population.ranked().next() { let should_log_best = generation % *log_best.unwrap_or(&usize::MAX) == 0; let should_log_population = generation % *log_population.unwrap_or(&usize::MAX) == 0; let should_track_population = generation % *track_population.unwrap_or(&usize::MAX) == 0; if should_log_best { self.log_individual( &self.get_individual_metrics(refinement_ctx, &best_individual, rank), Some((refinement_ctx.statistics.generation, generation_time)), ) } self.on_population(&refinement_ctx, should_log_population, should_track_population, should_dump_population); } else { self.log("no progress yet"); } } fn on_population( &mut self, refinement_ctx: &RefinementContext, should_log_population: bool, should_track_population: bool, should_dump_population: bool, ) { if !should_log_population && !should_track_population { return; } if should_log_population { self.log( format!( "[{}s] population state (phase: {}, speed: {:.2} gen/sec, improvement ratio: {:.3}:{:.3}):", self.time.elapsed_secs(), Self::get_selection_phase(refinement_ctx), refinement_ctx.statistics.generation as f64 / self.time.elapsed_secs_as_f64(), self.improvement_tracker.i_all_ratio, self.improvement_tracker.i_1000_ratio, ) .as_str(), ); } let individuals = refinement_ctx .population .ranked() .map(|(insertion_ctx, rank)| self.get_individual_metrics(refinement_ctx, &insertion_ctx, rank)) .collect::<Vec<_>>(); if should_log_population { individuals.iter().for_each(|metrics| self.log_individual(&metrics, None)); if should_dump_population { self.log(&format!("\t{}", Self::get_population_state(refinement_ctx))); } } if should_track_population { self.metrics.evolution.push(Generation { number: refinement_ctx.statistics.generation, timestamp: self.time.elapsed_secs_as_f64(), i_all_ratio: self.improvement_tracker.i_all_ratio, i_1000_ratio: 
self.improvement_tracker.i_1000_ratio, is_improvement: self.improvement_tracker.is_last_improved, population: Population { individuals }, }); } } pub fn on_result(&mut self, refinement_ctx: &RefinementContext) { let generations = refinement_ctx.statistics.generation; let (should_log_population, should_track_population) = match &self.mode { TelemetryMode::OnlyLogging { .. } => (true, false), TelemetryMode::OnlyMetrics { track_population, .. } => (false, generations % track_population != 0), TelemetryMode::All { track_population, .. } => (true, generations % track_population != 0), _ => return, }; self.on_population(refinement_ctx, should_log_population, should_track_population, false); let elapsed = self.time.elapsed_secs() as usize; let speed = refinement_ctx.statistics.generation as f64 / self.time.elapsed_secs_as_f64(); self.log(format!("[{}s] total generations: {}, speed: {:.2} gen/sec", elapsed, generations, speed).as_str()); self.metrics.duration = elapsed; self.metrics.speed = speed; } pub fn get_metrics(self) -> Option<Metrics> { match &self.mode { TelemetryMode::OnlyMetrics { .. } | TelemetryMode::All { .. } => Some(self.metrics), _ => None, } } pub fn log(&self, message: &str) { match &self.mode { TelemetryMode::OnlyLogging { logger, .. } => logger.deref()(message), TelemetryMode::All { logger, .. 
} => logger.deref()(message), _ => {} } } fn get_individual_metrics( &self, refinement_ctx: &RefinementContext, insertion_ctx: &InsertionContext, rank: usize, ) -> Individual { let fitness_values = insertion_ctx.get_fitness_values().collect::<Vec<_>>(); let (cost, cost_difference) = Self::get_fitness(refinement_ctx, insertion_ctx); Individual { rank, tours: insertion_ctx.solution.routes.len(), unassigned: insertion_ctx.solution.unassigned.len(), cost, improvement: cost_difference, fitness: fitness_values, } } fn log_individual(&self, metrics: &Individual, gen_info: Option<(usize, Timer)>) { self.log( format!( "{} rank: {}, cost: {:.2}({:.3}%), tours: {}, unassigned: {}, fitness: ({})", gen_info.map_or("\t".to_string(), |(gen, gen_time)| format!( "[{}s] generation {} took {}ms,", self.time.elapsed_secs(), gen, gen_time.elapsed_millis() )), metrics.rank, metrics.cost, metrics.improvement, metrics.tours, metrics.unassigned, metrics.fitness.iter().map(|v| format!("{:.3}", v)).collect::<Vec<_>>().join(", ") ) .as_str(), ); } fn get_fitness(refinement_ctx: &RefinementContext, insertion_ctx: &InsertionContext) -> (f64, f64) { let fitness_value = refinement_ctx.problem.objective.fitness(insertion_ctx); let fitness_change = refinement_ctx .population .ranked() .next() .map(|(best_ctx, _)| refinement_ctx.problem.objective.fitness(best_ctx)) .map(|best_fitness| (fitness_value - best_fitness) / best_fitness * 100.) 
.unwrap_or(0.); (fitness_value, fitness_change) } fn get_population_state(refinement_ctx: &RefinementContext) -> String { let mut state = String::new(); write!(state, "{}", refinement_ctx.population).unwrap(); state } fn get_selection_phase(refinement_ctx: &RefinementContext) -> &str { match refinement_ctx.population.selection_phase() { SelectionPhase::Initial => "initial", SelectionPhase::Exploration => "exploration", SelectionPhase::Exploitation => "exploitation", } } } struct ImprovementTracker { buffer: Vec<bool>, total_improvements: usize, pub i_all_ratio: f64, pub i_1000_ratio: f64, pub is_last_improved: bool, } impl ImprovementTracker { pub fn new(size: usize) -> Self { Self { buffer: vec![false; size], total_improvements: 0, i_all_ratio: 0., i_1000_ratio: 0., is_last_improved: false, } } pub fn track(&mut self, generation: usize, is_improved: bool) { let length = self.buffer.len(); if is_improved { self.total_improvements += 1; } self.is_last_improved = is_improved; self.buffer[generation % length] = is_improved; let improvements = (0..generation + 1).zip(self.buffer.iter()).filter(|(_, is_improved)| **is_improved).count(); self.i_all_ratio = (self.total_improvements as f64) / ((generation + 1) as f64); self.i_1000_ratio = (improvements as f64) / ((generation + 1).min(self.buffer.len()) as f64); } }
#[cfg(test)] #[path = "../../tests/unit/solver/telemetry_test.rs"] mod telemetry_test; use crate::algorithms::nsga2::Objective; use crate::construction::heuristics::InsertionContext; use crate::solver::population::SelectionPhase; use crate::solver::{RefinementContext, Statistics}; use crate::utils::Ti
termination_estimate, }; self.next_generation = Some(generation + 1); let (log_best, log_population, track_population, should_dump_population) = match &self.mode { TelemetryMode::None => return, TelemetryMode::OnlyLogging { log_best, log_population, dump_population, .. } => { (Some(log_best), Some(log_population), None, *dump_population) } TelemetryMode::OnlyMetrics { track_population, .. } => (None, None, Some(track_population), false), TelemetryMode::All { log_best, log_population, track_population, dump_population, .. } => { (Some(log_best), Some(log_population), Some(track_population), *dump_population) } }; if let Some((best_individual, rank)) = refinement_ctx.population.ranked().next() { let should_log_best = generation % *log_best.unwrap_or(&usize::MAX) == 0; let should_log_population = generation % *log_population.unwrap_or(&usize::MAX) == 0; let should_track_population = generation % *track_population.unwrap_or(&usize::MAX) == 0; if should_log_best { self.log_individual( &self.get_individual_metrics(refinement_ctx, &best_individual, rank), Some((refinement_ctx.statistics.generation, generation_time)), ) } self.on_population(&refinement_ctx, should_log_population, should_track_population, should_dump_population); } else { self.log("no progress yet"); } } fn on_population( &mut self, refinement_ctx: &RefinementContext, should_log_population: bool, should_track_population: bool, should_dump_population: bool, ) { if !should_log_population && !should_track_population { return; } if should_log_population { self.log( format!( "[{}s] population state (phase: {}, speed: {:.2} gen/sec, improvement ratio: {:.3}:{:.3}):", self.time.elapsed_secs(), Self::get_selection_phase(refinement_ctx), refinement_ctx.statistics.generation as f64 / self.time.elapsed_secs_as_f64(), self.improvement_tracker.i_all_ratio, self.improvement_tracker.i_1000_ratio, ) .as_str(), ); } let individuals = refinement_ctx .population .ranked() .map(|(insertion_ctx, rank)| 
self.get_individual_metrics(refinement_ctx, &insertion_ctx, rank)) .collect::<Vec<_>>(); if should_log_population { individuals.iter().for_each(|metrics| self.log_individual(&metrics, None)); if should_dump_population { self.log(&format!("\t{}", Self::get_population_state(refinement_ctx))); } } if should_track_population { self.metrics.evolution.push(Generation { number: refinement_ctx.statistics.generation, timestamp: self.time.elapsed_secs_as_f64(), i_all_ratio: self.improvement_tracker.i_all_ratio, i_1000_ratio: self.improvement_tracker.i_1000_ratio, is_improvement: self.improvement_tracker.is_last_improved, population: Population { individuals }, }); } } pub fn on_result(&mut self, refinement_ctx: &RefinementContext) { let generations = refinement_ctx.statistics.generation; let (should_log_population, should_track_population) = match &self.mode { TelemetryMode::OnlyLogging { .. } => (true, false), TelemetryMode::OnlyMetrics { track_population, .. } => (false, generations % track_population != 0), TelemetryMode::All { track_population, .. } => (true, generations % track_population != 0), _ => return, }; self.on_population(refinement_ctx, should_log_population, should_track_population, false); let elapsed = self.time.elapsed_secs() as usize; let speed = refinement_ctx.statistics.generation as f64 / self.time.elapsed_secs_as_f64(); self.log(format!("[{}s] total generations: {}, speed: {:.2} gen/sec", elapsed, generations, speed).as_str()); self.metrics.duration = elapsed; self.metrics.speed = speed; } pub fn get_metrics(self) -> Option<Metrics> { match &self.mode { TelemetryMode::OnlyMetrics { .. } | TelemetryMode::All { .. } => Some(self.metrics), _ => None, } } pub fn log(&self, message: &str) { match &self.mode { TelemetryMode::OnlyLogging { logger, .. } => logger.deref()(message), TelemetryMode::All { logger, .. 
} => logger.deref()(message), _ => {} } } fn get_individual_metrics( &self, refinement_ctx: &RefinementContext, insertion_ctx: &InsertionContext, rank: usize, ) -> Individual { let fitness_values = insertion_ctx.get_fitness_values().collect::<Vec<_>>(); let (cost, cost_difference) = Self::get_fitness(refinement_ctx, insertion_ctx); Individual { rank, tours: insertion_ctx.solution.routes.len(), unassigned: insertion_ctx.solution.unassigned.len(), cost, improvement: cost_difference, fitness: fitness_values, } } fn log_individual(&self, metrics: &Individual, gen_info: Option<(usize, Timer)>) { self.log( format!( "{} rank: {}, cost: {:.2}({:.3}%), tours: {}, unassigned: {}, fitness: ({})", gen_info.map_or("\t".to_string(), |(gen, gen_time)| format!( "[{}s] generation {} took {}ms,", self.time.elapsed_secs(), gen, gen_time.elapsed_millis() )), metrics.rank, metrics.cost, metrics.improvement, metrics.tours, metrics.unassigned, metrics.fitness.iter().map(|v| format!("{:.3}", v)).collect::<Vec<_>>().join(", ") ) .as_str(), ); } fn get_fitness(refinement_ctx: &RefinementContext, insertion_ctx: &InsertionContext) -> (f64, f64) { let fitness_value = refinement_ctx.problem.objective.fitness(insertion_ctx); let fitness_change = refinement_ctx .population .ranked() .next() .map(|(best_ctx, _)| refinement_ctx.problem.objective.fitness(best_ctx)) .map(|best_fitness| (fitness_value - best_fitness) / best_fitness * 100.) 
.unwrap_or(0.); (fitness_value, fitness_change) } fn get_population_state(refinement_ctx: &RefinementContext) -> String { let mut state = String::new(); write!(state, "{}", refinement_ctx.population).unwrap(); state } fn get_selection_phase(refinement_ctx: &RefinementContext) -> &str { match refinement_ctx.population.selection_phase() { SelectionPhase::Initial => "initial", SelectionPhase::Exploration => "exploration", SelectionPhase::Exploitation => "exploitation", } } } struct ImprovementTracker { buffer: Vec<bool>, total_improvements: usize, pub i_all_ratio: f64, pub i_1000_ratio: f64, pub is_last_improved: bool, } impl ImprovementTracker { pub fn new(size: usize) -> Self { Self { buffer: vec![false; size], total_improvements: 0, i_all_ratio: 0., i_1000_ratio: 0., is_last_improved: false, } } pub fn track(&mut self, generation: usize, is_improved: bool) { let length = self.buffer.len(); if is_improved { self.total_improvements += 1; } self.is_last_improved = is_improved; self.buffer[generation % length] = is_improved; let improvements = (0..generation + 1).zip(self.buffer.iter()).filter(|(_, is_improved)| **is_improved).count(); self.i_all_ratio = (self.total_improvements as f64) / ((generation + 1) as f64); self.i_1000_ratio = (improvements as f64) / ((generation + 1).min(self.buffer.len()) as f64); } }
mer; use std::fmt::Write; use std::ops::Deref; use std::sync::Arc; pub type InfoLogger = Arc<dyn Fn(&str)>; pub struct Metrics { pub duration: usize, pub generations: usize, pub speed: f64, pub evolution: Vec<Generation>, } pub struct Generation { pub number: usize, pub timestamp: f64, pub i_all_ratio: f64, pub i_1000_ratio: f64, pub is_improvement: bool, pub population: Population, } pub struct Individual { pub rank: usize, pub tours: usize, pub unassigned: usize, pub cost: f64, pub improvement: f64, pub fitness: Vec<f64>, } pub struct Population { pub individuals: Vec<Individual>, } pub enum TelemetryMode { None, OnlyLogging { logger: InfoLogger, log_best: usize, log_population: usize, dump_population: bool, }, OnlyMetrics { track_population: usize, }, All { logger: InfoLogger, log_best: usize, log_population: usize, track_population: usize, dump_population: bool, }, } pub struct Telemetry { metrics: Metrics, time: Timer, mode: TelemetryMode, improvement_tracker: ImprovementTracker, next_generation: Option<usize>, } impl Telemetry { pub fn new(mode: TelemetryMode) -> Self { Self { time: Timer::start(), metrics: Metrics { duration: 0, generations: 0, speed: 0.0, evolution: vec![] }, mode, improvement_tracker: ImprovementTracker::new(1000), next_generation: None, } } pub fn start(&mut self) { self.time = Timer::start(); } pub fn on_initial(&mut self, item_idx: usize, total_items: usize, item_time: Timer, termination_estimate: f64) { match &self.mode { TelemetryMode::OnlyLogging { .. } | TelemetryMode::All { .. 
} => self.log( format!( "[{}s] created {} of {} initial solutions in {}ms (ts: {})", self.time.elapsed_secs(), item_idx + 1, total_items, item_time.elapsed_millis(), termination_estimate, ) .as_str(), ), _ => {} }; } pub fn on_generation( &mut self, refinement_ctx: &mut RefinementContext, termination_estimate: f64, generation_time: Timer, is_improved: bool, ) { let generation = self.next_generation.unwrap_or(0); self.metrics.generations = generation; self.improvement_tracker.track(generation, is_improved); refinement_ctx.statistics = Statistics { generation, improvement_all_ratio: self.improvement_tracker.i_all_ratio, improvement_1000_ratio: self.improvement_tracker.i_1000_ratio,
random
[ { "content": "fn create_file(path: &str, description: &str) -> File {\n\n File::create(path).unwrap_or_else(|err| {\n\n eprintln!(\"Cannot create {} file '{}': '{}'\", description, path, err.to_string());\n\n process::exit(1);\n\n })\n\n}\n\n\n\n// TODO avoid code duplication (macros?)\n\n\n", "file_path": "vrp-cli/src/commands/mod.rs", "rank": 0, "score": 129033.49421391597 }, { "content": "fn open_file(path: &str, description: &str) -> File {\n\n File::open(path).unwrap_or_else(|err| {\n\n eprintln!(\"Cannot open {} file '{}': '{}'\", description, path, err.to_string());\n\n process::exit(1);\n\n })\n\n}\n\n\n", "file_path": "vrp-cli/src/commands/mod.rs", "rank": 1, "score": 129033.49421391597 }, { "content": "fn is_slow_search(statistics: &Statistics) -> bool {\n\n statistics.termination_estimate > 0.1 && statistics.generation < 200\n\n}\n", "file_path": "vrp-core/src/solver/hyper/mod.rs", "rank": 2, "score": 105433.30656252653 }, { "content": "//! This module contains some statistic related functionality.\n\n\n\nmod basics;\n\npub use self::basics::*;\n", "file_path": "vrp-core/src/algorithms/statistics/mod.rs", "rank": 3, "score": 102481.3507636481 }, { "content": "pub fn get_test_resource(resource_path: &str) -> std::io::Result<File> {\n\n let mut path = std::env::current_dir()?;\n\n path.push(\"tests\");\n\n path.push(resource_path);\n\n\n\n File::open(path)\n\n}\n\n\n", "file_path": "vrp-scientific/tests/helpers/mod.rs", "rank": 4, "score": 85920.58324776335 }, { "content": "fn open_file(path: &str) -> BufReader<File> {\n\n println!(\"Reading '{}'\", path);\n\n BufReader::new(File::open(path).unwrap_or_else(|err| panic!(\"cannot open {} file: '{}'\", path, err.to_string())))\n\n}\n\n\n", "file_path": "examples/json-pragmatic/src/main.rs", "rank": 5, "score": 85040.15212518758 }, { "content": "fn can_check_tour_statistic_impl(statistic: Statistic, expected_result: Result<(), Vec<String>>) {\n\n let problem = create_test_problem();\n\n let matrix = 
create_matrix_from_problem(&problem);\n\n let solution = create_test_solution(statistic, &[(1., 1), (3., 2), (6., 4)]);\n\n\n\n let result = check_routing(&CheckerContext::new(create_example_problem(), problem, Some(vec![matrix]), solution));\n\n\n\n assert_eq!(result, expected_result);\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/unit/checker/routing_test.rs", "rank": 6, "score": 60505.4516899541 }, { "content": "fn create_test_statistic() -> Statistic {\n\n Statistic {\n\n cost: 10.,\n\n distance: 4,\n\n duration: 6,\n\n times: Timing { driving: 4, serving: 2, waiting: 0, break_time: 0 },\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/tests/unit/checker/routing_test.rs", "rank": 7, "score": 58920.62704283462 }, { "content": "fn create_statistics(termination_estimate: f64, generation: usize) -> Statistics {\n\n let mut statistics = Statistics::default();\n\n statistics.termination_estimate = termination_estimate;\n\n statistics.generation = generation;\n\n statistics.improvement_1000_ratio = 0.5;\n\n\n\n statistics\n\n}\n\n\n", "file_path": "vrp-core/tests/unit/solver/population/rosomaxa_test.rs", "rank": 8, "score": 55837.47577498398 }, { "content": "fn create_test_solution(statistic: Statistic, stops: Vec<Stop>) -> Solution {\n\n Solution {\n\n tours: vec![Tour {\n\n vehicle_id: \"some_real_vehicle\".to_string(),\n\n type_id: \"my_vehicle\".to_string(),\n\n shift_index: 0,\n\n stops,\n\n statistic,\n\n ..create_empty_tour()\n\n }],\n\n ..create_empty_solution()\n\n }\n\n}\n\n\n\nparameterized_test! 
{can_check_shift_and_distance_limit, (max_distance, shift_time, actual, expected_result), {\n\n let expected_result = if let Err(prefix_msg) = expected_result {\n\n Err(format!(\n\n \"{} violation, expected: not more than {}, got: {}, vehicle id 'some_real_vehicle', shift index: 0\",\n\n prefix_msg, max_distance.unwrap_or_else(|| shift_time.unwrap()), actual,\n\n ))\n", "file_path": "vrp-pragmatic/tests/unit/checker/limits_test.rs", "rank": 9, "score": 55837.47577498398 }, { "content": "fn create_test_solution(statistic: Statistic, stop_data: &[(f64, i64); 3]) -> Solution {\n\n let [first, second, third] = stop_data;\n\n Solution {\n\n statistic: statistic.clone(),\n\n tours: vec![Tour {\n\n vehicle_id: \"my_vehicle_1\".to_string(),\n\n type_id: \"my_vehicle\".to_string(),\n\n shift_index: 0,\n\n stops: vec![\n\n create_stop_with_activity(\n\n \"departure\",\n\n \"departure\",\n\n (0., 0.),\n\n 2,\n\n (\"1970-01-01T00:00:00Z\", \"1970-01-01T00:00:00Z\"),\n\n 0,\n\n ),\n\n Stop {\n\n location: vec![1., 0.].to_loc(),\n\n time: Schedule { arrival: format_time(first.0), departure: \"1970-01-01T00:00:02Z\".to_string() },\n", "file_path": "vrp-pragmatic/tests/unit/checker/routing_test.rs", "rank": 10, "score": 55259.16433758555 }, { "content": "#[cfg(test)]\n\n#[path = \"../../../vrp-core/tests/helpers/macros.rs\"]\n\n#[macro_use]\n\npub mod macros;\n\n\n\nmod analysis;\n\npub use self::analysis::*;\n\n\n\nmod solomon;\n\npub use self::solomon::SolomonBuilder;\n\n\n\nmod lilim;\n\npub use self::lilim::LilimBuilder;\n\n\n\nuse crate::lilim::LilimProblem;\n\nuse crate::solomon::SolomonProblem;\n\nuse std::fs::File;\n\nuse std::io::BufReader;\n\nuse vrp_core::models::Problem;\n\n\n", "file_path": "vrp-scientific/tests/helpers/mod.rs", "rank": 11, "score": 52018.56194983566 }, { "content": "#[cfg(test)]\n\n#[path = \"../../../vrp-core/tests/helpers/macros.rs\"]\n\n#[macro_use]\n\npub mod macros;\n\n\n\npub mod generate;\n", "file_path": "vrp-cli/tests/helpers/mod.rs", 
"rank": 12, "score": 52018.18129540246 }, { "content": "#[cfg(test)]\n\n#[path = \"../../../vrp-core/tests/helpers/macros.rs\"]\n\n#[macro_use]\n\npub mod macros;\n\n\n", "file_path": "vrp-pragmatic/tests/helpers/mod.rs", "rank": 13, "score": 52017.55524812138 }, { "content": "//! A collection of various utility helpers.\n\n\n\nmod comparison;\n\npub use self::comparison::*;\n\n\n\nmod environment;\n\npub use self::environment::*;\n\n\n\nmod iterators;\n\npub use self::iterators::CollectGroupBy;\n\n\n\nmod mutability;\n\npub use self::mutability::*;\n\n\n\nmod parallel;\n\npub use self::parallel::*;\n\n\n\nmod random;\n\npub use self::random::*;\n\n\n\nmod time_quota;\n\npub use self::time_quota::TimeQuota;\n\n\n\nmod timing;\n\npub use self::timing::Timer;\n\n\n\nmod types;\n\npub use self::types::Either;\n", "file_path": "vrp-core/src/utils/mod.rs", "rank": 14, "score": 52015.89565753065 }, { "content": "//! This module is responsible for the logic which generates problems with specific characteristics.\n\n\n\nextern crate proptest;\n\nextern crate uuid;\n\n\n\nuse proptest::prelude::*;\n\n\n\nmod common;\n\npub use self::common::*;\n\n\n\nmod jobs;\n\npub use self::jobs::*;\n\n\n\nmod relations;\n\npub use self::relations::*;\n\n\n\nmod defaults;\n\npub use self::defaults::*;\n\n\n\nmod vehicles;\n", "file_path": "vrp-pragmatic/tests/generator/mod.rs", "rank": 15, "score": 52015.375190724124 }, { "content": "use std::sync::Arc;\n\n\n\npub mod hyper;\n\npub mod mutation;\n\npub mod objectives;\n\npub mod population;\n\npub mod termination;\n\n\n\nmod builder;\n\npub use self::builder::Builder;\n\n\n\nmod evolution;\n\nuse self::evolution::{EvolutionConfig, EvolutionSimulator};\n\n\n\nmod telemetry;\n\npub use self::telemetry::{Metrics, Telemetry, TelemetryMode};\n\nuse crate::utils::Environment;\n\n\n\n/// A key to store solution order information.\n\nconst SOLUTION_ORDER_KEY: i32 = 1;\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 16, "score": 
52015.27152736479 }, { "content": "//! This module provides functionality to automatically check that given solution is feasible\n\n//! which means that there is no constraint violations.\n\n\n\n#[cfg(test)]\n\n#[path = \"../../tests/unit/checker/checker_test.rs\"]\n\nmod checker_test;\n\n\n\nuse crate::format::problem::*;\n\nuse crate::format::solution::*;\n\nuse crate::format::Location;\n\nuse crate::parse_time;\n\nuse hashbrown::{HashMap, HashSet};\n\nuse std::sync::Arc;\n\nuse vrp_core::models::common::TimeWindow;\n\nuse vrp_core::models::Problem as CoreProblem;\n\n\n\n/// Stores problem and solution together and provides some helper methods.\n\npub struct CheckerContext {\n\n /// An original problem definition.\n\n pub problem: Problem,\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 17, "score": 52015.00247144281 }, { "content": "use self::jobs::validate_jobs;\n\n\n\nmod objectives;\n\nuse self::objectives::validate_objectives;\n\n\n\nmod vehicles;\n\nuse self::vehicles::validate_vehicles;\n\n\n\nmod relations;\n\nuse self::relations::validate_relations;\n\n\n\nmod routing;\n\nuse self::routing::validate_routing;\n\nuse hashbrown::HashMap;\n\n\n\nimpl<'a> ValidationContext<'a> {\n\n /// Creates an instance of `ValidationContext`.\n\n pub fn new(problem: &'a Problem, matrices: Option<&'a Vec<Matrix>>) -> Self {\n\n Self {\n\n problem,\n", "file_path": "vrp-pragmatic/src/validation/mod.rs", "rank": 18, "score": 52014.88747734622 }, { "content": "//! Contains utility logic.\n\n\n\nmod approx_transporation;\n\npub use self::approx_transporation::get_approx_transportation;\n\n\n\nmod collections;\n\npub use self::collections::*;\n\n\n\nmod permutations;\n\npub use self::permutations::VariableJobPermutation;\n", "file_path": "vrp-pragmatic/src/utils/mod.rs", "rank": 19, "score": 52014.76416376786 }, { "content": "//! 
Contains functionality to read solomon problem and write its solution.\n\n\n\nmod initial_reader;\n\npub use self::initial_reader::read_init_solution;\n\n\n\nmod reader;\n\npub use self::reader::SolomonProblem;\n\n\n\nmod writer;\n\npub use self::writer::SolomonSolution;\n", "file_path": "vrp-scientific/src/solomon/mod.rs", "rank": 20, "score": 52014.52354433393 }, { "content": "//! Contains some algorithm extensions.\n\n\n\nmod only_vehicle_activity_cost;\n\npub use self::only_vehicle_activity_cost::OnlyVehicleActivityCost;\n\n\n\nmod route_modifier;\n\npub use self::route_modifier::get_route_modifier;\n\n\n\nmod typed_actor_group_key;\n\npub use self::typed_actor_group_key::*;\n", "file_path": "vrp-pragmatic/src/extensions/mod.rs", "rank": 21, "score": 52014.383695717996 }, { "content": "use clap::{App, Arg, ArgMatches, Values};\n\n\n\npub mod check;\n\npub mod generate;\n\npub mod import;\n\npub mod solve;\n\n\n\nuse std::fs::File;\n\nuse std::io::{stdout, BufWriter, Write};\n\nuse std::process;\n\nuse std::str::FromStr;\n\n\n\npub(crate) fn create_write_buffer(out_file: Option<File>) -> BufWriter<Box<dyn Write>> {\n\n if let Some(out_file) = out_file {\n\n BufWriter::new(Box::new(out_file))\n\n } else {\n\n BufWriter::new(Box::new(stdout()))\n\n }\n\n}\n\n\n", "file_path": "vrp-cli/src/commands/mod.rs", "rank": 22, "score": 52014.26537289771 }, { "content": "//! Contains functionality to read lilim problem and write its solution.\n\n\n\nmod reader;\n\npub use self::reader::LilimProblem;\n\n\n\nmod writer;\n\npub use self::writer::LilimSolution;\n", "file_path": "vrp-scientific/src/lilim/mod.rs", "rank": 23, "score": 52014.04536689854 }, { "content": "//! 
A collection of reusable algorithms without dependencies on any other module in the project.\n\n\n\npub mod dbscan;\n\npub mod gsom;\n\npub mod mdp;\n\npub mod nsga2;\n\n\n\npub mod geometry;\n\npub mod statistics;\n", "file_path": "vrp-core/src/algorithms/mod.rs", "rank": 24, "score": 52014.007284730156 }, { "content": "pub mod algorithms;\n\npub mod construction;\n\npub mod models;\n\npub mod solver;\n\npub mod utils;\n\n\n\n#[cfg(test)]\n\n#[macro_use]\n\npub mod macros;\n", "file_path": "vrp-core/tests/helpers/mod.rs", "rank": 25, "score": 52013.99193299431 }, { "content": "//! This module defines logic to serialize/deserialize problem and routing matrix in pragmatic\n\n//! format from json input and create and write pragmatic solution.\n\n//!\n\n\n\nextern crate serde_json;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::io::BufWriter;\n\nuse vrp_core::models::problem::Job as CoreJob;\n\nuse vrp_core::models::Problem as CoreProblem;\n\n\n\nmod coord_index;\n\npub(crate) use self::coord_index::CoordIndex;\n\nuse hashbrown::HashMap;\n\nuse std::fmt;\n\n\n\npub mod problem;\n\npub mod solution;\n\n\n\n/// Represents a location type.\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n", "file_path": "vrp-pragmatic/src/format/mod.rs", "rank": 26, "score": 52013.93522438733 }, { "content": "mod routing;\n\npub(crate) use self::routing::CoordIndex;\n", "file_path": "vrp-scientific/src/utils/mod.rs", "rank": 27, "score": 52013.13787130855 }, { "content": "//! 
This module provides functionality to validate problem definition for logical correctness.\n\n\n\nuse crate::format::problem::*;\n\nuse crate::format::{CoordIndex, FormatError};\n\n\n\n/// A validation context which keeps essential information.\n\npub struct ValidationContext<'a> {\n\n /// An original problem.\n\n pub problem: &'a Problem,\n\n /// Routing matrices.\n\n pub matrices: Option<&'a Vec<Matrix>>,\n\n\n\n coord_index: CoordIndex,\n\n job_index: HashMap<String, Job>,\n\n}\n\n\n\nmod common;\n\nuse self::common::*;\n\n\n\nmod jobs;\n", "file_path": "vrp-pragmatic/src/validation/mod.rs", "rank": 28, "score": 52012.87306132715 }, { "content": "//! A collection of models to represent problem and solution in Vehicle Routing Problem domain.\n\n\n\npub(crate) const OP_START_MSG: &str = \"Optional start is not yet implemented.\";\n\n\n\npub mod common;\n\n\n\nmod domain;\n\npub use self::domain::*;\n\n\n\n/// TODO avoid it in production code\n\n#[doc(hidden)]\n\npub mod examples;\n\n\n\npub mod problem;\n\npub mod solution;\n", "file_path": "vrp-core/src/models/mod.rs", "rank": 29, "score": 52012.872365763455 }, { "content": "//! Contains common text reading and writing functionality.\n\n\n\nmod text_reader;\n\npub(crate) use self::text_reader::*;\n\n\n\nmod text_writer;\n\npub(crate) use self::text_writer::*;\n\n\n\n/// A trait to get tuple from collection items.\n\n/// See https://stackoverflow.com/questions/38863781/how-to-create-a-tuple-from-a-vector\n\npub(crate) trait TryCollect<T> {\n\n fn try_collect(&mut self) -> Option<T>;\n\n}\n\n\n\n/// A macro to get tuple from collection items.\n\n#[macro_export]\n\nmacro_rules! impl_try_collect_tuple {\n\n () => { };\n\n ($A:ident $($I:ident)*) => {\n\n impl_try_collect_tuple!($($I)*);\n", "file_path": "vrp-scientific/src/common/mod.rs", "rank": 30, "score": 52011.794583808834 }, { "content": "//! This module contains feature tests: minimalistic tests which check features in isolation\n\n//! 
and their combination.\n\n\n\nmod breaks;\n\nmod dispatch;\n\nmod fleet;\n\nmod format;\n\nmod limits;\n\nmod multjob;\n\nmod pickdev;\n\nmod priorities;\n\nmod relations;\n\nmod reload;\n\nmod skills;\n\nmod timing;\n\nmod values;\n\nmod work_balance;\n", "file_path": "vrp-pragmatic/tests/features/mod.rs", "rank": 31, "score": 52009.74768647589 }, { "content": "//! Module provides various helper functionality.\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\npub mod check;\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\npub mod generate;\n\n\n\npub mod import;\n\npub mod solve;\n", "file_path": "vrp-cli/src/extensions/mod.rs", "rank": 32, "score": 52009.09919179596 }, { "content": "mod generate;\n\nmod solve;\n", "file_path": "vrp-cli/tests/features/mod.rs", "rank": 33, "score": 52009.035269737935 }, { "content": "\n\n /// An environmental context.\n\n pub environment: Arc<Environment>,\n\n\n\n /// A refinement statistics.\n\n pub statistics: Statistics,\n\n}\n\n\n\n/// A refinement statistics to track evolution progress.\n\npub struct Statistics {\n\n /// A number which specifies refinement generation.\n\n pub generation: usize,\n\n\n\n /// An improvement ratio from beginning.\n\n pub improvement_all_ratio: f64,\n\n\n\n /// An improvement ratio for last 1000 iterations.\n\n pub improvement_1000_ratio: f64,\n\n\n\n /// A progress till algorithm's termination.\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 34, "score": 52008.97486655752 }, { "content": "//!\n\n//! [`Schrimpf et al. (2000)`]: https://www.sciencedirect.com/science/article/pii/S0021999199964136\n\n//! [`mutation`]: mutation/index.html\n\n//!\n\n//! # Solver usage\n\n//!\n\n//! Check [`Builder`] and [`Solver`] documentation to see how to run VRP solver.\n\n//!\n\n//! [`Builder`]: ./struct.Builder.html\n\n//! 
[`Solver`]: ./struct.Solver.html\n\n//!\n\n\n\nextern crate rand;\n\nuse crate::algorithms::nsga2::Objective;\n\nuse crate::construction::Quota;\n\nuse crate::models::common::Cost;\n\nuse crate::models::{Problem, Solution};\n\nuse crate::solver::population::Population;\n\nuse hashbrown::HashMap;\n\nuse std::any::Any;\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 35, "score": 52008.86068466175 }, { "content": "//! Contains implementation of extra constraints.\n\n\n\nuse std::sync::Arc;\n\nuse vrp_core::models::common::{Dimensions, IdDimension, ValueDimension};\n\nuse vrp_core::models::problem::Single;\n\nuse vrp_core::models::solution::{Activity, Route};\n\n\n", "file_path": "vrp-pragmatic/src/constraints/mod.rs", "rank": 36, "score": 52008.80104341974 }, { "content": " /// # use std::sync::Arc;\n\n /// use vrp_core::solver::Builder;\n\n /// use vrp_core::models::Problem;\n\n /// use vrp_core::utils::Environment;\n\n ///\n\n /// // create your VRP problem\n\n /// let problem: Arc<Problem> = create_example_problem();\n\n /// let environment = Arc::new(Environment::default());\n\n /// // build solver using builder with default settings\n\n /// let solver = Builder::new(problem, environment).build()?;\n\n /// // run solver and get the best known solution within its cost.\n\n /// let (solution, cost, _) = solver.solve()?;\n\n ///\n\n /// assert_eq!(cost, 42.);\n\n /// assert_eq!(solution.routes.len(), 1);\n\n /// assert_eq!(solution.unassigned.len(), 0);\n\n /// # Ok::<(), String>(())\n\n /// ```\n\n pub fn solve(self) -> Result<(Solution, Cost, Option<Metrics>), String> {\n\n let (population, metrics) = EvolutionSimulator::new(self.config)?.run()?;\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 37, "score": 52008.19734130986 }, { "content": " pub termination_estimate: f64,\n\n}\n\n\n\nimpl RefinementContext {\n\n /// Creates a new instance of `RefinementContext`.\n\n pub fn new(\n\n problem: Arc<Problem>,\n\n population: Box<dyn Population + Sync + 
Send>,\n\n environment: Arc<Environment>,\n\n quota: Option<Arc<dyn Quota + Send + Sync>>,\n\n ) -> Self {\n\n Self { problem, population, state: Default::default(), quota, environment, statistics: Statistics::default() }\n\n }\n\n}\n\n\n\nimpl Default for Statistics {\n\n fn default() -> Self {\n\n Self { generation: 0, improvement_all_ratio: 0., improvement_1000_ratio: 0., termination_estimate: 0. }\n\n }\n\n}\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 38, "score": 52008.07289956471 }, { "content": "\n\n /// Performs solution check.\n\n pub fn check(&self) -> Result<(), Vec<String>> {\n\n let errors = check_vehicle_load(&self)\n\n .err()\n\n .into_iter()\n\n .chain(check_relations(&self).err().into_iter())\n\n .chain(check_breaks(&self).err().into_iter())\n\n .chain(check_assignment(&self).err().into_iter())\n\n .chain(check_routing(&self).err().into_iter())\n\n .chain(check_limits(&self).err().into_iter())\n\n .flatten()\n\n .collect::<Vec<_>>();\n\n\n\n // avoid duplicates keeping original order\n\n let (_, errors) = errors.into_iter().fold((HashSet::new(), Vec::default()), |(mut used, mut errors), error| {\n\n if !used.contains(&error) {\n\n errors.push(error.clone());\n\n used.insert(error);\n\n }\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 39, "score": 52007.68512358524 }, { "content": "//! This module contains discovery tests which tries to explore problem space in order to catch some\n\n//! potential issues. 
Most of the test require a lot of time to run.\n\n\n\nmod property;\n", "file_path": "vrp-pragmatic/tests/discovery/mod.rs", "rank": 40, "score": 52007.49229518177 }, { "content": "\n\n/// A Vehicle Routing Problem Solver based on evolutionary algorithm.\n\npub struct Solver {\n\n /// A VRP problem definition.\n\n pub problem: Arc<Problem>,\n\n /// An evolution configuration.\n\n pub config: EvolutionConfig,\n\n}\n\n\n\nimpl Solver {\n\n /// Solves a Vehicle Routing Problem and returns a _(solution, its cost)_ pair in case of success\n\n /// or error description, if solution cannot be found.\n\n ///\n\n /// # Examples\n\n ///\n\n /// The most simple way to run solver is to use [`Builder`](./struct.Builder.html)\n\n /// which has preconfigured settings:\n\n ///\n\n /// ```\n\n /// # use vrp_core::models::examples::create_example_problem;\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 41, "score": 52007.314784184 }, { "content": "//! In the end, the top ranked individual is returned as best known solution.\n\n//!\n\n//! This crate contains NSGA-II buildings blocks which can be found in [`nsga2`] module.\n\n//!\n\n//! [`nsga2`]: ../algorithms/nsga2/index.html\n\n//!\n\n//! # Evolutionary algorithm\n\n//!\n\n//! An evolutionary algorithm (EA) is a generic population-based metaheuristic optimization algorithm.\n\n//! This crate provides a custom implementation of EA which can be divided into the following steps:\n\n//!\n\n//! - **initialization**: on this step, an initial population is created using different construction\n\n//! heuristics.\n\n//! - **main loop begin**: enter an evolution loop\n\n//! - **selection**: an individual is selected from population. Best-fit individuals have more\n\n//! chances to be selected.\n\n//! - **mutation**: a mutation operator is applied to selected individual. Default implementation\n\n//! uses `ruin and recreate` principle described in next section.\n\n//! 
- **population adjustments**: new individual is added to population, then the population is\n\n//! sorted and shrinked to keep it under specific size limits with best-fit individuals and\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 42, "score": 52006.59621571775 }, { "content": "\n\n (used, errors)\n\n });\n\n\n\n if errors.is_empty() {\n\n Ok(())\n\n } else {\n\n Err(errors)\n\n }\n\n }\n\n\n\n /// Gets vehicle by its id.\n\n fn get_vehicle(&self, vehicle_id: &str) -> Result<&VehicleType, String> {\n\n self.problem\n\n .fleet\n\n .vehicles\n\n .iter()\n\n .find(|v| v.vehicle_ids.contains(&vehicle_id.to_string()))\n\n .ok_or_else(|| format!(\"cannot find vehicle with id '{}'\", vehicle_id))\n\n }\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 43, "score": 52006.56250305089 }, { "content": "pub use self::vehicles::*;\n\n\n\nprop_compose! {\n\n fn from_uints(vec: Vec<u64>)(index in 0..vec.len()) -> u64 {\n\n vec[index]\n\n }\n\n}\n\n\n\nprop_compose! {\n\n fn from_usize(vec: Vec<usize>)(index in 0..vec.len()) -> usize {\n\n vec[index]\n\n }\n\n}\n\n\n\nprop_compose! {\n\n fn from_strings(vec: Vec<String>)(index in 0..vec.len()) -> String {\n\n vec[index].clone()\n\n }\n\n}\n", "file_path": "vrp-pragmatic/tests/generator/mod.rs", "rank": 44, "score": 52006.500513123625 }, { "content": "\n\n impl<$A: Iterator> TryCollect<($A::Item, $($I::Item),*)> for $A {\n\n fn try_collect(&mut self) -> Option<($A::Item, $($I::Item),*)> {\n\n let r = (try_opt!(self.next()),\n\n // hack: we need to use $I in the expansion\n\n $({ let a: $I::Item = try_opt!(self.next()); a}),* );\n\n Some(r)\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// A helper macro for getting tuple of collection items.\n\n#[macro_export]\n\nmacro_rules! 
try_opt {\n\n ($e:expr) => {\n\n match $e {\n\n Some(e) => e,\n\n None => return None,\n\n }\n\n };\n\n}\n\n\n\n// implement TryCollect<T> where T is a tuple with size 1, 2, .., 10\n\nimpl_try_collect_tuple!(A A A A A A A A A A);\n", "file_path": "vrp-scientific/src/common/mod.rs", "rank": 45, "score": 52006.32571272369 }, { "content": "\n\n /// Gets activity operation time range in seconds since Unix epoch.\n\n fn get_activity_time(&self, stop: &Stop, activity: &Activity) -> TimeWindow {\n\n let time = activity\n\n .time\n\n .clone()\n\n .unwrap_or_else(|| Interval { start: stop.time.arrival.clone(), end: stop.time.departure.clone() });\n\n\n\n TimeWindow::new(parse_time(&time.start), parse_time(&time.end))\n\n }\n\n\n\n /// Gets activity location.\n\n fn get_activity_location(&self, stop: &Stop, activity: &Activity) -> Location {\n\n activity.location.clone().unwrap_or_else(|| stop.location.clone())\n\n }\n\n\n\n /// Gets vehicle shift where activity is used.\n\n fn get_vehicle_shift(&self, tour: &Tour) -> Result<VehicleShift, String> {\n\n let tour_time = TimeWindow::new(\n\n parse_time(\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 46, "score": 52005.5031711622 }, { "content": "//! The *solver* module contains basic building blocks for a metaheuristic among with the default\n\n//! implementation.\n\n//!\n\n//! # Metaheuristic\n\n//!\n\n//! A metaheuristic is a high-level algorithmic framework that provides a set of guidelines or strategies\n\n//! to develop heuristic optimization algorithms. Examples of metaheuristics include genetic/evolutionary\n\n//! algorithms, tabu search, simulated annealing, variable neighborhood search, (adaptive) large\n\n//! neighborhood search, ant colony optimization, etc.\n\n//!\n\n//! The default implementation can be roughly described as \"*Multi-objective Parthenogenesis based\n\n//! Evolutionary Algorithm with Ruin and Recreate Mutation Operator*\".\n\n//!\n\n//! # Multi-objective decision maker\n\n//!\n\n//! 
Most VRPs, frequently used to model real cases, are set up with a single objective (e.g. minimizing\n\n//! the cost of the solution), however the majority of the problems encountered in logistics industry,\n\n//! are multi-objective in nature as the complexity of real-life logistics planning often cannot be\n\n//! reduced to cost only. Such non-cost factors are:\n\n//!\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 47, "score": 52005.329101725336 }, { "content": " pub fn new(code: String, cause: String, action: String) -> Self {\n\n Self { code, cause, action, details: None }\n\n }\n\n\n\n /// Creates a new instance of `FormatError` action.\n\n pub fn new_with_details(code: String, cause: String, action: String, details: String) -> Self {\n\n Self { code, cause, action, details: Some(details) }\n\n }\n\n\n\n /// Serializes error into json.\n\n pub fn to_json(&self) -> String {\n\n let mut buffer = String::new();\n\n let writer = unsafe { BufWriter::new(buffer.as_mut_vec()) };\n\n serde_json::to_writer_pretty(writer, &self).unwrap();\n\n\n\n buffer\n\n }\n\n\n\n /// Formats multiple format errors into string.\n\n pub fn format_many(errors: &[Self], separator: &str) -> String {\n", "file_path": "vrp-pragmatic/src/format/mod.rs", "rank": 48, "score": 52002.61995313888 }, { "content": " }\n\n\n\n /// Creates a new `[Location]` as index reference.\n\n pub fn new_reference(index: usize) -> Self {\n\n Self::Reference { index }\n\n }\n\n\n\n /// Returns lat lng if location is coordinate, panics otherwise.\n\n pub fn to_lat_lng(&self) -> (f64, f64) {\n\n match self {\n\n Self::Coordinate { lat, lng } => (*lat, *lng),\n\n _ => unreachable!(\"expect coordinate\"),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Location {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Location::Coordinate { lat, lng } => write!(f, \"lat={}, lng={}\", lat, lng),\n", "file_path": "vrp-pragmatic/src/format/mod.rs", "rank": 49, "score": 52002.61995313888 }, 
{ "content": "\n\n // NOTE select the first best individual from population\n\n let (insertion_ctx, _) = population.ranked().next().ok_or_else(|| \"cannot find any solution\".to_string())?;\n\n let solution = insertion_ctx.solution.to_solution(self.problem.extras.clone());\n\n let cost = self.problem.objective.fitness(insertion_ctx);\n\n\n\n Ok((solution, cost, metrics))\n\n }\n\n}\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 50, "score": 52002.61995313888 }, { "content": " errors.iter().map(|err| err.to_string()).collect::<Vec<_>>().join(separator)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for FormatError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}, cause: '{}', action: '{}'.\", self.code, self.cause, self.action)\n\n }\n\n}\n\n\n\nconst TIME_CONSTRAINT_CODE: i32 = 1;\n\nconst DISTANCE_LIMIT_CONSTRAINT_CODE: i32 = 2;\n\nconst DURATION_LIMIT_CONSTRAINT_CODE: i32 = 3;\n\nconst CAPACITY_CONSTRAINT_CODE: i32 = 4;\n\nconst BREAK_CONSTRAINT_CODE: i32 = 5;\n\nconst SKILL_CONSTRAINT_CODE: i32 = 6;\n\nconst LOCKING_CONSTRAINT_CODE: i32 = 7;\n\nconst REACHABLE_CONSTRAINT_CODE: i32 = 8;\n\nconst PRIORITY_CONSTRAINT_CODE: i32 = 9;\n\nconst AREA_CONSTRAINT_CODE: i32 = 10;\n", "file_path": "vrp-pragmatic/src/format/mod.rs", "rank": 51, "score": 52002.61995313888 }, { "content": "const DISPATCH_CONSTRAINT_CODE: i32 = 11;\n\nconst TOUR_SIZE_CONSTRAINT_CODE: i32 = 12;\n\n\n\npub(crate) const UNASSIGNABLE_ROUTE_KEY: i32 = 100;\n\n\n\n/// An job id to job index.\n\npub type JobIndex = HashMap<String, CoreJob>;\n\n\n\n/// Gets job index from core problem definition.\n\npub(crate) fn get_job_index(problem: &CoreProblem) -> &JobIndex {\n\n problem\n\n .extras\n\n .get(\"job_index\")\n\n .and_then(|s| s.downcast_ref::<JobIndex>())\n\n .unwrap_or_else(|| panic!(\"cannot get job index!\"))\n\n}\n\n\n\n/// Gets coord index from core problem definition.\n\npub(crate) fn get_coord_index(problem: &CoreProblem) -> &CoordIndex {\n\n 
problem\n\n .extras\n\n .get(\"coord_index\")\n\n .and_then(|s| s.downcast_ref::<CoordIndex>())\n\n .unwrap_or_else(|| panic!(\"Cannot get coord index!\"))\n\n}\n", "file_path": "vrp-pragmatic/src/format/mod.rs", "rank": 52, "score": 52002.61995313888 }, { "content": " matrices,\n\n coord_index: CoordIndex::new(problem),\n\n job_index: problem.plan.jobs.iter().map(|job| (job.id.clone(), job.clone())).collect(),\n\n }\n\n }\n\n\n\n /// Validates problem on set of rules.\n\n pub fn validate(&self) -> Result<(), Vec<FormatError>> {\n\n let errors = validate_jobs(&self)\n\n .err()\n\n .into_iter()\n\n .chain(validate_vehicles(&self).err().into_iter())\n\n .chain(validate_objectives(&self).err().into_iter())\n\n .chain(validate_routing(&self).err().into_iter())\n\n .chain(validate_relations(&self).err().into_iter())\n\n .flatten()\n\n .collect::<Vec<_>>();\n\n\n\n if errors.is_empty() {\n\n Ok(())\n", "file_path": "vrp-pragmatic/src/validation/mod.rs", "rank": 53, "score": 52002.61995313888 }, { "content": "\n\n/// Keys for balancing objectives.\n\nconst BALANCE_MAX_LOAD_KEY: i32 = 20;\n\nconst BALANCE_ACTIVITY_KEY: i32 = 21;\n\nconst BALANCE_DISTANCE_KEY: i32 = 22;\n\nconst BALANCE_DURATION_KEY: i32 = 23;\n\n\n\n/// A type which encapsulates information needed to perform solution refinement process.\n\npub struct RefinementContext {\n\n /// Original problem definition.\n\n pub problem: Arc<Problem>,\n\n\n\n /// A population which tracks best discovered solutions.\n\n pub population: Box<dyn Population + Sync + Send>,\n\n\n\n /// A collection of data associated with refinement process.\n\n pub state: HashMap<String, Box<dyn Any + Sync + Send>>,\n\n\n\n /// A quota for refinement process.\n\n pub quota: Option<Arc<dyn Quota + Send + Sync>>,\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 54, "score": 52002.61995313888 }, { "content": " Location::Reference { index } => write!(f, \"index={}\", index),\n\n }\n\n }\n\n}\n\n\n\n/// A format error.\n\n#[derive(Clone, 
Debug, Serialize)]\n\npub struct FormatError {\n\n /// An error code in registry.\n\n pub code: String,\n\n /// A possible error cause.\n\n pub cause: String,\n\n /// An action to take in order to recover from error.\n\n pub action: String,\n\n /// A details about exception.\n\n pub details: Option<String>,\n\n}\n\n\n\nimpl FormatError {\n\n /// Creates a new instance of `FormatError` action without details.\n", "file_path": "vrp-pragmatic/src/format/mod.rs", "rank": 55, "score": 52002.61995313888 }, { "content": " // TODO match reload's time windows\n\n .and_then(|reload| reload.iter().find(|r| r.location == location && r.tag == activity.job_tag))\n\n .map(|r| ActivityType::Reload(r.clone()))\n\n .ok_or_else(|| format!(\"cannot find reload for tour '{}'\", tour.vehicle_id)),\n\n \"dispatch\" => shift\n\n .dispatch\n\n .as_ref()\n\n .and_then(|dispatch| dispatch.iter().find(|d| d.location == location))\n\n .map(|d| ActivityType::Depot(d.clone()))\n\n .ok_or_else(|| format!(\"cannot find dispatch for tour '{}'\", tour.vehicle_id)),\n\n _ => Err(format!(\"unknown activity type: '{}'\", activity.activity_type)),\n\n }\n\n }\n\n\n\n fn get_job_by_id(&self, job_id: &str) -> Option<&Job> {\n\n self.problem.plan.jobs.iter().find(|job| job.id == job_id)\n\n }\n\n\n\n fn visit_job<F1, F2, R>(\n\n &self,\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 56, "score": 52002.61995313888 }, { "content": " .as_ref()\n\n .and_then(|breaks| {\n\n breaks.iter().find(|b| match &b.time {\n\n VehicleBreakTime::TimeWindow(tw) => parse_time_window(tw).intersects(&time),\n\n VehicleBreakTime::TimeOffset(offset) => {\n\n assert_eq!(offset.len(), 2);\n\n // NOTE make expected time window wider due to reschedule departure\n\n let stops = &tour.stops;\n\n let start = parse_time(&stops.first().unwrap().time.arrival) + *offset.first().unwrap();\n\n let end = parse_time(&stops.first().unwrap().time.departure) + *offset.last().unwrap();\n\n\n\n TimeWindow::new(start, 
end).intersects(&time)\n\n }\n\n })\n\n })\n\n .map(|b| ActivityType::Break(b.clone()))\n\n .ok_or_else(|| format!(\"cannot find break for tour '{}'\", tour.vehicle_id)),\n\n \"reload\" => shift\n\n .reloads\n\n .as_ref()\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 57, "score": 52002.61995313888 }, { "content": "//! some intermediate.\n\n//! - **main loop end**: exit evolution loop when one of termination criteria are met. See [`termination`]\n\n//! module for details.\n\n//!\n\n//! As there is no crossover operator involved and offspring is produced from one parent, this algorithm\n\n//! can be characterized as parthenogenesis based EA. This approach eliminates design of feasible\n\n//! crossover operator which is a challenging task in case of VRP.\n\n//!\n\n//! [`termination`]: termination/index.html\n\n//!\n\n//! # Ruin and Recreate principle\n\n//!\n\n//! A **ruin and recreate** principle is introduced by [`Schrimpf et al. (2000)`] and key idea here\n\n//! is to ruin a quite large fraction of the solution and try to restore the solution as best as it\n\n//! is possible in order to get a new solution better than the previous one. Original algorithm can\n\n//! be described as a large neighborhood search that combines elements of simulated annealing and\n\n//! threshold-accepting algorithms, but this crate only reuses ruin/recreate idea as a mutation\n\n//! operator.\n\n//!\n\n//! 
Implementation blocks can be found in [`mutation`] module.\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 58, "score": 52002.61995313888 }, { "content": " fn get_stop_activity_types(&self, stop: &Stop) -> Vec<String> {\n\n stop.activities.iter().map(|a| a.activity_type.clone()).collect()\n\n }\n\n\n\n /// Gets wrapped activity type.\n\n fn get_activity_type(&self, tour: &Tour, stop: &Stop, activity: &Activity) -> Result<ActivityType, String> {\n\n let shift = self.get_vehicle_shift(tour)?;\n\n let time = self.get_activity_time(stop, activity);\n\n let location = self.get_activity_location(stop, activity);\n\n\n\n match activity.activity_type.as_str() {\n\n \"departure\" | \"arrival\" => Ok(ActivityType::Terminal),\n\n \"pickup\" | \"delivery\" | \"service\" | \"replacement\" => {\n\n self.job_map.get(activity.job_id.as_str()).map_or_else(\n\n || Err(format!(\"cannot find job with id '{}'\", activity.job_id)),\n\n |job| Ok(ActivityType::Job(Box::new(job.clone()))),\n\n )\n\n }\n\n \"break\" => shift\n\n .breaks\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 59, "score": 52002.61995313888 }, { "content": " activity: &Activity,\n\n activity_type: &ActivityType,\n\n job_visitor: F1,\n\n other_visitor: F2,\n\n ) -> Result<R, String>\n\n where\n\n F1: Fn(&Job, &JobTask) -> R,\n\n F2: Fn() -> R,\n\n {\n\n match activity_type {\n\n ActivityType::Job(job) => {\n\n let pickups = job_task_size(&job.pickups);\n\n let deliveries = job_task_size(&job.deliveries);\n\n let tasks = pickups + deliveries + job_task_size(&job.services) + job_task_size(&job.replacements);\n\n\n\n if tasks < 2 || (tasks == 2 && pickups == 1 && deliveries == 1) {\n\n match_job_task(activity.activity_type.as_str(), job, |tasks| tasks.first())\n\n } else {\n\n activity.job_tag.as_ref().ok_or_else(|| {\n\n format!(\"checker requires that multi job activity must have tag: '{}'\", activity.job_id)\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 60, "score": 52002.61995313888 
}, { "content": " } else {\n\n Err(errors)\n\n }\n\n }\n\n\n\n /// Gets list of jobs from the problem.\n\n fn jobs(&self) -> impl Iterator<Item = &Job> {\n\n self.problem.plan.jobs.iter()\n\n }\n\n\n\n /// Gets list of vehicles from the problem.\n\n fn vehicles(&self) -> impl Iterator<Item = &VehicleType> {\n\n self.problem.fleet.vehicles.iter()\n\n }\n\n\n\n /// Gets a flat list of job tasks from the job.\n\n fn tasks(&self, job: &'a Job) -> Vec<&'a JobTask> {\n\n job.pickups\n\n .as_ref()\n\n .iter()\n\n .flat_map(|tasks| tasks.iter())\n\n .chain(job.deliveries.as_ref().iter().flat_map(|tasks| tasks.iter()))\n\n .chain(job.replacements.as_ref().iter().flat_map(|tasks| tasks.iter()))\n\n .chain(job.services.as_ref().iter().flat_map(|tasks| tasks.iter()))\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/validation/mod.rs", "rank": 61, "score": 52002.61995313888 }, { "content": " /// Routing matrices.\n\n pub matrices: Option<Vec<Matrix>>,\n\n /// Solution to be checked\n\n pub solution: Solution,\n\n\n\n job_map: HashMap<String, Job>,\n\n core_problem: Arc<CoreProblem>,\n\n}\n\n\n\n/// Represents all possible activity types.\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 62, "score": 52002.61995313888 }, { "content": " })?;\n\n\n\n match_job_task(activity.activity_type.as_str(), job, |tasks| {\n\n tasks.iter().find(|task| task.tag == activity.job_tag)\n\n })\n\n }\n\n .map(|task| job_visitor(job, task))\n\n }\n\n .ok_or_else(|| \"cannot match activity to job place\".to_string()),\n\n _ => Ok(other_visitor()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 63, "score": 52002.61995313888 }, { "content": " &tour.stops.first().as_ref().ok_or_else(|| \"cannot get first activity\".to_string())?.time.arrival,\n\n ),\n\n parse_time(&tour.stops.last().as_ref().ok_or_else(|| \"cannot get last activity\".to_string())?.time.arrival),\n\n );\n\n\n\n self.get_vehicle(&tour.vehicle_id)?\n\n .shifts\n\n .iter()\n\n 
.find(|shift| {\n\n let shift_time = TimeWindow::new(\n\n parse_time(&shift.start.earliest),\n\n shift.end.as_ref().map_or_else(|| f64::MAX, |place| parse_time(&place.latest)),\n\n );\n\n shift_time.intersects(&tour_time)\n\n })\n\n .cloned()\n\n .ok_or_else(|| format!(\"cannot find shift for tour with vehicle if: '{}'\", tour.vehicle_id))\n\n }\n\n\n\n /// Returns stop's activity type names.\n", "file_path": "vrp-pragmatic/src/checker/mod.rs", "rank": 64, "score": 52002.61995313888 }, { "content": "#[serde(untagged)]\n\npub enum Location {\n\n /// A location type represented by geocoordinate with latitude and longitude.\n\n Coordinate {\n\n /// Latitude.\n\n lat: f64,\n\n /// Longitude.\n\n lng: f64,\n\n },\n\n /// A location type represented by index reference in routing matrix.\n\n Reference {\n\n /// An index in routing matrix.\n\n index: usize,\n\n },\n\n}\n\n\n\nimpl Location {\n\n /// Creates a new `[Location]` as coordinate.\n\n pub fn new_coordinate(lat: f64, lng: f64) -> Self {\n\n Self::Coordinate { lat, lng }\n", "file_path": "vrp-pragmatic/src/format/mod.rs", "rank": 65, "score": 52002.61995313888 }, { "content": "//! - **balancing work across multiple workers**\n\n//! - **minimization or maximization of fleet usage**\n\n//! - **minimization of unassigned jobs**\n\n//!\n\n//! In most of the cases, these additional factors are contradicting to the cost minimization\n\n//! objective which, in fact, leads to nontrivial multi-objective optimization problem, where no\n\n//! single solution exists that simultaneously optimizes each objective.\n\n//!\n\n//! That's why the concept of dominance is introduced: a solution is said to dominate another\n\n//! solution if its quality is at least as good on every objective and better on at least one.\n\n//! The set of all non-dominated solutions of an optimization problem is called the Pareto set and\n\n//! the projection of this set onto the objective function space is called the Pareto front.\n\n//!\n\n//! 
The aim of multi-objective metaheuristics is to approximate the Pareto front as closely as\n\n//! possible (Zitzler et al., 2004) and therefore generate a set of mutually non-dominated solutions\n\n//! called the Pareto set approximation.\n\n//!\n\n//! This library utilizes `NSGA-II` algorithm to apply Pareto-based ranking over population in order\n\n//! to find Pareto set approximation. However, that Pareto optimality of the solutions cannot be\n\n//! guaranteed: it is only known that none of the generated solutions dominates the others.\n", "file_path": "vrp-core/src/solver/mod.rs", "rank": 66, "score": 52002.61995313888 }, { "content": "//! This module contains building blocks for constructive heuristics.\n\n//!\n\n//! # Insertion heuristic\n\n//!\n\n//! Insertion heuristic is a popular method to find quickly a **feasible** solution, but without a\n\n//! guarantee of good quality. Essentially, it constructs the solution by repeatedly inserting an\n\n//! unrouted customer into a partially constructed route or as a first customer in an additional\n\n//! route.\n\n//!\n\n\n\n/// Specifies a computational quota for solving VRP.\n\n/// The main purpose is to allow to stop algorithm in reaction to external events such\n\n/// as user cancellation, timer, etc.\n", "file_path": "vrp-core/src/construction/mod.rs", "rank": 67, "score": 52002.61995313888 }, { "content": "use crate::utils::compare_floats;\n\nuse std::cmp::Ordering;\n\nuse std::cmp::Ordering::Equal;\n\n\n\n/// Returns coefficient variation.\n", "file_path": "vrp-core/src/algorithms/statistics/basics.rs", "rank": 68, "score": 51472.50819462679 }, { "content": "//! 
Specifies population types.\n\n\n\nmod elitism;\n\npub use self::elitism::Elitism;\n\n\n\nmod greedy;\n\npub use self::greedy::Greedy;\n\n\n\nmod rosomaxa;\n\npub use self::rosomaxa::Rosomaxa;\n\npub use self::rosomaxa::RosomaxaConfig;\n\n\n\nuse crate::construction::heuristics::InsertionContext;\n\nuse crate::models::Problem;\n\nuse crate::solver::Statistics;\n\nuse crate::utils::{compare_floats, Environment};\n\nuse std::cmp::Ordering;\n\nuse std::fmt::Display;\n\nuse std::sync::Arc;\n\n\n", "file_path": "vrp-core/src/solver/population/mod.rs", "rank": 69, "score": 51015.02394536569 }, { "content": "//! This module contains a hyper-heuristic logic.\n\n\n\nmod dynamic_selective;\n\npub use self::dynamic_selective::*;\n\n\n\nmod static_selective;\n\npub use self::static_selective::*;\n\n\n\nuse crate::models::Problem;\n\nuse crate::solver::population::Individual;\n\nuse crate::solver::{RefinementContext, Statistics};\n\nuse crate::utils::{Environment, Random};\n\nuse hashbrown::HashMap;\n\nuse std::sync::Arc;\n\n\n\n/// Represents a hyper heuristic functionality.\n", "file_path": "vrp-core/src/solver/hyper/mod.rs", "rank": 70, "score": 51014.548395417936 }, { "content": "#[cfg(test)]\n\n#[path = \"../../../tests/unit/solver/evolution/evolution_test.rs\"]\n\nmod evolution_test;\n\n\n\nuse crate::construction::heuristics::InsertionContext;\n\nuse crate::solver::hyper::HyperHeuristic;\n\nuse crate::solver::telemetry::Telemetry;\n\nuse crate::solver::termination::*;\n\nuse crate::solver::{Metrics, Population, RefinementContext};\n\nuse crate::utils::Timer;\n\n\n\nmod config;\n\npub use self::config::*;\n\n\n\nmod run_simple;\n\npub use self::run_simple::RunSimple;\n\n\n\n/// Defines evolution result type.\n\npub type EvolutionResult = Result<(Box<dyn Population + Send + Sync>, Option<Metrics>), String>;\n\n\n\n/// An evolution algorithm strategy.\n", "file_path": "vrp-core/src/solver/evolution/mod.rs", "rank": 71, "score": 51013.811349306176 }, { "content": "pub use 
self::pipeline::*;\n\n\n\nmod area;\n\npub use self::area::*;\n\n\n\nmod transport;\n\npub use self::transport::*;\n\n\n\nmod capacity;\n\npub use self::capacity::*;\n\n\n\nmod locking;\n\npub use self::locking::*;\n\n\n\nmod tour_size;\n\npub use self::tour_size::*;\n\n\n\nmod conditional;\n\npub use self::conditional::*;\n\n\n\nmod fleet_usage;\n\npub use self::fleet_usage::*;\n\n\n\nuse crate::construction::heuristics::RouteContext;\n\nuse crate::models::problem::TransportCost;\n\n\n\n/// Updates route schedule.\n", "file_path": "vrp-core/src/construction/constraints/mod.rs", "rank": 72, "score": 51012.46950291474 }, { "content": "//! A generalized insertion heuristic implementation.\n\n//!\n\n//! # Design\n\n//!\n\n\n\nmod context;\n\npub use self::context::*;\n\n\n\nmod evaluators;\n\npub use self::evaluators::*;\n\n\n\nmod factories;\n\n\n\nmod insertions;\n\npub use self::insertions::*;\n\n\n\nmod metrics;\n\npub use self::metrics::*;\n\n\n\nmod selectors;\n\npub use self::selectors::*;\n", "file_path": "vrp-core/src/construction/heuristics/mod.rs", "rank": 73, "score": 51012.43848950546 }, { "content": "//!\n\n\n\nmod crowding_distance;\n\nuse self::crowding_distance::*;\n\n\n\nmod non_dominated_sort;\n\nuse self::non_dominated_sort::*;\n\n\n\nmod nsga2_sort;\n\npub use self::nsga2_sort::select_and_rank;\n\n\n\nmod objective;\n\npub use self::objective::*;\n", "file_path": "vrp-core/src/algorithms/nsga2/mod.rs", "rank": 74, "score": 51012.02594848188 }, { "content": "//! 
Specifies logic to create a \"pragmatic\" solution and write it into json format.\n\n\n\nmod model;\n\npub use self::model::*;\n\n\n\npub(crate) mod activity_matcher;\n\n\n\nmod geo_serializer;\n\npub use self::geo_serializer::serialize_solution_as_geojson;\n\n\n\nmod initial_reader;\n\npub use self::initial_reader::read_init_solution;\n\n\n\nmod extensions;\n\n\n\nmod writer;\n\npub use self::writer::create_solution;\n\npub use self::writer::PragmaticSolution;\n\n\n\nuse super::*;\n\n\n", "file_path": "vrp-pragmatic/src/format/solution/mod.rs", "rank": 75, "score": 51011.97446422801 }, { "content": "//! Common models.\n\n\n\nmod load;\n\npub use self::load::*;\n\n\n\nmod primitives;\n\npub use self::primitives::*;\n\n\n\nmod domain;\n\npub use self::domain::*;\n", "file_path": "vrp-core/src/models/common/mod.rs", "rank": 76, "score": 51011.971211444616 }, { "content": "//! Solution domain models.\n\n\n\nmod route;\n\npub use self::route::Activity;\n\npub use self::route::Place;\n\npub use self::route::Route;\n\n\n\nmod registry;\n\npub use self::registry::Registry;\n\n\n\nmod tour;\n\npub use self::tour::Tour;\n", "file_path": "vrp-core/src/models/solution/mod.rs", "rank": 77, "score": 51011.87090944804 }, { "content": "//! 
The objective module specifies various objective functions for solving Vehicle Routing Problem.\n\n\n\nuse crate::construction::heuristics::InsertionContext;\n\nuse std::cmp::Ordering;\n\n\n\nmod generic_value;\n\npub(crate) use self::generic_value::GenericValue;\n\n\n\nmod total_routes;\n\npub use self::total_routes::TotalRoutes;\n\n\n\nmod total_transport;\n\npub use self::total_transport::*;\n\n\n\nmod total_unassigned_jobs;\n\npub use self::total_unassigned_jobs::TotalUnassignedJobs;\n\n\n\nmod total_value;\n\npub use self::total_value::TotalValue;\n\n\n\nmod work_balance;\n\npub use self::work_balance::WorkBalance;\n", "file_path": "vrp-core/src/solver/objectives/mod.rs", "rank": 78, "score": 51011.74118051608 }, { "content": "mod utils;\n\npub(crate) use self::utils::*;\n\n\n\nmod decompose_search;\n\npub use self::decompose_search::DecomposeSearch;\n\n\n\nmod local_search;\n\npub use self::local_search::LocalSearch;\n\n\n\nmod ruin_recreate;\n\npub use self::ruin_recreate::RuinAndRecreate;\n\n\n\n/// A trait which defines mutation behavior.\n", "file_path": "vrp-core/src/solver/mutation/mod.rs", "rank": 79, "score": 51011.67589592207 }, { "content": "//! Provides customized implementation of Growing Self Organizing Map.\n\n\n\nuse std::fmt::Display;\n\n\n\nmod network;\n\npub use self::network::*;\n\n\n\nmod node;\n\npub use self::node::*;\n\n\n\nmod state;\n\npub use self::state::*;\n\n\n\n/// Represents an input for network.\n", "file_path": "vrp-core/src/algorithms/gsom/mod.rs", "rank": 80, "score": 51011.67224775042 }, { "content": "//! 
Generate command helpers.\n\n\n\nmod plan;\n\nuse self::plan::generate_plan;\n\n\n\nmod fleet;\n\nuse self::fleet::generate_fleet;\n\n\n\nmod prototype;\n\nuse self::prototype::generate_from_prototype;\n\n\n\nuse crate::extensions::import::deserialize_hre_problem;\n\nuse std::io::{BufReader, Read};\n\nuse vrp_core::utils::{DefaultRandom, Random};\n\nuse vrp_pragmatic::format::problem::*;\n\nuse vrp_pragmatic::format::FormatError;\n\n\n\n/// Generates a pragmatic problem.\n", "file_path": "vrp-cli/src/extensions/generate/mod.rs", "rank": 81, "score": 51011.33498238422 }, { "content": "//! Import command helpers\n\n\n\nmod csv;\n\npub use self::csv::*;\n\n\n\nmod hre;\n\npub use self::hre::*;\n\n\n\nuse std::io::{BufReader, Read};\n\nuse vrp_pragmatic::format::problem::Problem;\n\n\n\n/// Imports solution from specific format into pragmatic.\n", "file_path": "vrp-cli/src/extensions/import/mod.rs", "rank": 82, "score": 51011.079111744424 }, { "content": "//! The mutation module specifies building blocks for mutation operator used by evolution.\n\n//!\n\n//! The default implementation of mutation operator is `RuinAndRecreateMutation` which is based on\n\n//! **ruin and recreate** principle, introduced by [`Schrimpf et al. (2000)`].\n\n//!\n\n//! [`Schrimpf et al. (2000)`]: https://www.sciencedirect.com/science/article/pii/S0021999199964136\n\n//!\n\n\n\nuse crate::construction::heuristics::InsertionContext;\n\nuse crate::solver::RefinementContext;\n\n\n\nmod local;\n\npub use self::local::*;\n\n\n\nmod recreate;\n\npub use self::recreate::*;\n\n\n\nmod ruin;\n\npub use self::ruin::*;\n\n\n", "file_path": "vrp-core/src/solver/mutation/mod.rs", "rank": 83, "score": 51010.91894106596 }, { "content": "//! This module contains definition of Markov Decision Process (MDP) model and related reinforcement\n\n//! 
learning logic.\n\n\n\nmod simulator;\n\npub use self::simulator::*;\n\n\n\nmod strategies;\n\npub use self::strategies::*;\n\n\n\nuse crate::utils::{compare_floats, Random};\n\nuse hashbrown::HashMap;\n\nuse std::cmp::Ordering;\n\nuse std::hash::Hash;\n\n\n\n/// Represents a state in MDP.\n", "file_path": "vrp-core/src/algorithms/mdp/mod.rs", "rank": 84, "score": 51010.916553542316 }, { "content": "//! Problem domain models.\n\n\n\nuse crate::algorithms::nsga2::Objective;\n\nuse crate::construction::constraints::ConstraintModule;\n\nuse crate::construction::heuristics::InsertionContext;\n\n\n\nmod costs;\n\npub use self::costs::*;\n\n\n\nmod jobs;\n\npub use self::jobs::*;\n\n\n\nmod fleet;\n\npub use self::fleet::*;\n\n\n\n/// An actual objective on solution type.\n\npub type TargetObjective = Box<dyn Objective<Solution = InsertionContext> + Send + Sync>;\n\n\n\n/// An actual constraint.\n\npub type TargetConstraint = Box<dyn ConstraintModule + Send + Sync>;\n", "file_path": "vrp-core/src/models/problem/mod.rs", "rank": 85, "score": 51010.91894106596 }, { "content": "//! Specifies logic to read problem and routing matrix from json input.\n\n//!\n\n\n\nmod model;\n\npub use self::model::*;\n\n\n\nmod reader;\n\npub use self::reader::PragmaticProblem;\n", "file_path": "vrp-pragmatic/src/format/problem/mod.rs", "rank": 86, "score": 51010.85941914466 }, { "content": "use crate::algorithms::geometry::Point;\n\n\n\npub mod gsom;\n\npub mod nsga2;\n\n\n", "file_path": "vrp-core/tests/helpers/algorithms/mod.rs", "rank": 87, "score": 51010.5386067686 }, { "content": "//! This module contains an implementation of Density-Based Spatial Clustering of Applications with\n\n//! 
Noise (DBSCAN)\n\n\n\n#[cfg(test)]\n\n#[path = \"../../../tests/unit/algorithms/dbscan/dbscan_test.rs\"]\n\nmod dbscan_test;\n\n\n\nuse hashbrown::{HashMap, HashSet};\n\nuse std::hash::Hash;\n\n\n\n/// Represents a cluster of items.\n\npub type Cluster<'a, T> = Vec<&'a T>;\n\n\n\n/// A function which returns neighbors of given item with given epsilon.\n\npub type NeighborhoodFn<'a, T> = Box<dyn Fn(&'a T, f64) -> Box<dyn Iterator<Item = &'a T> + 'a> + 'a>;\n\n\n\n/// Creates clusters of items using DBSCAN (Density-Based Spatial Clustering of Applications with Noise) algorithm.\n\n/// NOTE: `neighborhood_fn` shall return point itself.\n", "file_path": "vrp-core/src/algorithms/dbscan/mod.rs", "rank": 88, "score": 51010.53055515282 }, { "content": "use crate::utils::{Environment, Random};\n\nuse std::sync::Arc;\n\n\n\npub mod random;\n\n\n", "file_path": "vrp-core/tests/helpers/utils/mod.rs", "rank": 89, "score": 51010.423546419166 }, { "content": "//! A module with geometry primitives.\n\n\n\nmod point;\n\npub use self::point::Point;\n", "file_path": "vrp-core/src/algorithms/geometry/mod.rs", "rank": 90, "score": 51009.88636439189 }, { "content": "mod avoid_reload;\n\nmod basic_reload;\n\nmod diff_reload_places;\n\nmod multi_dim_reload;\n\nmod multi_job_reload;\n\nmod multi_vehicle_reload;\n\nmod picks_devs_reload;\n", "file_path": "vrp-pragmatic/tests/features/reload/mod.rs", "rank": 91, "score": 51006.260346089766 }, { "content": "mod basic_multi_job;\n\nmod basic_replacement;\n\nmod basic_service;\n\nmod limited_capacity;\n\nmod single_type_places;\n\nmod unassigned_multi_job;\n", "file_path": "vrp-pragmatic/tests/features/multjob/mod.rs", "rank": 92, "score": 51006.23565796622 }, { "content": "mod basic_break_test;\n\nmod break_with_multiple_locations;\n\nmod interval_break_test;\n\nmod multi_break_test;\n\nmod relation_break_test;\n\nmod skip_break_test;\n", "file_path": "vrp-pragmatic/tests/features/breaks/mod.rs", "rank": 93, "score": 51006.18952627522 }, { 
"content": "mod basic_multi_shift;\n\nmod basic_open_end;\n\nmod multi_dimens;\n\nmod profile_variation;\n\nmod unreachable_jobs;\n", "file_path": "vrp-pragmatic/tests/features/fleet/mod.rs", "rank": 94, "score": 51006.183047104554 }, { "content": "mod any_with_new_jobs;\n\nmod mixed_strict_any;\n\nmod mixed_strict_sequence;\n\nmod strict_with_new_jobs;\n\nmod strict_with_old_jobs;\n", "file_path": "vrp-pragmatic/tests/features/relations/mod.rs", "rank": 95, "score": 51006.16480216949 }, { "content": "pub mod common;\n\npub mod problem;\n\npub mod solution;\n\n\n\npub mod domain;\n", "file_path": "vrp-core/tests/helpers/models/mod.rs", "rank": 96, "score": 51006.12805025553 }, { "content": "mod area_allowance;\n\nmod max_distance;\n\nmod shift_time;\n\nmod tour_size;\n", "file_path": "vrp-pragmatic/tests/features/limits/mod.rs", "rank": 97, "score": 51006.12805025553 }, { "content": "mod basic_multiple_times;\n\nmod basic_waiting_time;\n\nmod strict_leads_to_unassigned;\n\nmod strict_split_into_two_tours;\n", "file_path": "vrp-pragmatic/tests/features/timing/mod.rs", "rank": 98, "score": 51006.01739299598 }, { "content": "use crate::algorithms::geometry::Point;\n\nuse crate::helpers::construction::constraints::create_constraint_pipeline_with_transport;\n\nuse crate::helpers::models::domain::test_random;\n\nuse crate::helpers::models::problem::*;\n\nuse crate::helpers::models::solution::{create_route_with_activities, test_activity_with_job};\n\nuse crate::models::common::Location;\n\nuse crate::models::problem::*;\n\nuse crate::models::solution::{Registry, Route};\n\nuse crate::models::{Problem, Solution};\n\nuse crate::solver::population::create_elitism_population;\n\nuse crate::solver::RefinementContext;\n\nuse crate::utils::Environment;\n\nuse std::sync::Arc;\n\n\n", "file_path": "vrp-core/tests/helpers/solver/mod.rs", "rank": 99, "score": 51005.98979228562 } ]
Rust
lib/engine-universal/src/unwind/windows_x64.rs
DumbMachine/wasmer
84727032e59bee88a4a5b8860cf593f7cfdd0baf
use loupe::{MemoryUsage, MemoryUsageTracker}; use std::collections::HashMap; use wasmer_compiler::CompiledFunctionUnwindInfo; use winapi::um::winnt; pub struct UnwindRegistry { functions: HashMap<usize, Vec<winnt::RUNTIME_FUNCTION>>, published: bool, } impl UnwindRegistry { pub fn new() -> Self { Self { functions: HashMap::new(), published: false, } } pub fn register( &mut self, base_address: usize, func_start: u32, func_len: u32, info: &CompiledFunctionUnwindInfo, ) -> Result<(), String> { if self.published { return Err("unwind registry has already been published".to_string()); } match info { CompiledFunctionUnwindInfo::WindowsX64(_) => {} _ => return Err("unsupported unwind information".to_string()), }; let mut entry = winnt::RUNTIME_FUNCTION::default(); entry.BeginAddress = func_start; entry.EndAddress = func_start + func_len; unsafe { *entry.u.UnwindInfoAddress_mut() = (entry.EndAddress + 3) & !3; } let entries = self .functions .entry(base_address) .or_insert_with(|| Vec::new()); entries.push(entry); Ok(()) } pub fn publish(&mut self, _eh_frame: Option<&[u8]>) -> Result<(), String> { if self.published { return Err("unwind registry has already been published".to_string()); } self.published = true; if !self.functions.is_empty() { for (base_address, functions) in self.functions.iter_mut() { assert_eq!( (functions.as_mut_ptr() as u64) % 4, 0, "function table allocation was not aligned" ); unsafe { if winnt::RtlAddFunctionTable( functions.as_mut_ptr(), functions.len() as u32, *base_address as u64, ) == 0 { return Err("failed to register function tables".to_string()); } } } } Ok(()) } } impl Drop for UnwindRegistry { fn drop(&mut self) { if self.published { unsafe { for functions in self.functions.values_mut() { winnt::RtlDeleteFunctionTable(functions.as_mut_ptr()); } } } } } impl MemoryUsage for UnwindRegistry { fn size_of_val(&self, tracker: &mut dyn MemoryUsageTracker) -> usize { self.functions .iter() .map(|(_, _)| std::mem::size_of::<u64>() * 3) .sum::<usize>() 
+ self.published.size_of_val(tracker) } }
use loupe::{MemoryUsage, MemoryUsageTracker}; use std::collections::HashMap; use wasmer_compiler::CompiledFunctionUnwindInfo; use winapi::um::winnt; pub struct UnwindRegistry { functions: HashMap<usize, Vec<winnt::RUNTIME_FUNCTION>>, published: bool, } impl UnwindRegistry { pub fn new() -> Self { Self { functions: HashMap::new(), published: false, } } pub fn register( &mut self, base_address: usize, func_start: u32, func_len: u32, info: &CompiledFunctionUnwindInfo, ) -> Result<(), String> { if self.published { return Err("unwind registry has already been published".to_string()); } match info { CompiledFunctionUnwindInfo::WindowsX64(_) => {} _ => return Err("unsupported unwind information".to_string()), }; let mut entry = winnt::RUNTIME_FUNCTION::default(); entry.BeginAddress = func_start; entry.EndAddress = func_start + func_len; unsafe { *entry.u.UnwindInfoAddress_mut() = (entry.EndAddress + 3) & !3; } let entries = self .functions .entry(base_address) .or_insert_with(|| Vec::new()); entries.push(entry); Ok(()) } pub fn publish(&mut self, _eh_frame: Option<&[u8]>) -> Result<(), String> { if self.published { return Err("unwind registry has already been published".to_string()); } self.published = true; if !self.functions.is_empty() { for (base_address, functions) in self.functions.iter_mut() { assert_eq!( (functions.as_mut_ptr() as u64) % 4, 0, "function table allocation was not aligned" ); unsafe { if winnt::RtlAddFunctionTable( functions.as_mut_ptr(), functions.len() as u32, *base_address as u64, ) == 0 { return Err("failed to register function tables".to_string()); } } } } Ok(()) } } impl Drop for UnwindRegistry { fn drop(&mut self) { if self.published { unsafe { for functions in self.functions.values_mut() { winnt::RtlDeleteFunctionTable(functions.as_mut_ptr()); } } } } } impl MemoryUsage for UnwindRegistry {
}
fn size_of_val(&self, tracker: &mut dyn MemoryUsageTracker) -> usize { self.functions .iter() .map(|(_, _)| std::mem::size_of::<u64>() * 3) .sum::<usize>() + self.published.size_of_val(tracker) }
function_block-full_function
[ { "content": "pub fn get_emscripten_table_size(module: &Module) -> Result<(u32, Option<u32>), String> {\n\n if let Some(import) = module.imports().tables().next() {\n\n let ty = import.ty();\n\n Ok((ty.minimum, ty.maximum))\n\n } else {\n\n Err(\"Emscripten requires at least one imported table\".to_string())\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/utils.rs", "rank": 0, "score": 436921.66277419473 }, { "content": "/// Reads values written by `-s EMIT_EMSCRIPTEN_METADATA=1`\n\n/// Assumes values start from the end in this order:\n\n/// Last export: Dynamic Base\n\n/// Second-to-Last export: Dynamic top pointer\n\npub fn get_emscripten_metadata(module: &Module) -> Result<Option<(u32, u32)>, String> {\n\n let max_idx = match module\n\n .info()\n\n .global_initializers\n\n .iter()\n\n .map(|(k, _)| k)\n\n .max()\n\n {\n\n Some(x) => x,\n\n None => return Ok(None),\n\n };\n\n\n\n let snd_max_idx = match module\n\n .info()\n\n .global_initializers\n\n .iter()\n\n .map(|(k, _)| k)\n\n .filter(|k| *k != max_idx)\n\n .max()\n\n {\n", "file_path": "lib/emscripten/src/utils.rs", "rank": 1, "score": 396964.07072529866 }, { "content": "/// Parses an environment variable.\n\npub fn parse_envvar(entry: &str) -> Result<(String, String)> {\n\n let entry = entry.trim();\n\n\n\n match entry.find('=') {\n\n None => bail!(\n\n \"Environment variable must be of the form `<name>=<value>`; found `{}`\",\n\n &entry\n\n ),\n\n\n\n Some(0) => bail!(\n\n \"Environment variable is not well formed, the `name` is missing in `<name>=<value>`; got `{}`\",\n\n &entry\n\n ),\n\n\n\n Some(position) if position == entry.len() - 1 => bail!(\n\n \"Environment variable is not well formed, the `value` is missing in `<name>=<value>`; got `{}`\",\n\n &entry\n\n ),\n\n\n\n Some(position) => Ok((entry[..position].into(), entry[position + 1..].into())),\n", "file_path": "lib/cli/src/utils.rs", "rank": 2, "score": 392247.53965128656 }, { "content": "pub fn static_alloc(static_top: &mut u32, size: 
u32) -> u32 {\n\n let old_static_top = *static_top;\n\n // NOTE: The `4294967280` is a u32 conversion of -16 as gotten from emscripten.\n\n *static_top = (*static_top + size + 15) & 4294967280;\n\n old_static_top\n\n}\n", "file_path": "lib/emscripten/src/storage.rs", "rank": 3, "score": 390515.0242477732 }, { "content": "pub fn initialize(fs: &mut WasiFs) -> Result<(), String> {\n\n let frame_buffer_file = Box::new(FrameBuffer {\n\n fb_type: FrameBufferFileType::Buffer,\n\n cursor: 0,\n\n });\n\n let resolution_file = Box::new(FrameBuffer {\n\n fb_type: FrameBufferFileType::Resolution,\n\n cursor: 0,\n\n });\n\n let draw_file = Box::new(FrameBuffer {\n\n fb_type: FrameBufferFileType::Draw,\n\n cursor: 0,\n\n });\n\n let input_file = Box::new(FrameBuffer {\n\n fb_type: FrameBufferFileType::Input,\n\n cursor: 0,\n\n });\n\n\n\n let base_dir_fd = unsafe {\n\n fs.open_dir_all(\n", "file_path": "lib/wasi-experimental-io-devices/src/lib.rs", "rank": 4, "score": 384296.6983108263 }, { "content": "/// Parses a mapdir from a string\n\npub fn parse_mapdir(entry: &str) -> Result<(String, PathBuf)> {\n\n // We try first splitting by `::`\n\n if let [alias, real_dir] = entry.split(\"::\").collect::<Vec<&str>>()[..] {\n\n retrieve_alias_pathbuf(alias, real_dir)\n\n }\n\n // And then we try splitting by `:` (for compatibility with previous API)\n\n else if let [alias, real_dir] = entry.split(':').collect::<Vec<&str>>()[..] {\n\n retrieve_alias_pathbuf(alias, real_dir)\n\n } else {\n\n bail!(\n\n \"Directory mappings must consist of two paths separate by a `::` or `:`. 
Found {}\",\n\n &entry\n\n )\n\n }\n\n}\n\n\n", "file_path": "lib/cli/src/utils.rs", "rank": 5, "score": 360465.18100381835 }, { "content": "pub fn get_emscripten_memory_size(module: &Module) -> Result<(Pages, Option<Pages>, bool), String> {\n\n if let Some(import) = module.imports().memories().next() {\n\n let ty = import.ty();\n\n Ok((ty.minimum, ty.maximum, ty.shared))\n\n } else {\n\n Err(\"Emscripten requires at least one imported memory\".to_string())\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/utils.rs", "rank": 6, "score": 354883.598577168 }, { "content": "fn set_table_item(table: &VMTable, item_index: u32, item: &Function) -> Result<(), RuntimeError> {\n\n table.table.set(item_index, item).map_err(|e| e.into())\n\n}\n\n\n", "file_path": "lib/api/src/js/externals/table.rs", "rank": 7, "score": 349446.43450167903 }, { "content": "/// Returns whether the `pc`, according to globally registered information,\n\n/// is a wasm trap or not.\n\npub fn is_wasm_pc(pc: usize) -> bool {\n\n let frame_info = FRAME_INFO.read().unwrap();\n\n let module_info = frame_info.module_info(pc);\n\n module_info.is_some()\n\n}\n\n\n\n/// An RAII structure used to unregister a module's frame information when the\n\n/// module is destroyed.\n\n#[derive(MemoryUsage)]\n\npub struct GlobalFrameInfoRegistration {\n\n /// The key that will be removed from the global `ranges` map when this is\n\n /// dropped.\n\n key: usize,\n\n}\n\n\n", "file_path": "lib/engine/src/trap/frame_info.rs", "rank": 8, "score": 342872.80523227673 }, { "content": "/// Parses the Start section of the wasm module.\n\npub fn parse_start_section(index: u32, environ: &mut ModuleEnvironment) -> WasmResult<()> {\n\n environ.declare_start_function(FunctionIndex::from_u32(index))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/compiler/src/translator/sections.rs", "rank": 9, "score": 336259.0559819252 }, { "content": "fn long_f(a: u32, b: u32, c: u32, d: u32, e: u32, f: u16, g: u64, h: u64, i: u16, j: u32) -> u64 
{\n\n j as u64\n\n + i as u64 * 10\n\n + h * 100\n\n + g * 1000\n\n + f as u64 * 10000\n\n + e as u64 * 100000\n\n + d as u64 * 1000000\n\n + c as u64 * 10000000\n\n + b as u64 * 100000000\n\n + a as u64 * 1000000000\n\n}\n\n\n", "file_path": "tests/compilers/native_functions.rs", "rank": 10, "score": 334053.1676983796 }, { "content": "pub fn align_memory(ptr: u32) -> u32 {\n\n (ptr + 15) & !15\n\n}\n\n\n", "file_path": "lib/emscripten/src/storage.rs", "rank": 11, "score": 329137.7122824007 }, { "content": "/// Subroutine to instantiate the loggers\n\npub fn set_up_logging(verbose: u8) -> Result<(), String> {\n\n let colors_line = ColoredLevelConfig::new()\n\n .error(Color::Red)\n\n .warn(Color::Yellow)\n\n .trace(Color::BrightBlack);\n\n let should_color = wasmer_should_print_color();\n\n\n\n let colors_level = colors_line.info(Color::Green);\n\n let level = match verbose {\n\n 1 => DebugLevel::Debug,\n\n _ => DebugLevel::Trace,\n\n };\n\n let dispatch = fern::Dispatch::new()\n\n .level(level)\n\n .chain({\n\n let base = if should_color {\n\n fern::Dispatch::new().format(move |out, message, record| {\n\n let time = time::SystemTime::now().duration_since(time::UNIX_EPOCH).expect(\"Can't get time\");\n\n out.finish(format_args!(\n\n \"{color_line}[{seconds}.{millis} {level} {target}{color_line}]{ansi_close} {message}\",\n", "file_path": "lib/cli/src/logging.rs", "rank": 12, "score": 328224.85014712485 }, { "content": "#[allow(dead_code)] // it's used in `env/windows/mod.rs`.\n\npub fn read_string_from_wasm(memory: &Memory, offset: u32) -> String {\n\n let v: Vec<u8> = memory.view()[(offset as usize)..]\n\n .iter()\n\n .map(|cell| cell.get())\n\n .take_while(|&byte| byte != 0)\n\n .collect();\n\n String::from_utf8_lossy(&v).to_owned().to_string()\n\n}\n\n\n", "file_path": "lib/emscripten/src/utils.rs", "rank": 13, "score": 328173.3149221749 }, { "content": "fn deref_table_bound(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {\n\n let mut x: 
Vec<usize> = match TableIndex::new(idx).local_or_import(info) {\n\n LocalOrImport::Local(idx) => vec![Ctx::offset_tables() as usize, idx.index() * 8, 8],\n\n LocalOrImport::Import(idx) => {\n\n vec![Ctx::offset_imported_tables() as usize, idx.index() * 8, 8]\n\n }\n\n };\n\n if deref_into_value {\n\n x.push(0);\n\n }\n\n x\n\n}\n", "file_path": "lib/compiler-llvm/src/translator/stackmap.rs", "rank": 14, "score": 325914.36887417745 }, { "content": "fn deref_table_base(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {\n\n let mut x: Vec<usize> = match TableIndex::new(idx).local_or_import(info) {\n\n LocalOrImport::Local(idx) => vec![Ctx::offset_tables() as usize, idx.index() * 8, 0],\n\n LocalOrImport::Import(idx) => {\n\n vec![Ctx::offset_imported_tables() as usize, idx.index() * 8, 0]\n\n }\n\n };\n\n if deref_into_value {\n\n x.push(0);\n\n }\n\n x\n\n}\n\n\n", "file_path": "lib/compiler-llvm/src/translator/stackmap.rs", "rank": 15, "score": 325914.36887417745 }, { "content": "#[allow(clippy::cast_ptr_alignment)]\n\npub fn _localtime_r(ctx: &EmEnv, time_p: u32, result: u32) -> c_int {\n\n debug!(\"emscripten::_localtime_r {}\", time_p);\n\n\n\n // NOTE: emscripten seems to want tzset() called in this function\n\n // https://stackoverflow.com/questions/19170721/real-time-awareness-of-timezone-change-in-localtime-vs-localtime-r\n\n\n\n unsafe {\n\n let seconds = emscripten_memory_pointer!(ctx.memory(0), time_p) as *const i32;\n\n let timespec = time::OffsetDateTime::from_unix_timestamp_nanos(*seconds as _);\n\n\n\n // debug!(\n\n // \">>>>>>> time = {}, {}, {}, {}, {}, {}, {}, {}\",\n\n // result_tm.tm_sec, result_tm.tm_min, result_tm.tm_hour, result_tm.tm_mday,\n\n // result_tm.tm_mon, result_tm.tm_year, result_tm.tm_wday, result_tm.tm_yday,\n\n // );\n\n\n\n let result_addr = emscripten_memory_pointer!(ctx.memory(0), result) as *mut guest_tm;\n\n\n\n (*result_addr).tm_sec = timespec.second() as _;\n\n (*result_addr).tm_min = timespec.minute() 
as _;\n", "file_path": "lib/emscripten/src/time.rs", "rank": 16, "score": 324880.9855532197 }, { "content": "/// A helper to extract all the `Type` listings of each variable in `params`\n\n/// for only parameters the return true for `is_wasm`, typically paired with\n\n/// `is_wasm_return` or `is_wasm_parameter`.\n\npub fn wasm_param_types(params: &[ir::AbiParam], is_wasm: impl Fn(usize) -> bool) -> Vec<Type> {\n\n let mut ret = Vec::with_capacity(params.len());\n\n for (i, param) in params.iter().enumerate() {\n\n if is_wasm(i) {\n\n ret.push(param.value_type);\n\n }\n\n }\n\n ret\n\n}\n", "file_path": "lib/compiler-cranelift/src/translator/code_translator.rs", "rank": 17, "score": 324148.9975848568 }, { "content": "#[cfg(target_os = \"wasi\")]\n\nfn pread(fd: u32, iovs: &[&mut [u8]], offset: u64) -> u32 {\n\n let mut nread = 0;\n\n let mut processed_iovs = vec![];\n\n\n\n for iov in iovs {\n\n processed_iovs.push(WasiIovec {\n\n buf: iov.as_ptr() as usize as u32,\n\n buf_len: iov.len() as u32,\n\n })\n\n }\n\n\n\n unsafe {\n\n fd_pread(\n\n fd,\n\n processed_iovs.as_ptr() as usize as u32,\n\n processed_iovs.len() as u32,\n\n offset,\n\n &mut nread as *mut u32 as usize as u32,\n\n );\n\n }\n\n nread\n\n}\n\n\n", "file_path": "tests/wasi-wast/wasi/tests/fd_pread.rs", "rank": 18, "score": 321097.79905789776 }, { "content": "/// Check if the provided bytes are wasm-like\n\npub fn is_wasm(bytes: impl AsRef<[u8]>) -> bool {\n\n bytes.as_ref().starts_with(b\"\\0asm\")\n\n}\n", "file_path": "lib/api/src/sys/utils.rs", "rank": 19, "score": 317868.48500372755 }, { "content": "/// Check if the provided bytes are wasm-like\n\npub fn is_wasm(bytes: impl AsRef<[u8]>) -> bool {\n\n bytes.as_ref().starts_with(b\"\\0asm\")\n\n}\n", "file_path": "lib/api/src/js/utils.rs", "rank": 20, "score": 317868.48500372755 }, { "content": "#[cfg(target_os = \"wasi\")]\n\nfn allocate(fd: u32, offset: u64, length: u64) -> u16 {\n\n unsafe { fd_allocate(fd, offset, length) }\n\n}\n\n\n", 
"file_path": "tests/wasi-wast/wasi/tests/fd_allocate.rs", "rank": 21, "score": 313388.44509412127 }, { "content": "/// Call the global constructors for C++ and set up the emscripten environment.\n\n///\n\n/// Note that this function does not completely set up Emscripten to be called.\n\n/// before calling this function, please initialize `Ctx::data` with a pointer\n\n/// to [`EmscriptenData`].\n\npub fn set_up_emscripten(instance: &mut Instance) -> Result<(), RuntimeError> {\n\n // ATINIT\n\n // (used by C++)\n\n if let Ok(func) = instance.exports.get::<Function>(\"globalCtors\") {\n\n func.call(&[])?;\n\n }\n\n\n\n if let Ok(func) = instance\n\n .exports\n\n .get::<Function>(\"___emscripten_environ_constructor\")\n\n {\n\n func.call(&[])?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/emscripten/src/lib.rs", "rank": 22, "score": 313062.74758124544 }, { "content": "/// This function is required to be called before any WebAssembly is entered.\n\n/// This will configure global state such as signal handlers to prepare the\n\n/// process to receive wasm traps.\n\n///\n\n/// This function must not only be called globally once before entering\n\n/// WebAssembly but it must also be called once-per-thread that enters\n\n/// WebAssembly. Currently in wasmer's integration this function is called on\n\n/// creation of a `Store`.\n\n///\n\n/// The `is_wasm_pc` argument is used when a trap happens to determine if a\n\n/// program counter is the pc of an actual wasm trap or not. 
This is then used\n\n/// to disambiguate faults that happen due to wasm and faults that happen due to\n\n/// bugs in Rust or elsewhere.\n\npub fn init_traps(is_wasm_pc: fn(usize) -> bool) {\n\n static INIT: Once = Once::new();\n\n INIT.call_once(|| unsafe {\n\n IS_WASM_PC = is_wasm_pc;\n\n platform_init();\n\n });\n\n}\n\n\n\n/// Raises a user-defined trap immediately.\n\n///\n\n/// This function performs as-if a wasm trap was just executed, only the trap\n\n/// has a dynamic payload associated with it which is user-provided. This trap\n\n/// payload is then returned from `catch_traps` below.\n\n///\n\n/// # Safety\n\n///\n\n/// Only safe to call when wasm code is on the stack, aka `catch_traps` must\n\n/// have been previous called and not yet returned.\n\n/// Additionally no Rust destructors may be on the stack.\n\n/// They will be skipped and not executed.\n", "file_path": "lib/vm/src/trap/traphandlers.rs", "rank": 23, "score": 310247.64073553134 }, { "content": "fn run_add_with_limit(mut config: crate::Config, limit: u64) -> Result<()> {\n\n config\n\n .middlewares\n\n .push(Arc::new(Metering::new(limit, cost_always_one)));\n\n let store = config.store();\n\n let wat = r#\"(module\n\n (func (export \"add\") (param i32 i32) (result i32)\n\n (i32.add (local.get 0)\n\n (local.get 1)))\n\n)\"#;\n\n let module = Module::new(&store, wat).unwrap();\n\n\n\n let import_object = imports! 
{};\n\n\n\n let instance = Instance::new(&module, &import_object)?;\n\n\n\n let f: NativeFunc<(i32, i32), i32> = instance.exports.get_native_function(\"add\")?;\n\n f.call(4, 6)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/compilers/metering.rs", "rank": 24, "score": 310143.97510161414 }, { "content": "/// Extract a valid Rust identifier from the stem of a path.\n\npub fn extract_name(path: impl AsRef<Path>) -> String {\n\n path.as_ref()\n\n .file_stem()\n\n .expect(\"filename should have a stem\")\n\n .to_str()\n\n .expect(\"filename should be representable as a string\")\n\n .replace(\"-\", \"_\")\n\n .replace(\"/\", \"_\")\n\n}\n\n\n", "file_path": "tests/lib/test-generator/src/lib.rs", "rank": 25, "score": 310093.73592390236 }, { "content": "/// emscripten: ___cxa_allocate_exception\n\npub fn ___cxa_allocate_exception(ctx: &EmEnv, size: u32) -> u32 {\n\n debug!(\"emscripten::___cxa_allocate_exception\");\n\n env::call_malloc(ctx, size as _)\n\n}\n\n\n", "file_path": "lib/emscripten/src/exception.rs", "rank": 26, "score": 303366.46075188275 }, { "content": "pub fn call_memalign(ctx: &EmEnv, alignment: u32, size: u32) -> u32 {\n\n if let Some(memalign) = &get_emscripten_data(ctx).memalign_ref() {\n\n memalign.call(alignment, size).unwrap()\n\n } else {\n\n panic!(\"Memalign is set to None\");\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 27, "score": 302204.59824167774 }, { "content": "#[cfg(target_os = \"wasi\")]\n\nfn poll(fds: &[u32], read: &[bool], write: &[bool]) -> Result<Vec<__wasi_event_t>, u16> {\n\n assert!(fds.len() == read.len() && read.len() == write.len());\n\n\n\n let mut in_ = fds\n\n .iter()\n\n .enumerate()\n\n .map(|(i, n)| {\n\n let mut type_ = 0;\n\n if read[i] {\n\n type_ |= __WASI_EVENTTYPE_FD_READ;\n\n }\n\n if write[i] {\n\n type_ |= __WASI_EVENTTYPE_FD_WRITE;\n\n }\n\n __wasi_subscription_t {\n\n userdata: 0x123456,\n\n type_,\n\n u: __wasi_subscription_u {\n\n fd_readwrite: __wasi_subscription_fs_readwrite_t 
{ fd: *n as u32 },\n\n },\n", "file_path": "tests/wasi-wast/wasi/tests/poll_oneoff.rs", "rank": 28, "score": 301444.7772221727 }, { "content": "fn get_function(val: Val) -> Result<Function, RuntimeError> {\n\n match val {\n\n Val::FuncRef(func) => Ok(func.as_ref().unwrap().exported.function.clone().into()),\n\n // Only funcrefs is supported by the spec atm\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n\nimpl Table {\n\n /// Creates a new `Table` with the provided [`TableType`] definition.\n\n ///\n\n /// All the elements in the table will be set to the `init` value.\n\n ///\n\n /// This function will construct the `Table` using the store\n\n /// [`BaseTunables`][crate::js::tunables::BaseTunables].\n\n pub fn new(store: &Store, ty: TableType, init: Val) -> Result<Self, RuntimeError> {\n\n let descriptor = js_sys::Object::new();\n\n js_sys::Reflect::set(&descriptor, &\"initial\".into(), &ty.minimum.into())?;\n\n if let Some(max) = ty.maximum {\n\n js_sys::Reflect::set(&descriptor, &\"maximum\".into(), &max.into())?;\n", "file_path": "lib/api/src/js/externals/table.rs", "rank": 29, "score": 298775.658839615 }, { "content": "fn run_loop(mut config: crate::Config, limit: u64, iter_count: i32) -> Result<()> {\n\n config\n\n .middlewares\n\n .push(Arc::new(Metering::new(limit, cost_always_one)));\n\n let store = config.store();\n\n let wat = r#\"(module\n\n (func (export \"test\") (param i32)\n\n (local i32)\n\n (local.set 1 (i32.const 0))\n\n (loop\n\n (local.get 1)\n\n (i32.const 1)\n\n (i32.add)\n\n (local.tee 1)\n\n (local.get 0)\n\n (i32.ne)\n\n (br_if 0)\n\n )\n\n )\n\n)\"#;\n", "file_path": "tests/compilers/metering.rs", "rank": 30, "score": 291873.01810320996 }, { "content": "pub fn run_wast(mut config: crate::Config, wast_path: &str) -> anyhow::Result<()> {\n\n println!(\"Running wast `{}`\", wast_path);\n\n let try_nan_canonicalization = wast_path.contains(\"nan-canonicalization\");\n\n let mut features = Features::default();\n\n let is_bulkmemory = 
wast_path.contains(\"bulk-memory\");\n\n let is_simd = wast_path.contains(\"simd\");\n\n if is_bulkmemory {\n\n features.bulk_memory(true);\n\n }\n\n if is_simd {\n\n features.simd(true);\n\n }\n\n if config.compiler == crate::Compiler::Singlepass {\n\n features.multi_value(false);\n\n }\n\n config.set_features(features);\n\n config.set_nan_canonicalization(try_nan_canonicalization);\n\n\n\n let store = config.store();\n\n let mut wast = Wast::new_with_spectest(store);\n", "file_path": "tests/compilers/wast.rs", "rank": 31, "score": 288964.4412455892 }, { "content": "/// Registers a new compiled module's frame information.\n\n///\n\n/// This function will register the `names` information for all of the\n\n/// compiled functions within `module`. If the `module` has no functions\n\n/// then `None` will be returned. Otherwise the returned object, when\n\n/// dropped, will be used to unregister all name information from this map.\n\npub fn register(\n\n module: Arc<ModuleInfo>,\n\n finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionExtent>,\n\n frame_infos: PrimaryMap<LocalFunctionIndex, CompiledFunctionFrameInfo>,\n\n) -> Option<GlobalFrameInfoRegistration> {\n\n let mut min = usize::max_value();\n\n let mut max = 0;\n\n let mut functions = BTreeMap::new();\n\n for (\n\n i,\n\n FunctionExtent {\n\n ptr: start,\n\n length: len,\n\n },\n\n ) in finished_functions.iter()\n\n {\n\n let start = **start as usize;\n\n let end = start + len;\n\n min = cmp::min(min, start);\n\n max = cmp::max(max, end);\n", "file_path": "lib/engine/src/trap/frame_info.rs", "rank": 32, "score": 288827.06220149045 }, { "content": "fn deref_global(info: &ModuleInfo, idx: usize, deref_into_value: bool) -> Vec<usize> {\n\n let mut x: Vec<usize> = match GlobalIndex::new(idx).local_or_import(info) {\n\n LocalOrImport::Local(idx) => vec![Ctx::offset_globals() as usize, idx.index() * 8, 0],\n\n LocalOrImport::Import(idx) => {\n\n vec![Ctx::offset_imported_globals() as usize, idx.index() * 8, 
0]\n\n }\n\n };\n\n if deref_into_value {\n\n x.push(0);\n\n }\n\n x\n\n}\n\n\n", "file_path": "lib/compiler-llvm/src/translator/stackmap.rs", "rank": 33, "score": 287130.6681120994 }, { "content": "// From emscripten implementation\n\nfn align_up(mut val: usize, multiple: usize) -> usize {\n\n if val % multiple > 0 {\n\n val += multiple - val % multiple;\n\n }\n\n val\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 34, "score": 278846.72892693116 }, { "content": "pub fn run_basic_dynamic_function(store: &Store, compiler_name: &str, c: &mut Criterion) {\n\n let module = Module::new(&store, BASIC_WAT).unwrap();\n\n let import_object = imports! {\n\n \"env\" => {\n\n \"multiply\" => Function::new_native(&store, |a: i32, b: i32| a * b),\n\n },\n\n };\n\n let instance = Instance::new(&module, &import_object).unwrap();\n\n\n\n let dyn_f: &Function = instance.exports.get(\"add\").unwrap();\n\n c.bench_function(&format!(\"basic dynfunc {}\", compiler_name), |b| {\n\n b.iter(|| {\n\n let dyn_result = black_box(dyn_f.call(&[Val::I32(4), Val::I32(6)]).unwrap());\n\n assert_eq!(dyn_result[0], Val::I32(10));\n\n })\n\n });\n\n\n\n let dyn_f_many: &Function = instance.exports.get(\"add20\").unwrap();\n\n c.bench_function(\n\n &format!(\"basic dynfunc with many args {}\", compiler_name),\n", "file_path": "benches/static_and_dynamic_functions.rs", "rank": 35, "score": 275118.0013496573 }, { "content": "pub fn run_basic_static_function(store: &Store, compiler_name: &str, c: &mut Criterion) {\n\n let module = Module::new(&store, BASIC_WAT).unwrap();\n\n let import_object = imports! 
{\n\n \"env\" => {\n\n \"multiply\" => Function::new_native(&store, |a: i32, b: i32| a * b),\n\n },\n\n };\n\n let instance = Instance::new(&module, &import_object).unwrap();\n\n let dyn_f: &Function = instance.exports.get(\"add\").unwrap();\n\n let f: NativeFunc<(i32, i32), i32> = dyn_f.native().unwrap();\n\n\n\n c.bench_function(&format!(\"basic static func {}\", compiler_name), |b| {\n\n b.iter(|| {\n\n let result = black_box(f.call(4, 6).unwrap());\n\n assert_eq!(result, 10);\n\n })\n\n });\n\n\n\n let dyn_f_many: &Function = instance.exports.get(\"add20\").unwrap();\n\n let f_many: NativeFunc<\n", "file_path": "benches/static_and_dynamic_functions.rs", "rank": 36, "score": 275118.0013496573 }, { "content": "/// Parses the Table section of the wasm module.\n\npub fn parse_table_section(\n\n tables: TableSectionReader,\n\n module_info: &mut ModuleInfoPolyfill,\n\n) -> WasmResult<()> {\n\n module_info.reserve_tables(tables.get_count())?;\n\n\n\n for entry in tables {\n\n let table = entry.map_err(transform_err)?;\n\n module_info.declare_table(TableType {\n\n ty: wptype_to_type(table.element_type).unwrap(),\n\n minimum: table.limits.initial,\n\n maximum: table.limits.maximum,\n\n })?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/api/src/js/module_info_polyfill.rs", "rank": 37, "score": 275117.5565500392 }, { "content": "/// Parses the Function section of the wasm module.\n\npub fn parse_function_section(\n\n functions: FunctionSectionReader,\n\n module_info: &mut ModuleInfoPolyfill,\n\n) -> WasmResult<()> {\n\n let num_functions = functions.get_count();\n\n module_info.reserve_func_types(num_functions)?;\n\n\n\n for entry in functions {\n\n let sigindex = entry.map_err(transform_err)?;\n\n module_info.declare_func_type(SignatureIndex::from_u32(sigindex))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/api/src/js/module_info_polyfill.rs", "rank": 38, "score": 274979.59552589984 }, { "content": "/// Suggest function exports for the module\n\npub fn 
suggest_function_exports(module: &Module, query: &str) -> Vec<String> {\n\n let mut function_names = module\n\n .exports()\n\n .functions()\n\n .map(|extern_fn| {\n\n let name = extern_fn.name();\n\n name.to_string()\n\n })\n\n .collect::<Vec<_>>();\n\n function_names.sort_by_key(|name| damerau_levenshtein(name, query));\n\n function_names\n\n}\n", "file_path": "lib/cli/src/suggestions.rs", "rank": 39, "score": 271867.69853034447 }, { "content": "/// Whether or not Wasmer should print with color\n\npub fn wasmer_should_print_color() -> bool {\n\n env::var(\"WASMER_COLOR\")\n\n .ok()\n\n .and_then(|inner| inner.parse::<bool>().ok())\n\n .unwrap_or_else(|| atty::is(atty::Stream::Stdout))\n\n}\n\n\n", "file_path": "lib/cli/src/utils.rs", "rank": 40, "score": 269627.53074024717 }, { "content": "pub fn _llvm_eh_typeid_for(_ctx: &EmEnv, _type_info_addr: u32) -> i32 {\n\n debug!(\"emscripten::_llvm_eh_typeid_for\");\n\n -1\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 41, "score": 269130.8714282862 }, { "content": "pub fn _ctime(ctx: &EmEnv, time_p: u32) -> u32 {\n\n debug!(\"emscripten::_ctime {}\", time_p);\n\n let tm_current = 2414544;\n\n _ctime_r(ctx, time_p, tm_current)\n\n}\n\n\n\n/// emscripten: _timegm\n", "file_path": "lib/emscripten/src/time.rs", "rank": 42, "score": 262786.4317732046 }, { "content": "/// emscripten: _asctime\n\npub fn _asctime(ctx: &EmEnv, time: u32) -> u32 {\n\n debug!(\"emscripten::_asctime {}\", time);\n\n\n\n unsafe {\n\n let time_str_ptr = fmt_time(ctx, time);\n\n copy_cstr_into_wasm(ctx, time_str_ptr)\n\n\n\n // let c_str = emscripten_memory_pointer!(ctx.memory(0), res) as *mut i8;\n\n // use std::ffi::CStr;\n\n // debug!(\"#### cstr = {:?}\", CStr::from_ptr(c_str));\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/time.rs", "rank": 43, "score": 262786.4317732046 }, { "content": "/// emscripten: _asctime_r\n\npub fn _asctime_r(ctx: &EmEnv, time: u32, buf: u32) -> u32 {\n\n debug!(\"emscripten::_asctime_r {}, {}\", 
time, buf);\n\n\n\n unsafe {\n\n // NOTE: asctime_r is specced to behave in an undefined manner if the algorithm would attempt\n\n // to write out more than 26 bytes (including the null terminator).\n\n // See http://pubs.opengroup.org/onlinepubs/9699919799/functions/asctime.html\n\n // Our undefined behavior is to truncate the write to at most 26 bytes, including null terminator.\n\n let time_str_ptr = fmt_time(ctx, time);\n\n write_to_buf(ctx, time_str_ptr, buf, 26)\n\n\n\n // let c_str = emscripten_memory_pointer!(ctx.memory(0), res) as *mut i8;\n\n // use std::ffi::CStr;\n\n // debug!(\"#### cstr = {:?}\", CStr::from_ptr(c_str));\n\n }\n\n}\n\n\n\n/// emscripten: _localtime\n", "file_path": "lib/emscripten/src/time.rs", "rank": 44, "score": 262782.8679129755 }, { "content": "pub fn _flock(_ctx: &EmEnv, _fd: u32, _op: u32) -> u32 {\n\n debug!(\"emscripten::_flock\");\n\n 0\n\n}\n", "file_path": "lib/emscripten/src/lock.rs", "rank": 45, "score": 262782.8679129755 }, { "content": "pub fn _ctime_r(ctx: &EmEnv, time_p: u32, buf: u32) -> u32 {\n\n debug!(\"emscripten::_ctime_r {} {}\", time_p, buf);\n\n\n\n // var stack = stackSave();\n\n let (result_offset, _result_slice): (u32, &mut [u8]) = unsafe { allocate_on_stack(ctx, 44) };\n\n let time = _localtime_r(ctx, time_p, result_offset) as u32;\n\n let rv = _asctime_r(ctx, time, buf);\n\n // stackRestore(stack);\n\n rv\n\n}\n\n\n", "file_path": "lib/emscripten/src/time.rs", "rank": 46, "score": 262782.8679129755 }, { "content": "fn add_wasmer_version(pre_header: &mut String) {\n\n pre_header.push_str(&format!(\n\n r#\"\n\n// This file corresponds to the following Wasmer version.\n\n#define WASMER_VERSION \"{full}\"\n\n#define WASMER_VERSION_MAJOR {major}\n\n#define WASMER_VERSION_MINOR {minor}\n\n#define WASMER_VERSION_PATCH {patch}\n\n#define WASMER_VERSION_PRE \"{pre}\"\n\n\"#,\n\n full = env!(\"CARGO_PKG_VERSION\"),\n\n major = env!(\"CARGO_PKG_VERSION_MAJOR\"),\n\n minor = env!(\"CARGO_PKG_VERSION_MINOR\"),\n\n 
patch = env!(\"CARGO_PKG_VERSION_PATCH\"),\n\n pre = env!(\"CARGO_PKG_VERSION_PRE\"),\n\n ));\n\n}\n\n\n", "file_path": "lib/c-api/build.rs", "rank": 47, "score": 262689.8320926967 }, { "content": "#[cfg(target_os = \"wasi\")]\n\nfn read(fd: u32, iovs: &[&mut [u8]]) -> u32 {\n\n let mut nread = 0;\n\n let mut processed_iovs = vec![];\n\n\n\n for iov in iovs {\n\n processed_iovs.push(WasiIovec {\n\n buf: iov.as_ptr() as usize as u32,\n\n buf_len: iov.len() as u32,\n\n })\n\n }\n\n\n\n unsafe {\n\n fd_read(\n\n fd,\n\n processed_iovs.as_ptr() as usize as u32,\n\n processed_iovs.len() as u32,\n\n &mut nread as *mut u32 as usize as u32,\n\n );\n\n }\n\n nread\n\n}\n\n\n", "file_path": "tests/wasi-wast/wasi/tests/fd_read.rs", "rank": 48, "score": 260160.77902955498 }, { "content": "pub fn _times(ctx: &EmEnv, buffer: u32) -> u32 {\n\n if buffer != 0 {\n\n call_memset(ctx, buffer, 0, 16);\n\n }\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 49, "score": 259961.33253305405 }, { "content": "fn is_f64_arithmetic(bits: u64) -> bool {\n\n // Mask off sign bit.\n\n let bits = bits & 0x7FFF_FFFF_FFFF_FFFF;\n\n bits < 0x7FF8_0000_0000_0000\n\n}\n\n\n\n// Constants for the bounds of truncation operations. 
These are the least or\n\n// greatest exact floats in either f32 or f64 representation\n\n// greater-than-or-equal-to (for least) or less-than-or-equal-to (for greatest)\n\n// the i32 or i64 or u32 or u64 min (for least) or max (for greatest), when\n\n// rounding towards zero.\n\n\n\n/// Least Exact Float (32 bits) greater-than-or-equal-to i32::MIN when rounding towards zero.\n\nconst LEF32_GEQ_I32_MIN: u64 = std::i32::MIN as u64;\n\n/// Greatest Exact Float (32 bits) less-than-or-equal-to i32::MAX when rounding towards zero.\n\nconst GEF32_LEQ_I32_MAX: u64 = 2147483520; // bits as f32: 0x4eff_ffff\n\n/// Least Exact Float (64 bits) greater-than-or-equal-to i32::MIN when rounding towards zero.\n\nconst LEF64_GEQ_I32_MIN: u64 = std::i32::MIN as u64;\n\n/// Greatest Exact Float (64 bits) less-than-or-equal-to i32::MAX when rounding towards zero.\n\nconst GEF64_LEQ_I32_MAX: u64 = std::i32::MAX as u64;\n", "file_path": "lib/compiler-llvm/src/translator/code.rs", "rank": 50, "score": 259833.92344220763 }, { "content": "fn is_f32_arithmetic(bits: u32) -> bool {\n\n // Mask off sign bit.\n\n let bits = bits & 0x7FFF_FFFF;\n\n bits < 0x7FC0_0000\n\n}\n\n\n", "file_path": "lib/compiler-llvm/src/translator/code.rs", "rank": 51, "score": 259540.07854876755 }, { "content": "pub fn call_malloc(ctx: &EmEnv, size: u32) -> u32 {\n\n get_emscripten_data(ctx)\n\n .malloc_ref()\n\n .unwrap()\n\n .call(size)\n\n .unwrap()\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 52, "score": 257239.65314854134 }, { "content": "// #[no_mangle]\n\n/// emscripten: _getenv // (name: *const char) -> *const c_char;\n\npub fn _getenv(ctx: &EmEnv, name: u32) -> u32 {\n\n debug!(\"emscripten::_getenv\");\n\n let memory = ctx.memory(0);\n\n let name_string = read_string_from_wasm(&memory, name);\n\n debug!(\"=> name({:?})\", name_string);\n\n let c_str = unsafe { getenv(name_string.as_ptr() as *const libc::c_char) };\n\n if c_str.is_null() {\n\n return 0;\n\n }\n\n unsafe { 
copy_cstr_into_wasm(ctx, c_str as *const c_char) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/windows/mod.rs", "rank": 53, "score": 257239.65314854134 }, { "content": "// NOTE: Not implemented by Emscripten\n\npub fn ___wait(_ctx: &EmEnv, _which: u32, _varargs: u32, _three: u32, _four: u32) {\n\n debug!(\"emscripten::___wait\");\n\n}\n\n\n", "file_path": "lib/emscripten/src/lock.rs", "rank": 54, "score": 256959.04966790113 }, { "content": "pub fn call_memset(ctx: &EmEnv, pointer: u32, value: u32, size: u32) -> u32 {\n\n get_emscripten_data(ctx)\n\n .memset_ref()\n\n .unwrap()\n\n .call(pointer, value, size)\n\n .unwrap()\n\n}\n\n\n\npub(crate) fn get_emscripten_data(ctx: &EmEnv) -> MutexGuard<EmscriptenData> {\n\n ctx.data.lock().unwrap()\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 55, "score": 255565.81931780922 }, { "content": "/// emscripten: _emscripten_memcpy_big\n\npub fn _emscripten_memcpy_big(ctx: &EmEnv, dest: u32, src: u32, len: u32) -> u32 {\n\n debug!(\n\n \"emscripten::_emscripten_memcpy_big {}, {}, {}\",\n\n dest, src, len\n\n );\n\n let dest_addr = emscripten_memory_pointer!(ctx.memory(0), dest) as *mut c_void;\n\n let src_addr = emscripten_memory_pointer!(ctx.memory(0), src) as *mut c_void;\n\n unsafe {\n\n memcpy(dest_addr, src_addr, len as size_t);\n\n }\n\n dest\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 56, "score": 255565.8193178092 }, { "content": "#[cfg(FALSE)]\n\n#[test]\n\nfn test_table() -> anyhow::Result<()> {\n\n main()\n\n}\n", "file_path": "examples/table.rs", "rank": 57, "score": 255081.54358257196 }, { "content": "/// emscripten: _emscripten_resize_heap\n\n/// Note: this function only allows growing the size of heap\n\npub fn _emscripten_resize_heap(ctx: &EmEnv, requested_size: u32) -> u32 {\n\n debug!(\"emscripten::_emscripten_resize_heap {}\", requested_size);\n\n let current_memory_pages = ctx.memory(0).size();\n\n let current_memory = current_memory_pages.bytes().0 as 
u32;\n\n\n\n // implementation from emscripten\n\n let mut new_size = usize::max(\n\n current_memory as usize,\n\n WASM_MIN_PAGES as usize * WASM_PAGE_SIZE,\n\n );\n\n while new_size < requested_size as usize {\n\n if new_size <= 0x2000_0000 {\n\n new_size = align_up(new_size * 2, WASM_PAGE_SIZE);\n\n } else {\n\n new_size = usize::min(\n\n align_up((3 * new_size + 0x8000_0000) / 4, WASM_PAGE_SIZE),\n\n WASM_PAGE_SIZE * WASM_MAX_PAGES as usize,\n\n );\n\n }\n\n }\n\n\n\n let amount_to_grow = (new_size - current_memory as usize) / WASM_PAGE_SIZE;\n\n if let Ok(_pages_allocated) = ctx.memory(0).grow(Pages(amount_to_grow as u32)) {\n\n debug!(\"{} pages allocated\", _pages_allocated.0);\n\n 1\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 58, "score": 254621.719996648 }, { "content": "/// emscripten: abortOnCannotGrowMemory\n\npub fn abort_on_cannot_grow_memory(ctx: &EmEnv, _requested_size: u32) -> u32 {\n\n debug!(\n\n \"emscripten::abort_on_cannot_grow_memory {}\",\n\n _requested_size\n\n );\n\n abort_with_message(ctx, \"Cannot enlarge memory arrays!\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 59, "score": 254615.65272545366 }, { "content": "pub fn _siginterrupt(_ctx: &EmEnv, _a: u32, _b: u32) -> i32 {\n\n debug!(\"emscripten::_siginterrupt {}, {}\", _a, _b);\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/signal.rs", "rank": 60, "score": 254327.56043174135 }, { "content": "/// emscripten: ___cxa_throw\n\n/// TODO: We don't have support for exceptions yet\n\npub fn ___cxa_throw(ctx: &EmEnv, _ptr: u32, _ty: u32, _destructor: u32) {\n\n debug!(\"emscripten::___cxa_throw\");\n\n eprintln!(\"Throwing exceptions not yet implemented: aborting!\");\n\n _abort(ctx);\n\n}\n\n\n", "file_path": "lib/emscripten/src/exception.rs", "rank": 61, "score": 254285.4843201331 }, { "content": "#[compiler_test(issues)]\n\nfn issue_2329(mut config: crate::Config) -> Result<()> {\n\n let store = 
config.store();\n\n\n\n #[derive(Clone, Default, WasmerEnv)]\n\n pub struct Env {\n\n #[wasmer(export)]\n\n memory: LazyInit<Memory>,\n\n }\n\n\n\n impl Env {\n\n pub fn new() -> Self {\n\n Self {\n\n memory: LazyInit::new(),\n\n }\n\n }\n\n }\n\n\n\n pub fn read_memory(env: &Env, guest_ptr: u32) -> u32 {\n\n dbg!(env.memory_ref());\n\n dbg!(guest_ptr);\n", "file_path": "tests/compilers/issues.rs", "rank": 62, "score": 253533.21753300296 }, { "content": "/// Check if a provided module is compiled for some version of WASI.\n\n/// Use [`get_wasi_version`] to find out which version of WASI the module is.\n\npub fn is_wasi_module(module: &Module) -> bool {\n\n get_wasi_version(module, false).is_some()\n\n}\n\n\n\n/// The version of WASI. This is determined by the imports namespace\n\n/// string.\n\n#[derive(Debug, Clone, Copy, Eq)]\n\npub enum WasiVersion {\n\n /// `wasi_unstable`.\n\n Snapshot0,\n\n\n\n /// `wasi_snapshot_preview1`.\n\n Snapshot1,\n\n\n\n /// Latest version.\n\n ///\n\n /// It's a “floating” version, i.e. it's an alias to the latest\n\n /// version (for the moment, `Snapshot1`). 
Using this version is a\n\n /// way to ensure that modules will run only if they come with the\n\n /// latest WASI version (in case of security issues for instance),\n", "file_path": "lib/wasi/src/utils.rs", "rank": 63, "score": 253083.59858222213 }, { "content": "/// We check if a provided module is an Emscripten generated one\n\npub fn is_emscripten_module(module: &Module) -> bool {\n\n for import in module.imports().functions() {\n\n let name = import.name();\n\n let module = import.module();\n\n if (name == \"_emscripten_memcpy_big\"\n\n || name == \"emscripten_memcpy_big\"\n\n || name == \"__map_file\")\n\n && module == \"env\"\n\n {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "lib/emscripten/src/utils.rs", "rank": 64, "score": 253077.73858147132 }, { "content": "#[cfg(unix)]\n\npub fn lazy_per_thread_init() -> Result<(), Trap> {\n\n use std::cell::RefCell;\n\n use std::ptr::null_mut;\n\n\n\n thread_local! {\n\n /// Thread-local state is lazy-initialized on the first time it's used,\n\n /// and dropped when the thread exits.\n\n static TLS: RefCell<Tls> = RefCell::new(Tls::None);\n\n }\n\n\n\n /// The size of the sigaltstack (not including the guard, which will be\n\n /// added). 
Make this large enough to run our signal handlers.\n\n const MIN_STACK_SIZE: usize = 16 * 4096;\n\n\n\n enum Tls {\n\n None,\n\n Allocated {\n\n mmap_ptr: *mut libc::c_void,\n\n mmap_size: usize,\n\n },\n", "file_path": "lib/vm/src/trap/traphandlers.rs", "rank": 65, "score": 252361.87826359778 }, { "content": "pub fn _sigaction(_ctx: &EmEnv, _signum: u32, _act: u32, _oldact: u32) -> i32 {\n\n debug!(\"emscripten::_sigaction {}, {}, {}\", _signum, _act, _oldact);\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/signal.rs", "rank": 66, "score": 250946.98705952713 }, { "content": "#[compiler_test(middlewares)]\n\nfn middleware_basic(mut config: crate::Config) -> Result<()> {\n\n config.set_middlewares(vec![\n\n Arc::new(Add2MulGen { value_off: 0 }) as Arc<dyn ModuleMiddleware>\n\n ]);\n\n let store = config.store();\n\n let wat = r#\"(module\n\n (func (export \"add\") (param i32 i32) (result i32)\n\n (i32.add (local.get 0)\n\n (local.get 1)))\n\n)\"#;\n\n let module = Module::new(&store, wat).unwrap();\n\n\n\n let import_object = imports! 
{};\n\n\n\n let instance = Instance::new(&module, &import_object)?;\n\n\n\n let f: NativeFunc<(i32, i32), i32> = instance.exports.get_native_function(\"add\")?;\n\n let result = f.call(4, 6)?;\n\n assert_eq!(result, 24);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/compilers/middlewares.rs", "rank": 67, "score": 250133.31034770992 }, { "content": "#[compiler_test(metering)]\n\nfn complex_loop(mut config: crate::Config) -> Result<()> {\n\n // Assemblyscript\n\n // export function add_to(x: i32, y: i32): i32 {\n\n // for(var i = 0; i < x; i++){\n\n // if(i % 1 == 0){\n\n // y += i;\n\n // } else {\n\n // y *= i\n\n // }\n\n // }\n\n // return y;\n\n // }\n\n static WAT: &'static str = r#\"\n\n (module\n\n (type $t0 (func (param i32 i32) (result i32)))\n\n (type $t1 (func))\n\n (func $add_to (export \"add_to\") (type $t0) (param $p0 i32) (param $p1 i32) (result i32)\n\n (local $l0 i32)\n\n block $B0\n\n i32.const 0\n", "file_path": "tests/compilers/metering.rs", "rank": 68, "score": 250133.31034770992 }, { "content": "/// Generate C source code from some `CStatements` into a String.\n\n// TODO: add config section\n\npub fn generate_c(statements: &[CStatement]) -> String {\n\n let mut out = String::new();\n\n for statement in statements {\n\n statement.generate_c(&mut out);\n\n }\n\n out\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn generate_types() {\n\n macro_rules! 
assert_c_type {\n\n ($ctype:expr, $expected:expr) => {\n\n let mut w = String::new();\n\n let ctype = $ctype;\n\n ctype.generate_c(&mut w);\n\n assert_eq!(w, $expected);\n", "file_path": "lib/cli/src/c_gen/mod.rs", "rank": 69, "score": 249860.23093590897 }, { "content": "pub fn _getpagesize(_ctx: &EmEnv) -> u32 {\n\n debug!(\"emscripten::_getpagesize\");\n\n 16384\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/mod.rs", "rank": 70, "score": 249632.10105358483 }, { "content": "/// emscripten: enlargeMemory\n\npub fn enlarge_memory(_ctx: &EmEnv) -> u32 {\n\n debug!(\"emscripten::enlarge_memory\");\n\n // instance.memories[0].grow(100);\n\n // TODO: Fix implementation\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 71, "score": 249632.10105358483 }, { "content": "/// emscripten: dlsym(handle: *mut c_void, symbol: *const c_char) -> *mut c_void\n\npub fn _dlsym(_ctx: &EmEnv, _filepath: u32, _symbol: u32) -> i32 {\n\n debug!(\"emscripten::_dlsym\");\n\n -1\n\n}\n\n\n", "file_path": "lib/emscripten/src/linking.rs", "rank": 72, "score": 248787.82066846 }, { "content": "#[allow(clippy::cast_ptr_alignment)]\n\npub fn _sigaddset(ctx: &EmEnv, set: u32, signum: u32) -> i32 {\n\n debug!(\"emscripten::_sigaddset {}, {}\", set, signum);\n\n let set_addr = emscripten_memory_pointer!(ctx.memory(0), set) as *mut u32;\n\n unsafe {\n\n *set_addr |= 1 << (signum - 1);\n\n }\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/signal.rs", "rank": 73, "score": 248787.24732593662 }, { "content": "/// emscripten: dlopen(filename: *const c_char, flag: c_int) -> *mut c_void\n\npub fn _dlopen(_ctx: &EmEnv, _filename: u32, _flag: u32) -> i32 {\n\n debug!(\"emscripten::_dlopen\");\n\n -1\n\n}\n\n\n", "file_path": "lib/emscripten/src/linking.rs", "rank": 74, "score": 248786.832867184 }, { "content": "/// emscripten: _difftime\n\npub fn _difftime(_ctx: &EmEnv, t0: u32, t1: u32) -> f64 {\n\n debug!(\"emscripten::_difftime\");\n\n (t0 - t1) as _\n\n}\n\n\n", "file_path": 
"lib/emscripten/src/time.rs", "rank": 75, "score": 248780.7818070781 }, { "content": "/// Check whether we should build the C API headers or set `inline-c` up.\n\nfn running_self() -> bool {\n\n env::var(\"DOCS_RS\").is_err()\n\n && env::var(\"_CBINDGEN_IS_RUNNING\").is_err()\n\n && env::var(\"WASMER_PUBLISH_SCRIPT_IS_RUNNING\").is_err()\n\n}\n\n\n", "file_path": "lib/c-api/build.rs", "rank": 76, "score": 248115.08715240966 }, { "content": "#[test]\n\nfn deterministic_table() -> Result<()> {\n\n let wasm_bytes = wat2wasm(\n\n br#\"\n\n(module\n\n (table 2 funcref)\n\n (func $f1)\n\n (func $f2)\n\n (elem (i32.const 0) $f1 $f2))\n\n\"#,\n\n )?;\n\n\n\n compile_and_compare(&wasm_bytes)\n\n}\n", "file_path": "tests/compilers/deterministic.rs", "rank": 77, "score": 247407.18536823097 }, { "content": "pub fn nullfunc(ctx: &EmEnv, _x: u32) {\n\n use crate::process::abort_with_message;\n\n debug!(\"emscripten::nullfunc_i {}\", _x);\n\n abort_with_message(\n\n ctx,\n\n \"Invalid function pointer. Perhaps this is an invalid value \\\n\n (e.g. caused by calling a virtual method on a NULL pointer)? Or calling a function with an \\\n\n incorrect type, which will fail? 
(it is worth building your source files with -Werror (\\\n\n warnings are errors), as warnings can indicate undefined behavior which can cause this)\",\n\n );\n\n}\n\n\n\n/// The current version of this crate\n\npub const VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n", "file_path": "lib/emscripten/src/lib.rs", "rank": 78, "score": 247356.17879196387 }, { "content": "#[compiler_test(middlewares)]\n\nfn middleware_chain_order_1(mut config: crate::Config) -> Result<()> {\n\n config.set_middlewares(vec![\n\n Arc::new(Add2MulGen { value_off: 0 }) as Arc<dyn ModuleMiddleware>,\n\n Arc::new(Add2MulGen { value_off: 2 }) as Arc<dyn ModuleMiddleware>,\n\n ]);\n\n let store = config.store();\n\n let wat = r#\"(module\n\n (func (export \"add\") (param i32 i32) (result i32)\n\n (i32.add (local.get 0)\n\n (local.get 1)))\n\n)\"#;\n\n let module = Module::new(&store, wat).unwrap();\n\n\n\n let import_object = imports! {};\n\n\n\n let instance = Instance::new(&module, &import_object)?;\n\n\n\n let f: NativeFunc<(i32, i32), i32> = instance.exports.get_native_function(\"add\")?;\n\n let result = f.call(4, 6)?;\n\n assert_eq!(result, 24);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/compilers/middlewares.rs", "rank": 79, "score": 246881.14006646752 }, { "content": "#[compiler_test(middlewares)]\n\nfn middleware_one_to_multi(mut config: crate::Config) -> Result<()> {\n\n config.set_middlewares(vec![\n\n Arc::new(Add2MulGen { value_off: 1 }) as Arc<dyn ModuleMiddleware>\n\n ]);\n\n let store = config.store();\n\n let wat = r#\"(module\n\n (func (export \"add\") (param i32 i32) (result i32)\n\n (i32.add (local.get 0)\n\n (local.get 1)))\n\n)\"#;\n\n let module = Module::new(&store, wat).unwrap();\n\n\n\n let import_object = imports! 
{};\n\n\n\n let instance = Instance::new(&module, &import_object)?;\n\n\n\n let f: NativeFunc<(i32, i32), i32> = instance.exports.get_native_function(\"add\")?;\n\n let result = f.call(4, 6)?;\n\n assert_eq!(result, 25);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/compilers/middlewares.rs", "rank": 80, "score": 246881.14006646752 }, { "content": "#[compiler_test(middlewares)]\n\nfn middleware_chain_order_2(mut config: crate::Config) -> Result<()> {\n\n config.set_middlewares(vec![\n\n Arc::new(Add2MulGen { value_off: 2 }) as Arc<dyn ModuleMiddleware>,\n\n Arc::new(Add2MulGen { value_off: 0 }) as Arc<dyn ModuleMiddleware>,\n\n ]);\n\n let store = config.store();\n\n let wat = r#\"(module\n\n (func (export \"add\") (param i32 i32) (result i32)\n\n (i32.add (local.get 0)\n\n (local.get 1)))\n\n)\"#;\n\n let module = Module::new(&store, wat).unwrap();\n\n\n\n let import_object = imports! {};\n\n\n\n let instance = Instance::new(&module, &import_object)?;\n\n\n\n let f: NativeFunc<(i32, i32), i32> = instance.exports.get_native_function(\"add\")?;\n\n let result = f.call(4, 6)?;\n\n assert_eq!(result, 48);\n\n Ok(())\n\n}\n", "file_path": "tests/compilers/middlewares.rs", "rank": 81, "score": 246881.14006646752 }, { "content": "#[compiler_test(middlewares)]\n\nfn middleware_multi_to_one(mut config: crate::Config) -> Result<()> {\n\n config.set_middlewares(vec![Arc::new(FusionGen) as Arc<dyn ModuleMiddleware>]);\n\n let store = config.store();\n\n let wat = r#\"(module\n\n (func (export \"testfunc\") (param i32 i32) (result i32)\n\n (local.get 0)\n\n (local.get 1)\n\n (i32.const 1)\n\n (i32.add)\n\n (i32.mul))\n\n)\"#;\n\n let module = Module::new(&store, wat).unwrap();\n\n\n\n let import_object = imports! 
{};\n\n\n\n let instance = Instance::new(&module, &import_object)?;\n\n\n\n let f: NativeFunc<(i32, i32), i32> = instance.exports.get_native_function(\"testfunc\")?;\n\n let result = f.call(10, 20)?;\n\n assert_eq!(result, 10);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/compilers/middlewares.rs", "rank": 82, "score": 246881.14006646752 }, { "content": "/// emscripten: getTotalMemory\n\npub fn get_total_memory(_ctx: &EmEnv) -> u32 {\n\n debug!(\"emscripten::get_total_memory\");\n\n // instance.memories[0].current_pages()\n\n // TODO: Fix implementation\n\n _ctx.memory(0).size().bytes().0 as u32\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 83, "score": 246526.92021997366 }, { "content": "pub fn load_cache_native(c: &mut Criterion) {\n\n let tmp_dir = TempDir::new().unwrap();\n\n let mut fs_cache = FileSystemCache::new(tmp_dir.path()).unwrap();\n\n let compiler = Singlepass::default();\n\n let store = Store::new(&Native::new(compiler).engine());\n\n let module = Module::new(\n\n &store,\n\n std::fs::read(\"../../lib/c-api/examples/assets/qjs.wasm\").unwrap(),\n\n )\n\n .unwrap();\n\n let key = Hash::new([0u8; 32]);\n\n fs_cache.store(key, &module).unwrap();\n\n\n\n c.bench_function(\"load native module in filesystem cache\", |b| {\n\n b.iter(|| unsafe { fs_cache.load(&store, key.clone()).unwrap() })\n\n });\n\n}\n\n\n\ncriterion_group! 
{\n\n name = benches;\n\n config = Criterion::default().sample_size(300);\n\n targets = store_cache_universal, load_cache_universal, store_cache_native, load_cache_native\n\n}\n\ncriterion_main!(benches);\n", "file_path": "lib/cache/benches/bench_filesystem_cache.rs", "rank": 84, "score": 246407.0695214504 }, { "content": "pub fn load_cache_universal(c: &mut Criterion) {\n\n let tmp_dir = TempDir::new().unwrap();\n\n let mut fs_cache = FileSystemCache::new(tmp_dir.path()).unwrap();\n\n let compiler = Singlepass::default();\n\n let store = Store::new(&Universal::new(compiler).engine());\n\n let module = Module::new(\n\n &store,\n\n std::fs::read(\"../../lib/c-api/examples/assets/qjs.wasm\").unwrap(),\n\n )\n\n .unwrap();\n\n let key = Hash::new([0u8; 32]);\n\n fs_cache.store(key, &module).unwrap();\n\n\n\n c.bench_function(\"load universal module in filesystem cache\", |b| {\n\n b.iter(|| unsafe { fs_cache.load(&store, key.clone()).unwrap() })\n\n });\n\n}\n\n\n", "file_path": "lib/cache/benches/bench_filesystem_cache.rs", "rank": 85, "score": 246407.0695214504 }, { "content": "pub fn store_cache_universal(c: &mut Criterion) {\n\n let tmp_dir = TempDir::new().unwrap();\n\n let mut fs_cache = FileSystemCache::new(tmp_dir.path()).unwrap();\n\n let compiler = Singlepass::default();\n\n let store = Store::new(&Universal::new(compiler).engine());\n\n let module = Module::new(\n\n &store,\n\n std::fs::read(\"../../lib/c-api/examples/assets/qjs.wasm\").unwrap(),\n\n )\n\n .unwrap();\n\n\n\n c.bench_function(\"store universal module in filesystem cache\", |b| {\n\n b.iter(|| {\n\n let key = random_key();\n\n fs_cache.store(key, &module).unwrap()\n\n })\n\n });\n\n}\n\n\n", "file_path": "lib/cache/benches/bench_filesystem_cache.rs", "rank": 86, "score": 246407.0695214504 }, { "content": "pub fn store_cache_native(c: &mut Criterion) {\n\n let tmp_dir = TempDir::new().unwrap();\n\n let mut fs_cache = FileSystemCache::new(tmp_dir.path()).unwrap();\n\n let compiler = 
Singlepass::default();\n\n let store = Store::new(&Native::new(compiler).engine());\n\n let module = Module::new(\n\n &store,\n\n std::fs::read(\"../../lib/c-api/examples/assets/qjs.wasm\").unwrap(),\n\n )\n\n .unwrap();\n\n\n\n c.bench_function(\"store native module in filesystem cache\", |b| {\n\n b.iter(|| {\n\n let key = random_key();\n\n fs_cache.store(key, &module).unwrap()\n\n })\n\n });\n\n}\n\n\n", "file_path": "lib/cache/benches/bench_filesystem_cache.rs", "rank": 87, "score": 246407.0695214504 }, { "content": "/// emscripten: _setenv // (name: *const char, name: *const value, overwrite: int);\n\npub fn _setenv(ctx: &EmEnv, name: u32, value: u32, _overwrite: u32) -> c_int {\n\n debug!(\"emscripten::_setenv\");\n\n let memory = ctx.memory(0);\n\n // setenv does not exist on windows, so we hack it with _putenv\n\n let name = read_string_from_wasm(&memory, name);\n\n let value = read_string_from_wasm(&memory, value);\n\n let putenv_string = format!(\"{}={}\", name, value);\n\n let putenv_cstring = CString::new(putenv_string).unwrap();\n\n let putenv_raw_ptr = putenv_cstring.as_ptr();\n\n debug!(\"=> name({:?})\", name);\n\n debug!(\"=> value({:?})\", value);\n\n unsafe { putenv(putenv_raw_ptr) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/windows/mod.rs", "rank": 88, "score": 246367.02193578065 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let wasm_bytes = wat2wasm(\n\n r#\"\n\n(module\n\n ;; All our callbacks will take 2 i32s and return an i32.\n\n ;; Wasm tables are not limited to 1 type of function, but the code using the\n\n ;; table must have code to handle the type it finds.\n\n (type $callback_t (func (param i32 i32) (result i32)))\n\n\n\n ;; We'll call a callback by passing a table index as an i32 and then the two\n\n ;; arguments that the function expects.\n\n (type $call_callback_t (func (param i32 i32 i32) (result i32)))\n\n\n\n ;; Our table of functions that's exactly size 3 (min 3, max 3).\n\n (table $t1 3 6 funcref)\n\n\n\n ;; 
Call the function at the given index with the two supplied arguments.\n\n (func $call_callback (type $call_callback_t) (param $idx i32)\n\n (param $arg1 i32) (param $arg2 i32)\n\n (result i32)\n", "file_path": "examples/table.rs", "rank": 89, "score": 246241.7109458915 }, { "content": "/// emscripten: ___map_file\n\npub fn ___map_file(_ctx: &EmEnv, _one: u32, _two: u32) -> c_int {\n\n debug!(\"emscripten::___map_file\");\n\n // NOTE: TODO: Em returns -1 here as well. May need to implement properly\n\n -1\n\n}\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 90, "score": 246156.78138399043 }, { "content": "#[inline(always)]\n\nfn align_pointer(ptr: usize, align: usize) -> usize {\n\n // clears bits below aligment amount (assumes power of 2) to align pointer\n\n debug_assert!(align.count_ones() == 1);\n\n ptr & !(align - 1)\n\n}\n\n\n\n/// Methods for `WasmPtr`s to data that can be dereferenced, namely to types\n\n/// that implement [`ValueType`], meaning that they're valid for all possible\n\n/// bit patterns.\n\nimpl<T: Copy + ValueType> WasmPtr<T, Item> {\n\n /// Dereference the `WasmPtr` getting access to a `&Cell<T>` allowing for\n\n /// reading and mutating of the inner value.\n\n ///\n\n /// This method is unsound if used with unsynchronized shared memory.\n\n /// If you're unsure what that means, it likely does not apply to you.\n\n /// This invariant will be enforced in the future.\n\n #[inline]\n\n pub fn deref<'a>(self, memory: &'a Memory) -> Option<WasmCell<'a, T>> {\n\n if (self.offset as usize) + mem::size_of::<T>() > memory.size().bytes().0\n\n || mem::size_of::<T>() == 0\n", "file_path": "lib/api/src/sys/ptr.rs", "rank": 91, "score": 245009.346050906 }, { "content": "#[compiler_test(issues)]\n\nfn call_with_static_data_pointers(mut config: crate::Config) -> Result<()> {\n\n let store = config.store();\n\n let memory = Memory::new(\n\n &store,\n\n MemoryType::new(Pages(1024), Some(Pages(2048)), false),\n\n )\n\n .unwrap();\n\n\n\n 
#[derive(Clone, WasmerEnv)]\n\n pub struct Env {\n\n memory: Memory,\n\n }\n\n\n\n pub fn banana(\n\n env: &Env,\n\n a: u64,\n\n b: u64,\n\n c: u64,\n\n d: u64,\n\n e: u64,\n", "file_path": "tests/compilers/issues.rs", "rank": 92, "score": 243767.2819904334 }, { "content": "pub fn ___cxa_current_primary_exception(_ctx: &EmEnv) -> u32 {\n\n debug!(\"emscripten::___cxa_current_primary_exception\");\n\n unimplemented!(\"emscripten::___cxa_current_primary_exception\")\n\n}\n\n\n", "file_path": "lib/emscripten/src/exception.rs", "rank": 93, "score": 243551.0504985603 }, { "content": "/// emscripten: abortOnCannotGrowMemory\n\npub fn abort_on_cannot_grow_memory_old(ctx: &EmEnv) -> u32 {\n\n debug!(\"emscripten::abort_on_cannot_grow_memory\");\n\n abort_with_message(ctx, \"Cannot enlarge memory arrays!\");\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 94, "score": 243551.0504985603 }, { "content": "/// emscripten: _emscripten_get_heap_size\n\npub fn _emscripten_get_heap_size(ctx: &EmEnv) -> u32 {\n\n trace!(\"emscripten::_emscripten_get_heap_size\");\n\n let result = ctx.memory(0).size().bytes().0 as u32;\n\n trace!(\"=> {}\", result);\n\n\n\n result\n\n}\n\n\n", "file_path": "lib/emscripten/src/memory.rs", "rank": 95, "score": 243551.0504985603 }, { "content": "/// Set the new provided remaining points in an\n\n/// [`Instance`][wasmer::Instance].\n\n///\n\n/// Note: This can be used in a headless engine after an ahead-of-time\n\n/// compilation as all required state lives in the instance.\n\n///\n\n/// # Panic\n\n///\n\n/// The given [`Instance`][wasmer::Instance] must have been processed\n\n/// with the [`Metering`] middleware at compile time, otherwise this\n\n/// will panic.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use wasmer::Instance;\n\n/// use wasmer_middlewares::metering::set_remaining_points;\n\n///\n\n/// fn update_remaining_points(instance: &Instance) {\n\n/// // The new limit.\n\n/// let new_limit = 10;\n\n///\n\n/// // 
Update the remaining points to the `new_limit`.\n\n/// set_remaining_points(instance, new_limit);\n\n/// }\n\n/// ```\n\npub fn set_remaining_points(instance: &Instance, points: u64) {\n\n instance\n\n .exports\n\n .get_global(\"wasmer_metering_remaining_points\")\n\n .expect(\"Can't get `wasmer_metering_remaining_points` from Instance\")\n\n .set(points.into())\n\n .expect(\"Can't set `wasmer_metering_remaining_points` in Instance\");\n\n\n\n instance\n\n .exports\n\n .get_global(\"wasmer_metering_points_exhausted\")\n\n .expect(\"Can't get `wasmer_metering_points_exhausted` from Instance\")\n\n .set(0i32.into())\n\n .expect(\"Can't set `wasmer_metering_points_exhausted` in Instance\");\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "lib/middlewares/src/metering.rs", "rank": 96, "score": 241296.3509613914 }, { "content": "pub fn execvp(ctx: &EmEnv, command_name_offset: u32, argv_offset: u32) -> i32 {\n\n // a single reference to re-use\n\n let emscripten_memory = ctx.memory(0);\n\n\n\n // read command name as string\n\n let command_name_string_vec: Vec<u8> = emscripten_memory.view()\n\n [(command_name_offset as usize)..]\n\n .iter()\n\n .map(|cell| cell.get())\n\n .take_while(|&byte| byte != 0)\n\n .collect();\n\n let command_name_string = CString::new(command_name_string_vec).unwrap();\n\n\n\n // get the array of args\n\n let mut argv: Vec<*const i8> = emscripten_memory.view()[((argv_offset / 4) as usize)..]\n\n .iter()\n\n .map(|cell: &Cell<u32>| cell.get())\n\n .take_while(|&byte| byte != 0)\n\n .map(|offset| {\n\n let p: *const i8 = (emscripten_memory.view::<u8>()[(offset as usize)..])\n", "file_path": "lib/emscripten/src/exec.rs", "rank": 97, "score": 241180.94625858855 }, { "content": "/// The name of this call is `abort` but we want to avoid conflicts with libc::abort\n\npub fn em_abort(ctx: &EmEnv, arg: u32) {\n\n debug!(\"emscripten::abort\");\n\n eprintln!(\"Program aborted with value {}\", arg);\n\n 
_abort(ctx);\n\n}\n\n\n", "file_path": "lib/emscripten/src/process.rs", "rank": 98, "score": 241007.8903476759 }, { "content": "\n\nimpl UnwindRegistry {\n\n /// Creates a new unwind registry with the given base address.\n\n pub fn new() -> Self {\n\n Self {\n\n registrations: Vec::new(),\n\n published: false,\n\n }\n\n }\n\n\n\n /// Registers a function given the start offset, length, and unwind information.\n\n pub fn register(\n\n &mut self,\n\n _base_address: usize,\n\n _func_start: u32,\n\n _func_len: u32,\n\n info: &CompiledFunctionUnwindInfo,\n\n ) -> Result<(), String> {\n\n match info {\n\n CompiledFunctionUnwindInfo::Dwarf => {}\n", "file_path": "lib/engine-universal/src/unwind/systemv.rs", "rank": 99, "score": 88.08458866109811 } ]
Rust
gtk/src/flash.rs
system76/muf
a1561b32acad891424da8b6de8ae012f39f83d05
use atomic::Atomic; use dbus::arg::{OwnedFd, RefArg, Variant}; use dbus::blocking::{Connection, Proxy}; use dbus_udisks2::DiskDevice; use futures::executor; use libc; use popsicle::{Progress, Task}; use std::cell::Cell; use std::collections::HashMap; use std::fmt::{self, Debug, Display, Formatter}; use std::fs::File; use std::os::unix::io::FromRawFd; use std::str; use std::sync::atomic::Ordering; use std::sync::{Arc, Mutex}; use std::time::Duration; type UDisksOptions = HashMap<&'static str, Variant<Box<dyn RefArg>>>; #[derive(Clone, Copy, PartialEq)] pub enum FlashStatus { Inactive, Active, Killing, } pub struct FlashRequest { source: Option<File>, destinations: Vec<Arc<DiskDevice>>, status: Arc<Atomic<FlashStatus>>, progress: Arc<Vec<Atomic<u64>>>, finished: Arc<Vec<Atomic<bool>>>, } pub struct FlashTask { pub progress: Arc<Vec<Atomic<u64>>>, pub previous: Arc<Mutex<Vec<[u64; 7]>>>, pub finished: Arc<Vec<Atomic<bool>>>, } struct FlashProgress<'a> { request: &'a FlashRequest, id: usize, errors: &'a [Cell<Result<(), FlashError>>], } #[derive(Clone, Debug)] pub struct FlashError { kind: String, message: String, } impl Display for FlashError { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "{}: {}", self.kind, self.message) } } impl std::error::Error for FlashError {} impl<'a> Progress for FlashProgress<'a> { type Device = (); fn message(&mut self, _device: &(), kind: &str, message: &str) { self.errors[self.id] .set(Err(FlashError { kind: kind.to_string(), message: message.to_string() })); } fn finish(&mut self) { self.request.finished[self.id].store(true, Ordering::SeqCst); } fn set(&mut self, value: u64) { self.request.progress[self.id].store(value, Ordering::SeqCst); } } impl FlashRequest { pub fn new( source: File, destinations: Vec<Arc<DiskDevice>>, status: Arc<Atomic<FlashStatus>>, progress: Arc<Vec<Atomic<u64>>>, finished: Arc<Vec<Atomic<bool>>>, ) -> FlashRequest { FlashRequest { source: Some(source), destinations, status, progress, finished } } 
pub fn write(mut self) -> anyhow::Result<(anyhow::Result<()>, Vec<Result<(), FlashError>>)> { self.status.store(FlashStatus::Active, Ordering::SeqCst); let source = self.source.take().unwrap(); let res = self.write_inner(source); for atomic in self.finished.iter() { atomic.store(true, Ordering::SeqCst); } self.status.store(FlashStatus::Inactive, Ordering::SeqCst); res } fn write_inner<'a>( &'a self, source: File, ) -> anyhow::Result<(anyhow::Result<()>, Vec<Result<(), FlashError>>)> { for device in &self.destinations { let _ = udisks_unmount(&device.parent.path); for partition in &device.partitions { let _ = udisks_unmount(&partition.path); } } let mut files = Vec::new(); for device in &self.destinations { let file = udisks_open(&device.parent.path)?; files.push(file); } let mut errors = vec![Ok(()); files.len()]; let errors_cells = Cell::from_mut(&mut errors as &mut [_]).as_slice_of_cells(); let mut bucket = [0u8; 64 * 1024]; let mut task = Task::new(source.into(), false); for (i, file) in files.into_iter().enumerate() { let progress = FlashProgress { request: &self, errors: errors_cells, id: i }; task.subscribe(file.into(), (), progress); } let res = executor::block_on(task.process(&mut bucket)); Ok((res, errors)) } } fn udisks_unmount(dbus_path: &str) -> anyhow::Result<()> { let connection = Connection::new_system()?; let dbus_path = ::dbus::strings::Path::new(dbus_path).map_err(anyhow::Error::msg)?; let proxy = Proxy::new("org.freedesktop.UDisks2", dbus_path, Duration::new(25, 0), &connection); let mut options = UDisksOptions::new(); options.insert("force", Variant(Box::new(true))); let res: Result<(), _> = proxy.method_call("org.freedesktop.UDisks2.Filesystem", "Unmount", (options,)); if let Err(err) = res { if err.name() != Some("org.freedesktop.UDisks2.Error.NotMounted") { return Err(anyhow::Error::new(err)); } } Ok(()) } fn udisks_open(dbus_path: &str) -> anyhow::Result<File> { let connection = Connection::new_system()?; let dbus_path = 
::dbus::strings::Path::new(dbus_path).map_err(anyhow::Error::msg)?; let proxy = Proxy::new("org.freedesktop.UDisks2", &dbus_path, Duration::new(25, 0), &connection); let mut options = UDisksOptions::new(); options.insert("flags", Variant(Box::new(libc::O_SYNC))); let res: (OwnedFd,) = proxy.method_call("org.freedesktop.UDisks2.Block", "OpenDevice", ("rw", options))?; Ok(unsafe { File::from_raw_fd(res.0.into_fd()) }) }
use atomic::Atomic; use dbus::arg::{OwnedFd, RefArg, Variant}; use dbus::blocking::{Connection, Proxy}; use dbus_udisks2::DiskDevice; use futures::executor; use libc; use popsicle::{Progress, Task}; use std::cell::Cell; use std::collections::HashMap; use std::fmt::{self, Debug, Display, Formatter}; use std::fs::File; use std::os::unix::io::FromRawFd; use std::str; use std::sync::atomic::Ordering; use std::sync::{Arc, Mutex}; use std::time::Duration; type UDisksOptions = HashMap<&'static str, Variant<Box<dyn RefArg>>>; #[derive(Clone, Copy, PartialEq)] pub enum FlashStatus { Inactive, Active, Killing, } pub struct FlashRequest { source: Option<File>, destinations: Vec<Arc<DiskDevice>>, status: Arc<Atomic<FlashStatus>>, progress: Arc<Vec<Atomic<u64>>>, finished: Arc<Vec<Atomic<bool>>>, } pub struct FlashTask { pub progress: Arc<Vec<Atomic<u64>>>, pub previous: Arc<Mutex<Vec<[u64; 7]>>>, pub finished: Arc<Vec<Atomic<bool>>>, } struct FlashProgress<'a> { request: &'a FlashRequest, id: usize, errors: &'a [Cell<Result<(), FlashError>>], } #[derive(Clone, Debug)] pub struct FlashError { kind: String, message: String, } impl Display for FlashError { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "{}: {}", self.kind, self.message) } } impl std::error::Error for FlashError {} impl<'a> Progress for FlashProgress<'a> { type Device = (); fn message(&mut self, _device: &(), kind: &str, message: &str) { self.errors[self.id] .set(Err(FlashError { kind: kind.to_string(), message: message.to_string() })); } fn finish(&mut self) { self.request.finished[self.id].store(true, Ordering::SeqCst); } fn set(&mut self, value: u64) { self.request.progress[self.id].store(value, Ordering::SeqCst); } } impl FlashRequest { pub fn new( source: File, destinations: Vec<Arc<DiskDevice>>, status: Arc<Atomic<FlashStatus>>, progress: Arc<Vec<Atomic<u64>>>, finished: Arc<Vec<Atomic<bool>>>, ) -> FlashRequest { FlashRequest { source: Some(source), destinations, status, progress, finished } } 
pub fn write(mut self) -> anyhow::Result<(anyhow::Result<()>, Vec<Result<(), FlashError>>)> { self.status.store(FlashStatus::Active, Ordering::SeqCst); let source = self.source.take().unwrap(); let res = self.write_inner(source); for atomic in self.finished.iter() { atomic.store(true, Ordering::SeqCst); } self.status.store(FlashStatus::Inactive, Ordering::SeqCst); res } fn write_inner<'a>( &'a self, source: File, ) -> anyhow::Result<(anyhow::Result<()>, Vec<Result<(), FlashError>>)> { for device in &self.destinations { let _ = udisks_unmount(&device.parent.path); for partition in &device.partitions { let _ = udisks_unmount(&partition.path); } } let mut files = Vec::new(); for device in &self.destinations { let file = udisks_open(&device.parent.path)?; files.push(file); } let mut errors = vec![Ok(()); files.len()]; let errors_cells = Cell::from_mut(&mut errors as &mut [_]).as_slice_of_cells(); let mut bucket = [0u8; 64 * 1024]; let mut task = Task::new(source.into(), false); for (i, file) in files.into_iter().enumerate() { let progress = FlashProgress { request: &self, errors: errors_cells, id: i }; task.subscribe(file.into(), (), progress); } let res = executor::block_on(task.process(&mut bucket)); Ok((res, errors)) } } fn udisks_unmount(dbus_path: &str) -> anyhow::Result<()> { let connection = Connection::new_system()?; let dbus_path = ::dbus::strings::Path::new(dbus_path).map_err(anyhow::Error::msg)?; let proxy = Proxy::new("org.freedesktop.UDisks2", dbus_path, Duration::new(25, 0), &connection); let mut options = UDisksOptions::new(); options.insert("force", Variant(Box::new(true))); let res: Result<(), _> = proxy.method_call("org.freedesktop.UDisks2.Filesystem", "Unmount", (options,)); if let Err(err) = res {
} Ok(()) } fn udisks_open(dbus_path: &str) -> anyhow::Result<File> { let connection = Connection::new_system()?; let dbus_path = ::dbus::strings::Path::new(dbus_path).map_err(anyhow::Error::msg)?; let proxy = Proxy::new("org.freedesktop.UDisks2", &dbus_path, Duration::new(25, 0), &connection); let mut options = UDisksOptions::new(); options.insert("flags", Variant(Box::new(libc::O_SYNC))); let res: (OwnedFd,) = proxy.method_call("org.freedesktop.UDisks2.Block", "OpenDevice", ("rw", options))?; Ok(unsafe { File::from_raw_fd(res.0.into_fd()) }) }
if err.name() != Some("org.freedesktop.UDisks2.Error.NotMounted") { return Err(anyhow::Error::new(err)); }
if_condition
[ { "content": "pub fn init() -> Result<(), glib::Error> {\n\n const GRESOURCE: &[u8] = include_bytes!(concat!(env!(\"OUT_DIR\"), \"/compiled.gresource\"));\n\n\n\n gio::resources_register(&gio::Resource::from_data(&glib::Bytes::from_static(GRESOURCE))?);\n\n\n\n let theme = gtk::IconTheme::default().unwrap();\n\n theme.add_resource_path(\"/org/Pop-OS/Popsicle\");\n\n\n\n Ok(())\n\n}\n", "file_path": "gtk/src/gresource.rs", "rank": 1, "score": 159474.20862608153 }, { "content": "/// Stores all discovered USB disk paths into the supplied `disks` vector.\n\npub fn get_disk_args(disks: &mut Vec<Box<Path>>) -> Result<(), DiskError> {\n\n executor::block_on(\n\n async move { usb_disk_devices(disks).await.map_err(DiskError::DeviceStream) },\n\n )\n\n}\n\n\n\npub async fn disks_from_args<D: Iterator<Item = Box<Path>>>(\n\n disk_args: D,\n\n mounts: &[MountEntry],\n\n unmount: bool,\n\n) -> Result<Vec<(Box<Path>, File)>, DiskError> {\n\n let mut disks = Vec::new();\n\n\n\n for disk_arg in disk_args {\n\n let canonical_path = fs::canonicalize(&disk_arg)\n\n .await\n\n .map_err(|why| DiskError::NoDisk { disk: disk_arg.clone(), why })?;\n\n\n\n for mount in mounts {\n\n if mount.spec.as_bytes().starts_with(canonical_path.as_os_str().as_bytes()) {\n", "file_path": "src/lib.rs", "rank": 2, "score": 156568.61660589813 }, { "content": "pub fn device_label(device: &DiskDevice) -> String {\n\n if device.drive.vendor.is_empty() {\n\n format!(\"{} ({})\", device.drive.model, device.parent.preferred_device.display())\n\n } else {\n\n format!(\n\n \"{} {} ({})\",\n\n device.drive.vendor,\n\n device.drive.model,\n\n device.parent.preferred_device.display()\n\n )\n\n }\n\n}\n", "file_path": "gtk/src/misc.rs", "rank": 4, "score": 151339.63893651284 }, { "content": "pub trait Progress {\n\n type Device;\n\n fn message(&mut self, device: &Self::Device, kind: &str, message: &str);\n\n fn finish(&mut self);\n\n fn set(&mut self, value: u64);\n\n}\n\n\n\n#[derive(new)]\n\npub struct Task<P: 
Progress> {\n\n image: File,\n\n\n\n #[new(default)]\n\n pub writer: MultiWriter<File>,\n\n\n\n #[new(default)]\n\n pub state: HashMap<usize, (P::Device, P)>,\n\n\n\n #[new(value = \"125\")]\n\n pub millis_between: u64,\n\n\n", "file_path": "src/task.rs", "rank": 6, "score": 127046.43902487235 }, { "content": "// Implements drag and drop support for a GTK widget.\n\npub fn drag_and_drop<W, F>(widget: &W, action: F)\n\nwhere\n\n W: WidgetExt + WidgetExtManual,\n\n F: 'static + Fn(&SelectionData),\n\n{\n\n // Configure the view as a possible drop destination.\n\n widget.drag_dest_set(gtk::DestDefaults::empty(), &[], gdk::DragAction::empty());\n\n\n\n // Then actually handle drags that are inside the view.\n\n widget.connect_drag_motion(|_view, ctx, _x, _y, time| {\n\n ctx.drag_status(gdk::DragAction::COPY, time);\n\n true\n\n });\n\n\n\n // Get the dropped data, if possible, when the active drag is valid.\n\n widget.connect_drag_drop(|view, ctx, _x, _y, time| {\n\n ctx.list_targets().last().map_or(false, |ref target| {\n\n view.drag_get_data(ctx, target, time);\n\n true\n\n })\n\n });\n\n\n\n // Then handle the dropped data, setting the image if the dropped data is valid.\n\n widget.connect_drag_data_received(move |_view, _ctx, _x, _y, data, _info, _time| action(data));\n\n}\n\n\n", "file_path": "gtk/src/misc.rs", "rank": 7, "score": 117698.23731862803 }, { "content": "fn refresh_devices() -> anyhow::Result<Box<[Arc<DiskDevice>]>> {\n\n let udisks = UDisks2::new()?;\n\n let devices = Disks::new(&udisks).devices;\n\n let mut devices = devices\n\n .into_iter()\n\n .filter(|d| d.drive.connection_bus == \"usb\" || d.drive.connection_bus == \"sdio\")\n\n .filter(|d| d.parent.size != 0)\n\n .map(Arc::new)\n\n .collect::<Vec<_>>()\n\n .into_boxed_slice();\n\n devices.sort_by_key(|d| d.drive.id.clone());\n\n Ok(devices)\n\n}\n", "file_path": "gtk/src/app/events/mod.rs", "rank": 8, "score": 100615.6854528927 }, { "content": "fn is_windows_iso(file: &File) -> bool {\n\n if let 
Ok(fs) = ISO9660::new(file) {\n\n return fs.publisher_identifier() == \"MICROSOFT CORPORATION\";\n\n }\n\n false\n\n}\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 10, "score": 89839.80228995655 }, { "content": "type ViewReadySignal = Rc<RefCell<Box<dyn Fn(bool)>>>;\n\n\n\npub struct DevicesView {\n\n pub view: View,\n\n pub list: gtk::ListBox,\n\n pub select_all: gtk::CheckButton,\n\n view_ready: ViewReadySignal,\n\n}\n\n\n\nimpl DevicesView {\n\n pub fn new() -> DevicesView {\n\n let list = cascade! {\n\n gtk::ListBox::new();\n\n ..style_context().add_class(\"frame\");\n\n ..style_context().add_class(\"devices\");\n\n ..set_hexpand(true);\n\n ..set_vexpand(true);\n\n };\n\n\n\n let list_ = list.clone();\n", "file_path": "gtk/src/app/views/devices.rs", "rank": 11, "score": 88529.5269583498 }, { "content": "// Get the `Localizer` to be used for localizing this library.\n\npub fn localizer() -> Box<dyn Localizer> {\n\n Box::from(DefaultLocalizer::new(&*LANGUAGE_LOADER, &Localizations))\n\n}\n", "file_path": "gtk/src/localize.rs", "rank": 12, "score": 76011.11719103242 }, { "content": "// Get the `Localizer` to be used for localizing this library.\n\npub fn localizer() -> Box<dyn Localizer> {\n\n Box::from(DefaultLocalizer::new(&*LANGUAGE_LOADER, &Localizations))\n\n}\n", "file_path": "cli/src/localize.rs", "rank": 13, "score": 76011.11719103242 }, { "content": "pub fn background_thread(events_tx: Sender<UiEvent>, events_rx: Receiver<BackgroundEvent>) {\n\n thread::spawn(move || {\n\n let mut hashed: HashMap<(PathBuf, &'static str), String> = HashMap::new();\n\n\n\n let mut device_paths = Vec::new();\n\n\n\n loop {\n\n match events_rx.recv() {\n\n Ok(BackgroundEvent::GenerateHash(path, kind)) => {\n\n // Check if the cache already contains this hash, and return it.\n\n if let Some(result) = hashed.get(&(path.clone(), kind)) {\n\n let _ = events_tx.send(UiEvent::SetHash(Ok(result.clone())));\n\n continue;\n\n }\n\n\n\n // Hash the file at the given path.\n\n 
let result = match kind {\n\n \"MD5\" => hasher::<Md5>(&path),\n\n \"SHA256\" => hasher::<Sha256>(&path),\n\n \"SHA1\" => hasher::<Sha1>(&path),\n", "file_path": "gtk/src/app/events/mod.rs", "rank": 14, "score": 60563.48811698526 }, { "content": "#[derive(RustEmbed)]\n\n#[folder = \"../i18n/\"]\n\nstruct Localizations;\n\n\n\npub static LANGUAGE_LOADER: Lazy<FluentLanguageLoader> = Lazy::new(|| {\n\n let loader: FluentLanguageLoader = fluent_language_loader!();\n\n\n\n loader.load_fallback_language(&Localizations).expect(\"Error while loading fallback language\");\n\n\n\n loader\n\n});\n\n\n\n#[macro_export]\n\nmacro_rules! fl {\n\n ($message_id:literal) => {{\n\n i18n_embed_fl::fl!($crate::localize::LANGUAGE_LOADER, $message_id)\n\n }};\n\n\n\n ($message_id:literal, $($args:expr),*) => {{\n\n i18n_embed_fl::fl!($crate::localize::LANGUAGE_LOADER, $message_id, $($args), *)\n\n }};\n\n}\n\n\n", "file_path": "gtk/src/localize.rs", "rank": 15, "score": 50483.49193077899 }, { "content": "#[derive(RustEmbed)]\n\n#[folder = \"../i18n/\"]\n\nstruct Localizations;\n\n\n\npub static LANGUAGE_LOADER: Lazy<FluentLanguageLoader> = Lazy::new(|| {\n\n let loader: FluentLanguageLoader = fluent_language_loader!();\n\n\n\n loader.load_fallback_language(&Localizations).expect(\"Error while loading fallback language\");\n\n\n\n loader\n\n});\n\n\n\n#[macro_export]\n\nmacro_rules! 
fl {\n\n ($message_id:literal) => {{\n\n i18n_embed_fl::fl!($crate::localize::LANGUAGE_LOADER, $message_id)\n\n }};\n\n\n\n ($message_id:literal, $($args:expr),*) => {{\n\n i18n_embed_fl::fl!($crate::localize::LANGUAGE_LOADER, $message_id, $($args), *)\n\n }};\n\n}\n\n\n", "file_path": "cli/src/localize.rs", "rank": 16, "score": 50483.49193077899 }, { "content": "fn main() {\n\n for i in fs::read_dir(\"assets\").unwrap() {\n\n println!(\"cargo:rerun-if-changed={}\", i.unwrap().path().display());\n\n }\n\n\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n\n\n let status = Command::new(\"glib-compile-resources\")\n\n .arg(\"--sourcedir=assets\")\n\n .arg(&format!(\"--target={}/compiled.gresource\", out_dir))\n\n .arg(\"assets/resources.gresource.xml\")\n\n .status()\n\n .unwrap();\n\n\n\n if !status.success() {\n\n eprintln!(\"glib-compile-resources failed with exit status {}\", status);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "gtk/build.rs", "rank": 17, "score": 43171.042398144156 }, { "content": "#[test]\n\nfn ipc() {\n\n executor::block_on(async move {\n\n let expected = vec![\n\n Message::Size(2229190656),\n\n Message::Device(\"/dev/sdb\".into()),\n\n Message::Device(\"/dev/sda\".into()),\n\n Message::Set(\"/dev/sda\".into(), 589824),\n\n Message::Set(\"/dev/sdb\".into(), 589824),\n\n Message::Set(\"/dev/sdb\".into(), 384434176),\n\n Message::Set(\"/dev/sda\".into(), 1669005312),\n\n Message::Set(\"/dev/sdb\".into(), 2228748288),\n\n Message::Set(\"/dev/sda\".into(), 0),\n\n Message::Message(\"/dev/sda\".into(), \"S\".into()),\n\n Message::Set(\"/dev/sdb\".into(), 0),\n\n Message::Message(\"/dev/sdb\".into(), \"S\".into()),\n\n Message::Set(\"/dev/sda\".into(), 0),\n\n Message::Message(\"/dev/sda\".into(), \"V\".into()),\n\n Message::Set(\"/dev/sdb\".into(), 0),\n\n Message::Message(\"/dev/sdb\".into(), \"V\".into()),\n\n Message::Finished(\"/dev/sda\".into()),\n", "file_path": "tests/ipc.rs", "rank": 18, "score": 43171.042398144156 }, { "content": 
"fn translate() {\n\n let requested_languages = DesktopLanguageRequester::requested_languages();\n\n let localizer = crate::localize::localizer();\n\n\n\n if let Err(error) = localizer.select(&requested_languages) {\n\n eprintln!(\"Error while loading languages for popsicle-cli {}\", error);\n\n }\n\n}\n", "file_path": "cli/src/main.rs", "rank": 19, "score": 42198.540107270484 }, { "content": "fn main() {\n\n translate();\n\n better_panic::install();\n\n\n\n let arg_image = fl!(\"arg-image\");\n\n let arg_disks = fl!(\"arg-disks\");\n\n\n\n let matches = App::new(env!(\"CARGO_PKG_NAME\"))\n\n .about(env!(\"CARGO_PKG_DESCRIPTION\"))\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .arg(Arg::with_name(&arg_image).help(&fl!(\"arg-image-desc\")).required(true))\n\n .arg(Arg::with_name(&arg_disks).help(&fl!(\"arg-disks-desc\")).multiple(true))\n\n .arg(Arg::with_name(\"all\").help(&fl!(\"arg-all-desc\")).short(\"a\").long(\"all\"))\n\n .arg(Arg::with_name(\"check\").help(&fl!(\"arg-check-desc\")).short(\"c\").long(\"check\"))\n\n .arg(Arg::with_name(\"unmount\").help(&fl!(\"arg-unmount-desc\")).short(\"u\").long(\"unmount\"))\n\n .arg(Arg::with_name(\"yes\").help(&fl!(\"arg-yes-desc\")).short(\"y\").long(\"yes\"))\n\n .get_matches();\n\n\n\n let (rtx, rrx) = oneshot::channel::<anyhow::Result<()>>();\n\n\n", "file_path": "cli/src/main.rs", "rank": 20, "score": 42198.540107270484 }, { "content": "fn main() {\n\n let localizer = crate::localize::localizer();\n\n let requested_languages = DesktopLanguageRequester::requested_languages();\n\n\n\n if let Err(error) = localizer.select(&requested_languages) {\n\n eprintln!(\"Error while loading languages for library_fluent {}\", error);\n\n }\n\n\n\n gtk::init().unwrap();\n\n\n\n gresource::init().expect(\"failed to init popsicle gresource\");\n\n\n\n glib::set_program_name(\"Popsicle\".into());\n\n glib::set_application_name(\"Popsicle\");\n\n\n\n let app = App::new(State::new());\n\n\n\n if let Some(iso_argument) = 
env::args().nth(1) {\n\n let path = PathBuf::from(iso_argument);\n\n if path.extension().map_or(false, |ext| {\n", "file_path": "gtk/src/main.rs", "rank": 21, "score": 42198.540107270484 }, { "content": "/// Downgrades the permissions of the current thread to the specified user and group ID.\n\nfn downgrade_permissions(uid: u32, gid: u32) {\n\n unsafe {\n\n libc::setresgid(gid, gid, gid);\n\n libc::setresuid(uid, uid, uid);\n\n }\n\n}\n", "file_path": "gtk/src/app/state/mod.rs", "rank": 22, "score": 33529.00106217345 }, { "content": " check: bool,\n\n}\n\n\n\nimpl<P: Progress> Task<P> {\n\n /// Performs the asynchronous USB device flashing.\n\n pub async fn process(mut self, buf: &mut [u8]) -> anyhow::Result<()> {\n\n self.copy(buf).await.context(\"failed to copy ISO\")?;\n\n\n\n if self.check {\n\n self.seek().await.context(\"failed to seek devices to start\")?;\n\n self.validate(buf).await.context(\"validation error\")?;\n\n }\n\n\n\n for (_, pb) in self.state.values_mut() {\n\n pb.finish();\n\n }\n\n\n\n Ok(())\n\n }\n\n\n", "file_path": "src/task.rs", "rank": 23, "score": 32541.142776707555 }, { "content": " pub fn subscribe(&mut self, file: File, device: P::Device, progress: P) -> &mut Self {\n\n let entity = self.writer.insert(file);\n\n self.state.insert(entity, (device, progress));\n\n self\n\n }\n\n\n\n async fn copy(&mut self, buf: &mut [u8]) -> anyhow::Result<()> {\n\n let mut stream = self.writer.copy(&mut self.image, buf);\n\n let mut total = 0;\n\n let mut last = Instant::now();\n\n while let Some(event) = stream.next().await {\n\n match event {\n\n CopyEvent::Progress(written) => {\n\n total += written as u64;\n\n let now = Instant::now();\n\n if now.duration_since(last).as_millis() > self.millis_between as u128 {\n\n last = now;\n\n for (_, pb) in self.state.values_mut() {\n\n pb.set(total);\n\n }\n", "file_path": "src/task.rs", "rank": 24, "score": 32540.770659578597 }, { "content": " }\n\n }\n\n CopyEvent::Failure(entity, why) => {\n\n let 
(device, mut pb) = self.state.remove(&entity).expect(\"missing entity\");\n\n pb.message(&device, \"E\", &format!(\"{}\", why));\n\n pb.finish();\n\n }\n\n CopyEvent::SourceFailure(why) => {\n\n for (device, pb) in self.state.values_mut() {\n\n pb.message(&device, \"E\", &format!(\"{}\", why));\n\n pb.finish();\n\n }\n\n\n\n return Err(why).context(\"error reading from source\");\n\n }\n\n CopyEvent::NoWriters => return Err(anyhow!(\"no writers left\")),\n\n }\n\n }\n\n\n\n Ok(())\n", "file_path": "src/task.rs", "rank": 25, "score": 32536.884809445124 }, { "content": " async fn validate(&mut self, buf: &mut [u8]) -> anyhow::Result<()> {\n\n for (path, pb) in self.state.values_mut() {\n\n pb.set(0);\n\n pb.message(&path, \"V\", \"\");\n\n }\n\n\n\n let copy_bufs = &mut Vec::new();\n\n let mut total = 0;\n\n let mut stream = self.writer.validate(&mut self.image, buf, copy_bufs);\n\n\n\n while let Some(event) = stream.next().await {\n\n match event {\n\n ValidationEvent::Progress(written) => {\n\n total += written as u64;\n\n for (_, pb) in self.state.values_mut() {\n\n pb.set(total);\n\n }\n\n }\n\n ValidationEvent::Failure(entity, why) => {\n\n let (path, mut pb) = self.state.remove(&entity).expect(\"missing entity\");\n", "file_path": "src/task.rs", "rank": 26, "score": 32535.547225885774 }, { "content": " pb.message(&path, \"E\", &format!(\"{}\", why));\n\n pb.finish();\n\n }\n\n ValidationEvent::SourceFailure(why) => {\n\n for (path, pb) in self.state.values_mut() {\n\n pb.message(&path, \"E\", &format!(\"error reading from source: {}\", why));\n\n pb.finish();\n\n }\n\n\n\n return Err(why).context(\"error reading from source\");\n\n }\n\n ValidationEvent::NoWriters => return Err(anyhow!(\"no writers left\")),\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/task.rs", "rank": 27, "score": 32531.17343061054 }, { "content": " }\n\n\n\n async fn seek(&mut self) -> anyhow::Result<()> {\n\n for (path, pb) in self.state.values_mut() {\n\n pb.set(0);\n\n 
pb.message(&path, \"S\", \"\");\n\n }\n\n\n\n self.image.seek(SeekFrom::Start(0)).await?;\n\n\n\n let mut stream = self.writer.seek(SeekFrom::Start(0));\n\n while let Some((entity, why)) = stream.next().await {\n\n let (path, mut pb) = self.state.remove(&entity).expect(\"missing entity\");\n\n pb.message(&path, \"E\", &format!(\"errored seeking to start: {}\", why));\n\n pb.finish();\n\n }\n\n\n\n Ok(())\n\n }\n\n\n", "file_path": "src/task.rs", "rank": 28, "score": 32529.55871197138 }, { "content": "use anyhow::Context;\n\nuse async_std::{fs::File, prelude::*};\n\nuse srmw::*;\n\nuse std::{collections::HashMap, io::SeekFrom, time::Instant};\n\n\n", "file_path": "src/task.rs", "rank": 29, "score": 32523.24877304862 }, { "content": "fn set_hash_widget(state: &State, ui: &GtkUi) {\n\n let hash = &ui.content.image_view.hash;\n\n\n\n let path = state.image_path.borrow();\n\n let kind = match hash.active() {\n\n Some(1) => \"SHA256\",\n\n Some(2) => \"SHA1\",\n\n Some(3) => \"MD5\",\n\n _ => return,\n\n };\n\n\n\n ui.content.image_view.chooser_container.set_visible_child_name(\"checksum\");\n\n ui.content.image_view.set_hash_sensitive(false);\n\n\n\n let _ = state.back_event_tx.send(BackgroundEvent::GenerateHash(PathBuf::from(&*path), kind));\n\n}\n", "file_path": "gtk/src/app/signals/images.rs", "rank": 30, "score": 32322.777557337056 }, { "content": "use super::View;\n\nuse crate::fl;\n\n\n\npub struct ErrorView {\n\n pub view: View,\n\n}\n\n\n\nimpl ErrorView {\n\n pub fn new() -> ErrorView {\n\n ErrorView { view: View::new(\"dialog-error\", &fl!(\"critical-error\"), \"\", |_| ()) }\n\n }\n\n}\n", "file_path": "gtk/src/app/views/error.rs", "rank": 31, "score": 29784.680363210184 }, { "content": " self.list.show_all();\n\n }\n\n\n\n pub fn reset(&self) {\n\n self.select_all.set_active(false);\n\n self.get_buttons().for_each(|c| c.set_active(false));\n\n }\n\n\n\n pub fn connect_view_ready<F: Fn(bool) + 'static>(&self, func: F) {\n\n *self.view_ready.borrow_mut() = 
Box::new(func);\n\n }\n\n}\n", "file_path": "gtk/src/app/views/devices.rs", "rank": 32, "score": 29420.547112842192 }, { "content": "\n\n DevicesView { view, list, select_all, view_ready }\n\n }\n\n\n\n pub fn get_buttons(&self) -> impl Iterator<Item = gtk::CheckButton> {\n\n self.list\n\n .children()\n\n .into_iter()\n\n .filter_map(|row| row.downcast::<gtk::ListBoxRow>().ok())\n\n .filter_map(|row| row.children().get(0).cloned())\n\n .filter_map(|row| row.downcast::<gtk::CheckButton>().ok())\n\n }\n\n\n\n pub fn is_active_ids(&self) -> impl Iterator<Item = usize> {\n\n self.get_buttons()\n\n .enumerate()\n\n .filter_map(|(id, button)| if button.is_active() { Some(id) } else { None })\n\n }\n\n\n\n pub fn refresh(&self, devices: &[Arc<DiskDevice>], image_size: u64) {\n", "file_path": "gtk/src/app/views/devices.rs", "rank": 33, "score": 29418.408976723396 }, { "content": "use crate::app::App;\n\nuse gtk::prelude::*;\n\n\n\nimpl App {\n\n pub fn connect_view_ready(&self) {\n\n let next = self.ui.header.next.clone();\n\n self.ui.content.devices_view.connect_view_ready(move |ready| next.set_sensitive(ready));\n\n }\n\n}\n", "file_path": "gtk/src/app/signals/devices.rs", "rank": 34, "score": 29414.10960370046 }, { "content": " let row = cascade! {\n\n gtk::CheckButton::new();\n\n ..set_sensitive(valid_size);\n\n ..add(&cascade! 
{\n\n gtk::Label::new(Some(name.as_str()));\n\n ..set_use_markup(true);\n\n });\n\n ..connect_toggled(move |button| {\n\n if button.is_active() {\n\n nselected.set(nselected.get() + 1);\n\n } else {\n\n nselected.set(nselected.get() - 1);\n\n }\n\n\n\n (*view_ready.borrow())(nselected.get() != 0);\n\n });\n\n };\n\n self.list.insert(&row, -1);\n\n }\n\n\n", "file_path": "gtk/src/app/views/devices.rs", "rank": 35, "score": 29412.171368678235 }, { "content": " self.list.foreach(|w| self.list.remove(w));\n\n\n\n let nselected = Rc::new(Cell::new(0));\n\n\n\n for device in devices {\n\n let valid_size = device.parent.size >= image_size;\n\n\n\n let label = &misc::device_label(&device);\n\n\n\n let size_str = bytesize::to_string(device.parent.size, true);\n\n let name = if valid_size {\n\n format!(\"<b>{}</b>\\n{}\", label, size_str)\n\n } else {\n\n let too_small = fl!(\"device-too-small\");\n\n format!(\"<b>{}</b>\\n{}: <b>{}</b>\", label, size_str, too_small)\n\n };\n\n\n\n let view_ready = self.view_ready.clone();\n\n let nselected = nselected.clone();\n\n\n", "file_path": "gtk/src/app/views/devices.rs", "rank": 36, "score": 29412.052594774686 }, { "content": "use super::View;\n\nuse crate::fl;\n\nuse crate::misc;\n\nuse dbus_udisks2::DiskDevice;\n\nuse gtk;\n\nuse gtk::prelude::*;\n\nuse std::cell::{Cell, RefCell};\n\nuse std::rc::Rc;\n\nuse std::sync::Arc;\n\n\n\nuse bytesize;\n\n\n", "file_path": "gtk/src/app/views/devices.rs", "rank": 37, "score": 29406.569418241317 }, { "content": " gtk::Box::new(gtk::Orientation::Vertical, 0);\n\n ..add(&select_all);\n\n ..add(&list);\n\n };\n\n\n\n let select_scroller = cascade! 
{\n\n gtk::ScrolledWindow::new(gtk::Adjustment::NONE, gtk::Adjustment::NONE);\n\n ..set_hexpand(true);\n\n ..set_vexpand(true);\n\n ..add(&list_box);\n\n };\n\n\n\n let view = View::new(\n\n \"drive-removable-media-usb\",\n\n &fl!(\"devices-view-title\"),\n\n &fl!(\"devices-view-description\"),\n\n |right_panel| right_panel.add(&select_scroller),\n\n );\n\n\n\n let view_ready: ViewReadySignal = Rc::new(RefCell::new(Box::new(|_| ())));\n", "file_path": "gtk/src/app/views/devices.rs", "rank": 38, "score": 29406.149045952912 }, { "content": " let select_all = cascade! {\n\n gtk::CheckButton::with_label(&fl!(\"select-all\"));\n\n ..set_margin_start(4);\n\n ..set_margin_bottom(3);\n\n ..connect_toggled(move |all| {\n\n let state = all.is_active();\n\n\n\n for row in list_.children() {\n\n if let Ok(row) = row.downcast::<gtk::ListBoxRow>() {\n\n if let Some(widget) = row.children().get(0) {\n\n if let Some(button) = widget.downcast_ref::<gtk::CheckButton>() {\n\n button.set_active(button.get_sensitive() && state);\n\n }\n\n }\n\n }\n\n }\n\n });\n\n };\n\n\n\n let list_box = cascade! 
{\n", "file_path": "gtk/src/app/views/devices.rs", "rank": 39, "score": 29405.308334907193 }, { "content": "/// Tracks progress\n\n#[derive(new)]\n\npub struct MachineProgress {\n\n id: usize,\n\n\n\n handle: mpsc::UnboundedSender<Event>,\n\n}\n\n\n\nimpl Progress for MachineProgress {\n\n type Device = Box<Path>;\n\n\n\n fn message(&mut self, _path: &Box<Path>, kind: &str, message: &str) {\n\n let _ = self.handle.unbounded_send(Event::Message(\n\n self.id,\n\n if message.is_empty() { kind.into() } else { [kind, \" \", message].concat().into() },\n\n ));\n\n }\n\n\n\n fn finish(&mut self) {\n\n let _ = self.handle.unbounded_send(Event::Finished(self.id));\n", "file_path": "cli/src/main.rs", "rank": 41, "score": 33.21088734650443 }, { "content": " }\n\n\n\n fn set(&mut self, written: u64) {\n\n let _ = self.handle.unbounded_send(Event::Set(self.id, written));\n\n }\n\n}\n\n\n\n#[derive(new)]\n\npub struct InteractiveProgress {\n\n pipe: ProgressBar<Pipe>,\n\n}\n\n\n\nimpl Progress for InteractiveProgress {\n\n type Device = Box<Path>;\n\n\n\n fn message(&mut self, path: &Box<Path>, kind: &str, message: &str) {\n\n self.pipe.message(&format!(\"{} {}: {}\", kind, path.display(), message));\n\n }\n\n\n\n fn finish(&mut self) {\n", "file_path": "cli/src/main.rs", "rank": 42, "score": 32.700881147503424 }, { "content": " Set(PathBuf, u64),\n\n Size(u64),\n\n}\n\n\n\n/// A decoder for creating a stream of messages from a reader\n\n///\n\n/// ```ignore\n\n/// use futures_code::FramedRead;\n\n///\n\n/// FramedRead::new(pipe_reader, PopsicleDecoder::default())\n\n/// ```\n\n#[derive(Default)]\n\npub struct PopsicleDecoder;\n\n\n\nimpl Decoder for PopsicleDecoder {\n\n type Item = Message;\n\n type Error = Error;\n\n\n\n fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n match memchr(b'\\n', src) {\n", "file_path": "src/codec.rs", "rank": 46, "score": 27.998231749787653 }, { "content": "use futures_codec::{BytesMut, Decoder};\n\nuse 
memchr::memchr;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{io, path::PathBuf};\n\n\n\n/// Errors that may occur when decoding the IPC stream.\n\n#[derive(Debug, Error)]\n\npub enum Error {\n\n #[error(\"failed to decode popsicle message: {{\\n {}\\n}}\", input)]\n\n Decode { input: Box<str>, source: ron::de::Error },\n\n #[error(\"reading from popsicle stream failed\")]\n\n Read(#[from] io::Error),\n\n}\n\n\n\n/// Popsicle's IPC protocol\n\n#[derive(Debug, Deserialize, PartialEq, Serialize)]\n\npub enum Message {\n\n Device(PathBuf),\n\n Finished(PathBuf),\n\n Message(PathBuf, String),\n", "file_path": "src/codec.rs", "rank": 47, "score": 26.57021803904995 }, { "content": "#[macro_use]\n\nextern crate anyhow;\n\n#[macro_use]\n\nextern crate derive_new;\n\n#[macro_use]\n\nextern crate thiserror;\n\n\n\npub extern crate mnt;\n\n\n\npub mod codec;\n\n\n\nmod task;\n\n\n\npub use self::task::{Progress, Task};\n\n\n\nuse anyhow::Context;\n\nuse as_result::MapResult;\n\nuse async_std::{\n\n fs::{self, File, OpenOptions},\n\n os::unix::fs::OpenOptionsExt,\n", "file_path": "src/lib.rs", "rank": 49, "score": 25.034192952915667 }, { "content": "\n\n // Only attempt to refresh the devices if the last refresh was >= 500ms ago.\n\n let time_since = now.duration_since(last_device_refresh);\n\n if time_since.as_secs() > 1 || time_since.subsec_millis() >= 500 {\n\n last_device_refresh = now;\n\n\n\n let mut all_tasks_finished = true;\n\n let length = state.image_size.load(Ordering::SeqCst);\n\n let tasks = tasks.as_mut().expect(\"no flash task\");\n\n let mut previous = tasks.previous.lock().expect(\"mutex lock\");\n\n\n\n for (id, &(ref pbar, ref label)) in flashing_devices.iter().enumerate()\n\n {\n\n let prev_values = &mut previous[id];\n\n let progress = &tasks.progress[id];\n\n let finished = &tasks.finished[id];\n\n\n\n let raw_value = progress.load(Ordering::SeqCst);\n\n let task_is_finished = finished.load(Ordering::SeqCst);\n\n let value = if task_is_finished 
{\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 50, "score": 23.960667943380166 }, { "content": "use crate::app::events::{self, BackgroundEvent, UiEvent};\n\nuse atomic::Atomic;\n\nuse crossbeam_channel::{unbounded, Receiver, Sender};\n\nuse dbus_udisks2::DiskDevice;\n\nuse libc;\n\nuse std::cell::{Cell, RefCell};\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::path::PathBuf;\n\nuse std::sync::Arc;\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ActiveView {\n\n Images,\n\n Devices,\n\n Flashing,\n\n Summary,\n\n Error,\n\n}\n\n\n", "file_path": "gtk/src/app/state/mod.rs", "rank": 51, "score": 23.879880039983163 }, { "content": "mod devices;\n\nmod images;\n\n\n\nuse crate::app::events::{BackgroundEvent, UiEvent};\n\nuse crate::app::state::ActiveView;\n\nuse crate::app::App;\n\nuse crate::fl;\n\nuse crate::flash::{FlashRequest, FlashStatus, FlashTask};\n\nuse crate::misc;\n\nuse atomic::Atomic;\n\nuse crossbeam_channel::TryRecvError;\n\nuse gtk::{self, prelude::*};\n\nuse iso9660::ISO9660;\n\nuse std::fmt::Write;\n\nuse std::fs::File;\n\nuse std::sync::atomic::Ordering;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::time::{Duration, Instant};\n\n\n\nimpl App {\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 52, "score": 21.740288075111337 }, { "content": " drop(etx);\n\n\n\n let task = async move {\n\n let buf = &mut [0u8; 64 * 1024];\n\n let _ = rtx.send(task.process(buf).await);\n\n };\n\n\n\n join!(machine_output(erx, &paths, image_size), task);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// An event for creating a machine-readable output\n\npub enum Event {\n\n Message(usize, Box<str>),\n\n Finished(usize),\n\n Set(usize, u64),\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 53, "score": 21.594616920536467 }, { "content": "pub struct State {\n\n pub ui_event_tx: Sender<UiEvent>,\n\n pub ui_event_rx: Receiver<UiEvent>,\n\n pub back_event_tx: Sender<BackgroundEvent>,\n\n\n\n pub active_view: Cell<ActiveView>,\n\n\n\n pub image: 
RefCell<Option<File>>,\n\n pub image_path: RefCell<PathBuf>,\n\n pub image_size: Arc<Atomic<u64>>,\n\n\n\n pub available_devices: RefCell<Box<[Arc<DiskDevice>]>>,\n\n pub selected_devices: RefCell<Vec<Arc<DiskDevice>>>,\n\n}\n\n\n\nimpl State {\n\n pub fn new() -> Self {\n\n let (back_event_tx, back_event_rx) = unbounded();\n\n let (ui_event_tx, ui_event_rx) = unbounded();\n\n\n", "file_path": "gtk/src/app/state/mod.rs", "rank": 54, "score": 21.57479062878825 }, { "content": " Verify { disk: Box<Path>, why: io::Error },\n\n #[error(\"error verifying disk '{}': reached EOF\", disk.display())]\n\n VerifyEOF { disk: Box<Path> },\n\n #[error(\"error verifying disk '{}': mismatch at {}:{}\", disk.display(), x, y)]\n\n VerifyMismatch { disk: Box<Path>, x: usize, y: usize },\n\n}\n\n\n\npub async fn usb_disk_devices(disks: &mut Vec<Box<Path>>) -> anyhow::Result<()> {\n\n let mut stream = UsbDiskProbe::new().await.context(\"failed to create USB disk probe\")?;\n\n\n\n while let Some(device_result) = stream.next().await {\n\n match device_result {\n\n Ok(disk) => disks.push(disk),\n\n Err(why) => {\n\n eprintln!(\"failed to reach device path: {}\", why);\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Stores all discovered USB disk paths into the supplied `disks` vector.\n", "file_path": "src/lib.rs", "rank": 55, "score": 20.19322894176363 }, { "content": "use crate::fl;\n\nuse gtk::{prelude::*, *};\n\nuse std::path::PathBuf;\n\n\n\n/// A wrapped FileChooserNative that automatically destroys itself upon being dropped.\n\npub struct OpenDialog(FileChooserNative);\n\n\n\nimpl OpenDialog {\n\n pub fn new(path: Option<PathBuf>) -> OpenDialog {\n\n #[allow(unused_mut)]\n\n OpenDialog(cascade! {\n\n let dialog = FileChooserNative::new(\n\n Some(&fl!(\"open\")),\n\n Some(&Window::new(WindowType::Popup)),\n\n FileChooserAction::Open,\n\n Some(&fl!(\"open\")),\n\n Some(&fl!(\"cancel\")),\n\n );\n\n ..set_filter(&cascade! 
{\n\n FileFilter::new();\n", "file_path": "gtk/src/app/widgets/dialogs.rs", "rank": 56, "score": 20.155902680411053 }, { "content": "use crate::flash::{FlashError, FlashRequest};\n\nuse crate::hash::hasher;\n\n\n\nuse crossbeam_channel::{Receiver, Sender};\n\nuse dbus_udisks2::{DiskDevice, Disks, UDisks2};\n\nuse md5::Md5;\n\nuse sha1::Sha1;\n\nuse sha2::Sha256;\n\nuse std::collections::HashMap;\n\nuse std::io;\n\nuse std::path::PathBuf;\n\nuse std::sync::Arc;\n\nuse std::thread::{self, JoinHandle};\n\n\n\npub enum UiEvent {\n\n SetImageLabel(PathBuf),\n\n RefreshDevices(Box<[Arc<DiskDevice>]>),\n\n SetHash(io::Result<String>),\n\n Flash(JoinHandle<anyhow::Result<(anyhow::Result<()>, Vec<Result<(), FlashError>>)>>),\n\n Reset,\n\n}\n\n\n\npub enum BackgroundEvent {\n\n GenerateHash(PathBuf, &'static str),\n\n Flash(FlashRequest),\n\n RefreshDevices,\n\n}\n\n\n", "file_path": "gtk/src/app/events/mod.rs", "rank": 57, "score": 20.109477655566113 }, { "content": " #[error(\"unable to read image: {}\", why)]\n\n ReadError { why: io::Error },\n\n #[error(\"reached EOF prematurely\")]\n\n Eof,\n\n}\n\n\n\n#[derive(Debug, Error)]\n\n#[cfg_attr(rustfmt, rustfmt_skip)]\n\npub enum DiskError {\n\n #[error(\"failed to fetch devices from USB device stream: {}\", _0)]\n\n DeviceStream(anyhow::Error),\n\n #[error(\"unable to open directory at '{}': {}\", dir, why)]\n\n Directory { dir: &'static str, why: io::Error },\n\n #[error(\"writing to the device was killed\")]\n\n Killed,\n\n #[error(\"unable to read directory entry at '{}': invalid UTF-8\", dir.display())]\n\n UTF8 { dir: Box<Path> },\n\n #[error(\"unable to find disk '{}': {}\", disk.display(), why)]\n\n NoDisk { disk: Box<Path>, why: io::Error },\n\n #[error(\"failed to unmount {}: {}\", path.display(), why)]\n", "file_path": "src/lib.rs", "rank": 58, "score": 20.054123950144643 }, { "content": " context: &str,\n\n ) -> Result<T, ()> {\n\n result.map_err(|why| {\n\n 
self.content.error_view.view.description.set_text(&format!(\"{}: {}\", context, why));\n\n self.switch_to(state, ActiveView::Error);\n\n })\n\n }\n\n\n\n pub fn errorck_option<T>(\n\n &self,\n\n state: &State,\n\n result: Option<T>,\n\n context: &'static str,\n\n ) -> Result<T, ()> {\n\n result.ok_or_else(|| {\n\n self.content.error_view.view.description.set_text(&format!(\n\n \"{}: {}\",\n\n context,\n\n fl!(\"no-value-found\")\n\n ));\n", "file_path": "gtk/src/app/mod.rs", "rank": 59, "score": 19.614292126015226 }, { "content": " );\n\n\n\n let _ =\n\n state.back_event_tx.send(BackgroundEvent::Flash(FlashRequest::new(\n\n image,\n\n destinations,\n\n flash_status.clone(),\n\n progress.clone(),\n\n finished.clone(),\n\n )));\n\n\n\n tasks = Some(FlashTask {\n\n previous: Arc::new(Mutex::new(vec![[0; 7]; ndestinations])),\n\n progress,\n\n finished,\n\n });\n\n }\n\n // When the flashing view is active, and thus an image is flashing.\n\n None => {\n\n let now = Instant::now();\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 60, "score": 19.008579613992016 }, { "content": " let bar_container = cascade! 
{\n\n gtk::Box::new(gtk::Orientation::Vertical, 0);\n\n ..add(&pbar);\n\n ..add(&bar_label);\n\n };\n\n\n\n summary_grid.attach(&label, 0, id, 1, 1);\n\n summary_grid.attach(&bar_container, 1, id, 1, 1);\n\n\n\n flashing_devices.push((pbar, bar_label));\n\n destinations.push(device.clone());\n\n }\n\n\n\n summary_grid.show_all();\n\n let ndestinations = destinations.len();\n\n let progress = Arc::new(\n\n (0..ndestinations).map(|_| Atomic::new(0u64)).collect::<Vec<_>>(),\n\n );\n\n let finished = Arc::new(\n\n (0..ndestinations).map(|_| Atomic::new(false)).collect::<Vec<_>>(),\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 61, "score": 18.77809891445966 }, { "content": "use gtk::prelude::*;\n\nuse gtk::{self, Align, Image, Label, Orientation};\n\n\n\npub struct View {\n\n pub container: gtk::Box,\n\n pub icon: Image,\n\n pub topic: Label,\n\n pub description: Label,\n\n pub panel: gtk::Box,\n\n}\n\n\n\nimpl View {\n\n pub fn new<F: Fn(&gtk::Box)>(\n\n icon: &str,\n\n topic: &str,\n\n description: &str,\n\n configure_panel: F,\n\n ) -> View {\n\n let icon = Image::from_icon_name(Some(icon), gtk::IconSize::Dialog);\n\n icon.set_valign(Align::Start);\n", "file_path": "gtk/src/app/views/view.rs", "rank": 62, "score": 18.647733313699554 }, { "content": " path::{Path, PathBuf},\n\n};\n\nuse futures::{executor, prelude::*};\n\nuse mnt::MountEntry;\n\nuse std::{\n\n io,\n\n os::unix::{ffi::OsStrExt, fs::FileTypeExt},\n\n process::Command,\n\n};\n\nuse usb_disk_probe::stream::UsbDiskProbe;\n\n\n\n#[derive(Debug, Error)]\n\n#[cfg_attr(rustfmt, rustfmt_skip)]\n\npub enum ImageError {\n\n #[error(\"image could not be opened: {}\", why)]\n\n Open { why: io::Error },\n\n #[error(\"unable to get image metadata: {}\", why)]\n\n Metadata { why: io::Error },\n\n #[error(\"image was not a file\")]\n\n NotAFile,\n", "file_path": "src/lib.rs", "rank": 63, "score": 17.869750711298142 }, { "content": "use digest::Digest;\n\nuse hex_view::HexView;\n\nuse std::fs::File;\n\nuse 
std::io::{self, Read};\n\nuse std::path::Path;\n\n\n\npub(crate) fn hasher<H: Digest>(image: &Path) -> io::Result<String> {\n\n File::open(image).and_then(move |mut file| {\n\n let mut buffer = [0u8; 8 * 1024];\n\n let mut hasher = H::new();\n\n\n\n loop {\n\n let read = file.read(&mut buffer)?;\n\n if read == 0 {\n\n break;\n\n }\n\n hasher.update(&buffer[..read]);\n\n }\n\n\n\n Ok(format!(\"{:x}\", HexView::from(hasher.finalize().as_slice())))\n\n })\n\n}\n", "file_path": "gtk/src/hash.rs", "rank": 64, "score": 17.55829581713114 }, { "content": " };\n\n\n\n let mut errors = Vec::new();\n\n let mut selected_devices = state.selected_devices.borrow_mut();\n\n let ntasks = selected_devices.len();\n\n\n\n for (device, result) in\n\n selected_devices.drain(..).zip(results.into_iter())\n\n {\n\n if let Err(why) = result {\n\n errors.push((device, why));\n\n }\n\n }\n\n\n\n ui.switch_to(&state, ActiveView::Summary);\n\n let list = &ui.content.summary_view.list;\n\n let description = &ui.content.summary_view.view.description;\n\n\n\n if result.is_ok() && errors.is_empty() {\n\n let desc = fl!(\"successful-flash\", total = ntasks);\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 65, "score": 17.46924654198779 }, { "content": " for path in paths {\n\n \"Device(\\\"\" (path.display()) \"\\\")\\n\"\n\n }\n\n );\n\n\n\n while let Some(event) = rx.next().await {\n\n match event {\n\n Event::Message(id, message) => {\n\n let _ = witeln!(stdout, \"Message(\\\"\" (paths[id].display()) \"\\\",\\\"\" (message) \"\\\")\");\n\n }\n\n Event::Finished(id) => {\n\n let _ = witeln!(stdout, \"Finished(\\\"\" (paths[id].display()) \"\\\")\");\n\n }\n\n Event::Set(id, written) => {\n\n let _ = witeln!(stdout, \"Set(\\\"\" (paths[id].display()) \"\\\",\" (written) \")\");\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 66, "score": 17.064593106352824 }, { "content": " let ui = self.ui.clone();\n\n\n\n let mut last_device_refresh = Instant::now();\n\n let mut 
flashing_devices: Vec<(gtk::ProgressBar, gtk::Label)> = Vec::new();\n\n let flash_status = Arc::new(Atomic::new(FlashStatus::Inactive));\n\n let mut flash_handles = None;\n\n let mut tasks = None;\n\n\n\n glib::timeout_add_local(Duration::from_millis(16), move || {\n\n match state.ui_event_rx.try_recv() {\n\n Err(TryRecvError::Disconnected) => return Continue(false),\n\n Err(TryRecvError::Empty) => (),\n\n Ok(UiEvent::SetHash(hash)) => {\n\n ui.content.image_view.set_hash(&match hash {\n\n Ok(hash) => hash,\n\n Err(why) => fl!(\"error\", why = format!(\"{}\", why)),\n\n });\n\n ui.content.image_view.set_hash_sensitive(true);\n\n\n\n ui.content.image_view.chooser_container.set_visible_child_name(\"chooser\");\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 67, "score": 16.986328866462443 }, { "content": " }\n\n\n\n let check = matches.is_present(\"check\");\n\n\n\n // If this is a TTY, display a progress bar. If not, display machine-readable info.\n\n if is_tty {\n\n println!();\n\n\n\n let mb = MultiBar::new();\n\n let mut task = Task::new(image, check);\n\n\n\n for (disk_path, disk) in disks {\n\n let pb = InteractiveProgress::new(cascade! 
{\n\n mb.create_bar(image_size);\n\n ..set_units(Units::Bytes);\n\n ..message(&format!(\"W {}: \", disk_path.display()));\n\n });\n\n\n\n task.subscribe(disk, disk_path, pb);\n\n }\n", "file_path": "cli/src/main.rs", "rank": 68, "score": 16.964373328854855 }, { "content": " let mut disk_args = Vec::new();\n\n if matches.is_present(\"all\") {\n\n popsicle::usb_disk_devices(&mut disk_args)\n\n .await\n\n .with_context(|| fl!(\"error-disks-fetch\"))?;\n\n } else if let Some(disks) = matches.values_of(\"DISKS\") {\n\n disk_args.extend(disks.map(String::from).map(PathBuf::from).map(Box::from));\n\n }\n\n\n\n if disk_args.is_empty() {\n\n return Err(anyhow!(fl!(\"error-no-disks-specified\")));\n\n }\n\n\n\n let mounts = mnt::get_submounts(Path::new(\"/\"))\n\n .with_context(|| fl!(\"error-reading-mounts\"))?;\n\n\n\n let disks =\n\n popsicle::disks_from_args(disk_args.into_iter(), &mounts, matches.is_present(\"unmount\"))\n\n .await\n\n .with_context(|| fl!(\"error-opening-disks\"))?;\n", "file_path": "cli/src/main.rs", "rank": 69, "score": 15.857962064795908 }, { "content": " _ => Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"hash kind not supported\",\n\n )),\n\n };\n\n\n\n // If successful, cache the result.\n\n if let Ok(ref result) = result {\n\n hashed.insert((path.clone(), kind), result.clone());\n\n }\n\n\n\n // Send this result back to the main thread.\n\n let _ = events_tx.send(UiEvent::SetHash(result));\n\n }\n\n Ok(BackgroundEvent::RefreshDevices) => {\n\n // Fetch the current list of USB devices from popsicle.\n\n match refresh_devices() {\n\n Ok(devices) => {\n\n let new_device_paths: Vec<_> =\n\n devices.iter().map(|d| d.drive.path.clone()).collect();\n", "file_path": "gtk/src/app/events/mod.rs", "rank": 70, "score": 15.79006368717674 }, { "content": " };\n\n let attrs = AttrList::new();\n\n attrs.insert(fg);\n\n self.hash_label.set_attributes(&attrs);\n\n } else {\n\n self.hash_label.set_text(hash);\n\n }\n\n }\n\n\n\n pub fn set_image(&self, 
path: &Path, size: u64, warning: Option<&str>) {\n\n let size_str = bytesize::to_string(size, true);\n\n let mut label: String = match path.file_name() {\n\n Some(name) => format!(\"<b>{}</b>\\n{}\", name.to_string_lossy(), size_str),\n\n None => format!(\"<b>{}</b>\", fl!(\"cannot-select-directories\")),\n\n };\n\n\n\n if let Some(warning) = warning {\n\n let subject = fl!(\"warning\");\n\n label += &format!(\"\\n<span foreground='red'><b>{}</b>: {}</span>\", subject, warning);\n\n };\n\n\n\n self.image_path.set_markup(&label);\n\n }\n\n}\n", "file_path": "gtk/src/app/views/images.rs", "rank": 71, "score": 15.620874458445698 }, { "content": " };\n\n\n\n // Returns the header and all of it's state\n\n Header {\n\n container: cascade! {\n\n HeaderBar::new();\n\n ..set_title(Some(&fl!(\"app-title\")));\n\n ..pack_start(&back);\n\n ..pack_end(&next);\n\n },\n\n back,\n\n next,\n\n }\n\n }\n\n\n\n pub fn connect_back<F: Fn() + 'static>(&self, signal: F) {\n\n self.back.connect_clicked(move |_| signal());\n\n }\n\n\n\n pub fn connect_next<F: Fn() + 'static>(&self, signal: F) {\n\n self.next.connect_clicked(move |_| signal());\n\n }\n\n}\n", "file_path": "gtk/src/app/widgets/header.rs", "rank": 72, "score": 15.48553495903245 }, { "content": "use super::View;\n\nuse crate::fl;\n\nuse gtk::{prelude::*, *};\n\n\n\npub struct FlashView {\n\n pub view: View,\n\n pub progress_list: Grid,\n\n}\n\n\n\nimpl FlashView {\n\n pub fn new() -> FlashView {\n\n let progress_list = cascade! {\n\n Grid::new();\n\n ..set_row_spacing(6);\n\n ..set_column_spacing(6);\n\n };\n\n\n\n let progress_scroller = cascade! 
{\n\n ScrolledWindow::new(gtk::Adjustment::NONE, gtk::Adjustment::NONE);\n\n ..add(&progress_list);\n", "file_path": "gtk/src/app/views/flashing.rs", "rank": 73, "score": 15.45005410076497 }, { "content": "}\n\n\n\nimpl App {\n\n pub fn new(state: State) -> Self {\n\n if gtk::init().is_err() {\n\n eprintln!(\"failed to initialize GTK Application\");\n\n process::exit(1);\n\n }\n\n\n\n App { ui: Rc::new(GtkUi::new()), state: Arc::new(state) }\n\n }\n\n\n\n pub fn connect_events(self) -> Self {\n\n self.connect_back();\n\n self.connect_next();\n\n self.connect_ui_events();\n\n self.connect_image_chooser();\n\n self.connect_image_drag_and_drop();\n\n self.connect_hash();\n\n self.connect_view_ready();\n", "file_path": "gtk/src/app/mod.rs", "rank": 74, "score": 15.44137787995011 }, { "content": " UnmountCommand { path: Box<Path>, why: io::Error },\n\n #[error(\"error using disk '{}': {} already mounted at {}\", arg.display(), source_.display(), dest.display())]\n\n AlreadyMounted { arg: Box<Path>, source_: Box<Path>, dest: Box<Path> },\n\n #[error(\"'{}' is not a block device\", arg.display())]\n\n NotABlock { arg: Box<Path> },\n\n #[error(\"unable to get metadata of disk '{}': {}\", arg.display(), why)]\n\n Metadata { arg: Box<Path>, why: io::Error },\n\n #[error(\"unable to open disk '{}': {}\", disk.display(), why)]\n\n Open { disk: Box<Path>, why: io::Error },\n\n #[error(\"error writing disk '{}': {}\", disk.display(), why)]\n\n Write { disk: Box<Path>, why: io::Error },\n\n #[error(\"error writing disk '{}': reached EOF\", disk.display())]\n\n WriteEOF { disk: Box<Path> },\n\n #[error(\"unable to flush disk '{}': {}\", disk.display(), why)]\n\n Flush { disk: Box<Path>, why: io::Error },\n\n #[error(\"error seeking disk '{}': seeked to {} instead of 0\", disk.display(), invalid)]\n\n SeekInvalid { disk: Box<Path>, invalid: u64 },\n\n #[error(\"error seeking disk '{}': {}\", disk.display(), why)]\n\n Seek { disk: Box<Path>, why: io::Error },\n\n #[error(\"error 
verifying disk '{}': {}\", disk.display(), why)]\n", "file_path": "src/lib.rs", "rank": 75, "score": 15.323888666506345 }, { "content": " }\n\n }\n\n\n\n match state.active_view.get() {\n\n ActiveView::Devices => {\n\n let now = Instant::now();\n\n\n\n // Only attempt to refresh the devices if the last refresh was >= 3 seconds ago.\n\n if now.duration_since(last_device_refresh).as_secs() >= 3 {\n\n last_device_refresh = now;\n\n let _ = state.back_event_tx.send(BackgroundEvent::RefreshDevices);\n\n }\n\n }\n\n ActiveView::Flashing => match state.image.borrow_mut().take() {\n\n // When the flashing view is active, and an image has not started flashing.\n\n Some(image) => {\n\n let summary_grid = &ui.content.flash_view.progress_list;\n\n summary_grid.foreach(|w| summary_grid.remove(w));\n\n let mut destinations = Vec::new();\n\n\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 76, "score": 14.976463463965855 }, { "content": " Some(pos) => {\n\n let buf = src.split_to(pos + 1);\n\n match ron::de::from_bytes::<Self::Item>(&buf) {\n\n Ok(value) => Ok(Some(value)),\n\n Err(source) => Err(Error::Decode {\n\n input: String::from_utf8_lossy(&buf).into_owned().into(),\n\n source,\n\n }),\n\n }\n\n }\n\n None => Ok(None),\n\n }\n\n }\n\n}\n", "file_path": "src/codec.rs", "rank": 77, "score": 14.50638186832089 }, { "content": " pub error_view: ErrorView,\n\n pub flash_view: FlashView,\n\n pub summary_view: SummaryView,\n\n}\n\n\n\nimpl Content {\n\n pub fn new() -> Content {\n\n let image_view = ImageView::new();\n\n let devices_view = DevicesView::new();\n\n let flash_view = FlashView::new();\n\n let summary_view = SummaryView::new();\n\n let error_view = ErrorView::new();\n\n\n\n let container = cascade! 
{\n\n Stack::new();\n\n ..add(&image_view.view.container);\n\n ..add(&devices_view.view.container);\n\n ..add(&flash_view.view.container);\n\n ..add(&summary_view.view.container);\n\n ..add(&error_view.view.container);\n\n ..set_visible_child(&image_view.view.container);\n\n ..set_border_width(12);\n\n };\n\n\n\n Content { container, image_view, devices_view, flash_view, summary_view, error_view }\n\n }\n\n}\n", "file_path": "gtk/src/app/views/mod.rs", "rank": 78, "score": 14.357761346194003 }, { "content": " *state.image_path.borrow_mut() = path;\n\n }\n\n }\n\n Ok(UiEvent::RefreshDevices(devices)) => {\n\n let size = state.image_size.load(Ordering::SeqCst);\n\n ui.content.devices_view.refresh(&devices, size);\n\n *state.available_devices.borrow_mut() = devices;\n\n }\n\n Ok(UiEvent::Flash(handle)) => flash_handles = Some(handle),\n\n Ok(UiEvent::Reset) => {\n\n match flash_status.load(Ordering::SeqCst) {\n\n FlashStatus::Active => {\n\n flash_status.store(FlashStatus::Killing, Ordering::SeqCst)\n\n }\n\n FlashStatus::Inactive | FlashStatus::Killing => (),\n\n }\n\n\n\n flash_handles = None;\n\n tasks = None;\n\n flashing_devices.clear();\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 79, "score": 14.336247760224985 }, { "content": "mod devices;\n\nmod error;\n\nmod flashing;\n\nmod images;\n\nmod summary;\n\nmod view;\n\n\n\npub use self::devices::DevicesView;\n\npub use self::error::ErrorView;\n\npub use self::flashing::FlashView;\n\npub use self::images::ImageView;\n\npub use self::summary::SummaryView;\n\npub use self::view::View;\n\n\n\nuse gtk::{prelude::*, *};\n\n\n\npub struct Content {\n\n pub container: Stack,\n\n pub image_view: ImageView,\n\n pub devices_view: DevicesView,\n", "file_path": "gtk/src/app/views/mod.rs", "rank": 80, "score": 14.323722337896111 }, { "content": " if unmount {\n\n eprintln!(\n\n \"unmounting '{}': {:?} is mounted at {:?}\",\n\n disk_arg.display(),\n\n mount.spec,\n\n mount.file\n\n );\n\n\n\n 
Command::new(\"umount\").arg(&mount.spec).status().map_result().map_err(\n\n |why| DiskError::UnmountCommand {\n\n path: PathBuf::from(mount.spec.clone()).into_boxed_path(),\n\n why,\n\n },\n\n )?;\n\n } else {\n\n return Err(DiskError::AlreadyMounted {\n\n arg: disk_arg.clone(),\n\n source_: PathBuf::from(mount.spec.clone()).into_boxed_path(),\n\n dest: PathBuf::from(mount.file.clone()).into_boxed_path(),\n\n });\n", "file_path": "src/lib.rs", "rank": 81, "score": 14.252297634142753 }, { "content": " let selected_devices = state.selected_devices.borrow_mut();\n\n for (id, device) in selected_devices.iter().enumerate() {\n\n let id = id as i32;\n\n\n\n let pbar = cascade! {\n\n gtk::ProgressBar::new();\n\n ..set_hexpand(true);\n\n };\n\n\n\n let label = cascade! {\n\n gtk::Label::new(Some(&misc::device_label(&device)));\n\n ..set_justify(gtk::Justification::Right);\n\n ..style_context().add_class(\"bold\");\n\n };\n\n\n\n let bar_label = cascade! {\n\n gtk::Label::new(None);\n\n ..set_halign(gtk::Align::Center);\n\n };\n\n\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 82, "score": 14.156656842584262 }, { "content": " // If running in pkexec or sudo, restore home directory for open dialog,\n\n // and then downgrade permissions back to a regular user.\n\n if let Ok(pkexec_uid) = env::var(\"PKEXEC_UID\").or_else(|_| env::var(\"SUDO_UID\")) {\n\n if let Ok(uid) = pkexec_uid.parse::<u32>() {\n\n if let Some(passwd) = pwd::Passwd::from_uid(uid) {\n\n env::set_var(\"HOME\", passwd.dir);\n\n downgrade_permissions(passwd.uid, passwd.gid);\n\n }\n\n }\n\n }\n\n\n\n events::background_thread(ui_event_tx.clone(), back_event_rx);\n\n\n\n Self {\n\n ui_event_rx,\n\n ui_event_tx,\n\n back_event_tx,\n\n active_view: Cell::new(ActiveView::Images),\n\n image: RefCell::new(None),\n\n image_path: RefCell::new(PathBuf::new()),\n\n image_size: Arc::new(Atomic::new(0u64)),\n\n available_devices: RefCell::new(Box::new([])),\n\n selected_devices: RefCell::new(Vec::new()),\n\n 
}\n\n }\n\n}\n\n\n\n/// Downgrades the permissions of the current thread to the specified user and group ID.\n", "file_path": "gtk/src/app/state/mod.rs", "rank": 83, "score": 13.794168576224582 }, { "content": " Ok(file) => *state.image.borrow_mut() = Some(file),\n\n Err(()) => return,\n\n };\n\n\n\n let all_devices = state.available_devices.borrow();\n\n let mut devices = state.selected_devices.borrow_mut();\n\n\n\n devices.clear();\n\n\n\n for active_id in self.content.devices_view.is_active_ids() {\n\n devices.push(all_devices[active_id].clone());\n\n }\n\n\n\n back_ctx.remove_class(\"back-button\");\n\n back_ctx.add_class(&gtk::STYLE_CLASS_DESTRUCTIVE_ACTION);\n\n\n\n next.set_visible(false);\n\n &self.content.flash_view.view.container\n\n }\n\n ActiveView::Summary => {\n", "file_path": "gtk/src/app/mod.rs", "rank": 84, "score": 13.689318797717442 }, { "content": " let per_second = sum / 3;\n\n label.set_label(&format!(\n\n \"{}/s\",\n\n bytesize::to_string(per_second, true)\n\n ));\n\n }\n\n }\n\n\n\n drop(previous);\n\n\n\n if all_tasks_finished {\n\n eprintln!(\"all tasks finished\");\n\n\n\n let taken_handles = match ui.errorck_option(\n\n &state,\n\n flash_handles.take(),\n\n \"Taking flash handles failed\",\n\n ) {\n\n Ok(results) => {\n\n results.join().map_err(|why| format!(\"{:?}\", why))\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 85, "score": 13.650718064278013 }, { "content": "async fn popsicle(\n\n rtx: oneshot::Sender<anyhow::Result<()>>,\n\n matches: ArgMatches<'_>,\n\n) -> anyhow::Result<()> {\n\n let image_path =\n\n matches.value_of(&fl!(\"arg-image\")).with_context(|| fl!(\"error-image-not-set\"))?;\n\n\n\n let image = OpenOptions::new()\n\n .custom_flags(libc::O_SYNC)\n\n .read(true)\n\n .open(image_path)\n\n .await\n\n .with_context(|| fl!(\"error-image-open\", image_path = image_path.clone()))?;\n\n\n\n let image_size = image\n\n .metadata()\n\n .await\n\n .map(|x| x.len())\n\n .with_context(|| fl!(\"error-image-metadata\", 
image_path = image_path.clone()))?;\n\n\n", "file_path": "cli/src/main.rs", "rank": 86, "score": 13.623560616657844 }, { "content": " pub fn connect_next(&self) {\n\n let state = self.state.clone();\n\n let ui = self.ui.clone();\n\n\n\n self.ui.header.connect_next(move || {\n\n let next = match state.active_view.get() {\n\n ActiveView::Images => ActiveView::Devices,\n\n ActiveView::Devices => ActiveView::Flashing,\n\n _ => {\n\n gtk::main_quit();\n\n return;\n\n }\n\n };\n\n\n\n ui.switch_to(&state, next);\n\n });\n\n }\n\n\n\n pub fn connect_ui_events(&self) {\n\n let state = self.state.clone();\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 87, "score": 13.083908718638746 }, { "content": " }\n\n }\n\n }\n\n\n\n let metadata = canonical_path\n\n .metadata()\n\n .await\n\n .map_err(|why| DiskError::Metadata { arg: disk_arg.clone(), why })?;\n\n\n\n if !metadata.file_type().is_block_device() {\n\n return Err(DiskError::NotABlock { arg: disk_arg.clone() });\n\n }\n\n\n\n let disk = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .custom_flags(libc::O_SYNC)\n\n .open(&canonical_path)\n\n .await\n\n .map_err(|why| DiskError::Open { disk: disk_arg.clone(), why })?;\n\n\n\n disks.push((canonical_path.into_boxed_path(), disk));\n\n }\n\n\n\n Ok(disks)\n\n}\n", "file_path": "src/lib.rs", "rank": 88, "score": 13.068467700086435 }, { "content": " 1.0f64\n\n } else {\n\n all_tasks_finished = false;\n\n raw_value as f64 / length as f64\n\n };\n\n\n\n pbar.set_fraction(value);\n\n\n\n if task_is_finished {\n\n label.set_label(&fl!(\"task-finished\"));\n\n } else {\n\n prev_values[1] = prev_values[2];\n\n prev_values[2] = prev_values[3];\n\n prev_values[3] = prev_values[4];\n\n prev_values[4] = prev_values[5];\n\n prev_values[5] = prev_values[6];\n\n prev_values[6] = raw_value - prev_values[0];\n\n prev_values[0] = raw_value;\n\n\n\n let sum: u64 = prev_values.iter().skip(1).sum();\n", "file_path": "gtk/src/app/signals/mod.rs", "rank": 89, "score": 
12.773185621720017 }, { "content": "pub mod events;\n\npub mod signals;\n\npub mod state;\n\npub mod views;\n\npub mod widgets;\n\n\n\nuse self::events::*;\n\nuse self::state::*;\n\nuse self::views::*;\n\nuse self::widgets::*;\n\n\n\nuse crate::fl;\n\nuse gtk::{self, prelude::*};\n\nuse std::{fs::File, process, rc::Rc, sync::Arc};\n\n\n\nconst CSS: &str = include_str!(\"ui.css\");\n\n\n\npub struct App {\n\n pub ui: Rc<GtkUi>,\n\n pub state: Arc<State>,\n", "file_path": "gtk/src/app/mod.rs", "rank": 90, "score": 12.670716792249753 }, { "content": " ..add_pattern(\"*.[Ii][Ss][Oo]\");\n\n ..add_pattern(\"*.[Ii][Mm][Gg]\");\n\n });\n\n if let Some(p) = path {\n\n dialog.set_current_folder(p);\n\n };\n\n })\n\n }\n\n\n\n pub fn run(&self) -> Option<PathBuf> {\n\n if self.0.run() == ResponseType::Accept {\n\n self.0.filename()\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for OpenDialog {\n\n fn drop(&mut self) {\n\n self.0.destroy();\n\n }\n\n}\n", "file_path": "gtk/src/app/widgets/dialogs.rs", "rank": 91, "score": 12.567446139502024 }, { "content": " Message::Finished(\"/dev/sdb\".into()),\n\n ];\n\n\n\n let input = AllowStdIo::new(Cursor::new(SAMPLE));\n\n\n\n let mut stream = FramedRead::new(input, PopsicleDecoder::default());\n\n\n\n let mut expected_iter = expected.iter();\n\n\n\n let mut matched = 0;\n\n while let Some(message) = stream.next().await {\n\n let message = message.unwrap();\n\n\n\n assert_eq!(message, *expected_iter.next().unwrap());\n\n matched += 1;\n\n }\n\n\n\n assert_eq!(matched, expected.len());\n\n });\n\n}\n", "file_path": "tests/ipc.rs", "rank": 92, "score": 12.564443134021754 }, { "content": " &style,\n\n gtk::STYLE_PROVIDER_PRIORITY_USER,\n\n );\n\n\n\n // The icon the app will display.\n\n gtk::Window::set_default_icon_name(\"com.system76.Popsicle\");\n\n\n\n // Programs what to do when the exit button is used.\n\n window.connect_delete_event(move |_, _| {\n\n gtk::main_quit();\n\n gtk::Inhibit(false)\n\n });\n\n\n\n GtkUi { 
header, window, content }\n\n }\n\n\n\n pub fn errorck<T, E: ::std::fmt::Display>(\n\n &self,\n\n state: &State,\n\n result: Result<T, E>,\n", "file_path": "gtk/src/app/mod.rs", "rank": 93, "score": 12.303935443095595 }, { "content": "use futures::{\n\n channel::{mpsc, oneshot},\n\n executor, join,\n\n prelude::*,\n\n};\n\nuse i18n_embed::DesktopLanguageRequester;\n\nuse pbr::{MultiBar, Pipe, ProgressBar, Units};\n\nuse popsicle::{mnt, Progress, Task};\n\nuse std::{\n\n io::{self, Write},\n\n process, thread,\n\n};\n\n\n", "file_path": "cli/src/main.rs", "rank": 94, "score": 12.232358776624878 }, { "content": "\n\n thread::spawn(|| {\n\n executor::block_on(async move {\n\n let buf = &mut [0u8; 64 * 1024];\n\n let _ = rtx.send(task.process(buf).await);\n\n })\n\n });\n\n\n\n mb.listen();\n\n } else {\n\n let (etx, erx) = mpsc::unbounded();\n\n let mut paths = Vec::new();\n\n let mut task = Task::new(image, check);\n\n\n\n for (disk_path, disk) in disks {\n\n let pb = MachineProgress::new(paths.len(), etx.clone());\n\n paths.push(disk_path.clone());\n\n task.subscribe(disk, disk_path, pb);\n\n }\n\n\n", "file_path": "cli/src/main.rs", "rank": 95, "score": 12.011807447892025 }, { "content": "\n\n self\n\n }\n\n\n\n pub fn then_execute(self) {\n\n self.ui.window.show_all();\n\n gtk::main();\n\n }\n\n}\n\n\n\npub struct GtkUi {\n\n window: gtk::Window,\n\n header: Header,\n\n content: Content,\n\n}\n\n\n\nimpl GtkUi {\n\n pub fn new() -> Self {\n\n // Create a the headerbar and it's associated content.\n\n let header = Header::new();\n", "file_path": "gtk/src/app/mod.rs", "rank": 96, "score": 11.757284859046898 }, { "content": "\n\n let is_tty = atty::is(atty::Stream::Stdout);\n\n\n\n if is_tty && !matches.is_present(\"yes\") {\n\n epint!(\n\n (fl!(\"question\", image_path = image_path)) \"\\n\"\n\n for (path, _) in &disks {\n\n \" - \" (path.display()) \"\\n\"\n\n }\n\n (fl!(\"yn\")) \": \"\n\n );\n\n\n\n io::stdout().flush().unwrap();\n\n\n\n let mut confirm = 
String::new();\n\n io::stdin().read_line(&mut confirm).unwrap();\n\n\n\n if confirm.trim() != \"y\" && confirm.trim() != \"yes\" {\n\n return Err(anyhow!(fl!(\"error-exiting\")));\n\n }\n", "file_path": "cli/src/main.rs", "rank": 97, "score": 11.752370222050802 }, { "content": "use super::View;\n\nuse crate::fl;\n\nuse gtk::{prelude::*, *};\n\n\n\npub struct SummaryView {\n\n pub view: View,\n\n pub list: ListBox,\n\n}\n\n\n\nimpl SummaryView {\n\n pub fn new() -> SummaryView {\n\n let list = cascade! {\n\n ListBox::new();\n\n ..style_context().add_class(\"frame\");\n\n };\n\n\n\n let view = View::new(\"process-completed\", &fl!(\"flashing-completed\"), \"\", |right_panel| {\n\n right_panel.pack_start(&list, true, true, 0);\n\n });\n\n\n\n SummaryView { view, list }\n\n }\n\n}\n", "file_path": "gtk/src/app/views/summary.rs", "rank": 98, "score": 11.590197080043648 }, { "content": "use super::View;\n\nuse crate::fl;\n\nuse bytesize;\n\nuse gtk::prelude::*;\n\nuse gtk::*;\n\nuse pango::{AttrColor, AttrList, EllipsizeMode};\n\nuse std::path::Path;\n\n\n\npub struct ImageView {\n\n pub view: View,\n\n pub check: Button,\n\n pub chooser_container: Stack,\n\n pub chooser: Button,\n\n pub image_path: Label,\n\n pub hash: ComboBoxText,\n\n pub hash_label: Entry,\n\n}\n\n\n\nimpl ImageView {\n\n pub fn new() -> ImageView {\n", "file_path": "gtk/src/app/views/images.rs", "rank": 99, "score": 11.458160574709934 } ]
Rust
rust/envop/src/main.rs
eagletmt/misc
cb4d3d3d19a00161ad7e87056d007ee043effed7
use std::io::Write as _; fn main() -> Result<(), Box<dyn std::error::Error>> { let mut args = std::env::args(); let me = args.next().unwrap(); let name = args.next().unwrap_or_else(|| { eprintln!("Usage: {} NAME PROG ARGS...", me); std::process::exit(1); }); let prog = args.next().unwrap_or_else(|| { eprintln!("Usage: {} NAME PROG ARGS...", me); std::process::exit(1); }); let tags = std::env::var("ENVOP_TAGS").unwrap_or_else(|_| "envop".to_owned()); let vault = std::env::var("ENVOP_VAULT").unwrap_or_else(|_| "Private".to_owned()); let output = std::process::Command::new("op") .arg("list") .arg("items") .arg("--vault") .arg(&vault) .arg("--categories") .arg("Secure Note") .arg("--tags") .arg(&tags) .output()?; if !output.status.success() { eprintln!("`op list items` failed"); std::io::stdout().write_all(&output.stdout)?; std::io::stderr().write_all(&output.stderr)?; std::process::exit(output.status.code().unwrap_or(1)); } let item_summaries: Vec<ItemSummary> = serde_json::from_slice(&output.stdout)?; let mut envs = Vec::new(); for item_summary in item_summaries .into_iter() .filter(|item_summary| item_summary.overview.title == name) { let output = std::process::Command::new("op") .arg("get") .arg("item") .arg("--vault") .arg(&vault) .arg(&item_summary.uuid) .output()?; if !output.status.success() { eprintln!("`op get item {}` failed", item_summary.uuid); std::io::stdout().write_all(&output.stdout)?; std::io::stderr().write_all(&output.stderr)?; std::process::exit(output.status.code().unwrap_or(1)); } let item: Item = serde_json::from_slice(&output.stdout)?; for section in item.details.sections.into_iter() { for field in section.fields.into_iter() { if field.k == "string" || field.k == "concealed" { envs.push((field.t, field.v)); } else { eprintln!( "{}: ignoring field {} in item {}", me, field.t, item_summary.uuid ); } } } } let mut cmd = std::process::Command::new(&prog); cmd.envs(envs).args(args); let status = exec(cmd)?; if !status.success() { 
std::process::exit(status.code().unwrap_or(1)); } Ok(()) } #[cfg(unix)] fn exec( mut cmd: std::process::Command, ) -> Result<std::process::ExitStatus, Box<dyn std::error::Error>> { use std::os::unix::process::CommandExt as _; Err(Box::new(cmd.exec())) } #[cfg(windows)] fn exec( mut cmd: std::process::Command, ) -> Result<std::process::ExitStatus, Box<dyn std::error::Error>> { Ok(cmd.status()?) } #[derive(Debug, serde::Deserialize)] struct ItemSummary { uuid: String, overview: ItemOverview, } #[derive(Debug, serde::Deserialize)] struct ItemOverview { title: String, } #[derive(Debug, serde::Deserialize)] struct Item { details: ItemDetails, } #[derive(Debug, serde::Deserialize)] struct ItemDetails { sections: Vec<ItemSection>, } #[derive(Debug, serde::Deserialize)] struct ItemSection { fields: Vec<ItemField>, } #[derive(Debug, serde::Deserialize)] struct ItemField { k: String, t: String, v: String, }
use std::io::Write as _; fn main() -> Result<(), Box<dyn std::error::Error>> { let mut args = std::env::args(); let me = args.next().unwrap(); let name = args.next().unwrap_or_else(|| { eprintln!("Usage: {} NAME PROG ARGS...", me); std::process::exit(1); }); let prog = args.next().unwrap_or_else(|| { eprintln!("Usage: {} NAME PROG ARGS...", me); std::process::exit(1); }); let tags = std::env::var("ENVOP_TAGS").unwrap_or_else(|_| "envop".to_owned()); let vault = std::env::var("ENVOP_VAULT").unwrap_or_else(|_| "Private".to_owned()); let output = std::process::Comm
tderr().write_all(&output.stderr)?; std::process::exit(output.status.code().unwrap_or(1)); } let item_summaries: Vec<ItemSummary> = serde_json::from_slice(&output.stdout)?; let mut envs = Vec::new(); for item_summary in item_summaries .into_iter() .filter(|item_summary| item_summary.overview.title == name) { let output = std::process::Command::new("op") .arg("get") .arg("item") .arg("--vault") .arg(&vault) .arg(&item_summary.uuid) .output()?; if !output.status.success() { eprintln!("`op get item {}` failed", item_summary.uuid); std::io::stdout().write_all(&output.stdout)?; std::io::stderr().write_all(&output.stderr)?; std::process::exit(output.status.code().unwrap_or(1)); } let item: Item = serde_json::from_slice(&output.stdout)?; for section in item.details.sections.into_iter() { for field in section.fields.into_iter() { if field.k == "string" || field.k == "concealed" { envs.push((field.t, field.v)); } else { eprintln!( "{}: ignoring field {} in item {}", me, field.t, item_summary.uuid ); } } } } let mut cmd = std::process::Command::new(&prog); cmd.envs(envs).args(args); let status = exec(cmd)?; if !status.success() { std::process::exit(status.code().unwrap_or(1)); } Ok(()) } #[cfg(unix)] fn exec( mut cmd: std::process::Command, ) -> Result<std::process::ExitStatus, Box<dyn std::error::Error>> { use std::os::unix::process::CommandExt as _; Err(Box::new(cmd.exec())) } #[cfg(windows)] fn exec( mut cmd: std::process::Command, ) -> Result<std::process::ExitStatus, Box<dyn std::error::Error>> { Ok(cmd.status()?) 
} #[derive(Debug, serde::Deserialize)] struct ItemSummary { uuid: String, overview: ItemOverview, } #[derive(Debug, serde::Deserialize)] struct ItemOverview { title: String, } #[derive(Debug, serde::Deserialize)] struct Item { details: ItemDetails, } #[derive(Debug, serde::Deserialize)] struct ItemDetails { sections: Vec<ItemSection>, } #[derive(Debug, serde::Deserialize)] struct ItemSection { fields: Vec<ItemField>, } #[derive(Debug, serde::Deserialize)] struct ItemField { k: String, t: String, v: String, }
and::new("op") .arg("list") .arg("items") .arg("--vault") .arg(&vault) .arg("--categories") .arg("Secure Note") .arg("--tags") .arg(&tags) .output()?; if !output.status.success() { eprintln!("`op list items` failed"); std::io::stdout().write_all(&output.stdout)?; std::io::s
random
[ { "content": "fn main() -> anyhow::Result<()> {\n\n env_logger::Builder::from_env(env_logger::Env::default().default_filter_or(\"info\")).init();\n\n\n\n for arg in std::env::args().skip(1) {\n\n unpack(&arg).with_context(|| format!(\"failed to unpack {}\", arg))?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust/pfs-unpack/src/main.rs", "rank": 0, "score": 173425.94845928147 }, { "content": "fn main() -> Result<(), anyhow::Error> {\n\n let miam = miam2tf::loader::load_miam(\"IAMfile\")?;\n\n miam2tf::printer::print_as_hcl2(&mut std::io::stdout().lock(), &miam)?;\n\n Ok(())\n\n}\n", "file_path": "rust/miam2tf/src/main.rs", "rank": 1, "score": 167076.68222547026 }, { "content": "fn format_codepoint(f: &mut std::fmt::Formatter, c: u32) -> std::fmt::Result {\n\n if c < 0x10000 {\n\n write!(f, \"<U{:04X}>\", c)\n\n } else {\n\n write!(f, \"<U{:08X}>\", c)\n\n }\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let client = reqwest::Client::new();\n\n let resp = client\n\n .get(EAST_ASIAN_WIDTH_URL)\n\n .header(\n\n reqwest::header::ACCEPT_ENCODING,\n\n reqwest::header::HeaderValue::from_static(\"gzip\"),\n\n )\n\n .send()\n\n .await?\n\n .error_for_status()?;\n", "file_path": "rust/ambiwidth/src/main.rs", "rank": 2, "score": 146457.7834065068 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut args = std::env::args();\n\n let me = args.next().unwrap();\n\n let name = args.next().unwrap_or_else(|| {\n\n eprintln!(\"Usage: {} NAME PROG ARGS...\", me);\n\n std::process::exit(1);\n\n });\n\n let prog = args.next().unwrap_or_else(|| {\n\n eprintln!(\"Usage: {} NAME PROG ARGS...\", me);\n\n std::process::exit(1);\n\n });\n\n let folderid = std::env::var(\"ENVWARDEN_FOLDERID\").unwrap_or_else(|_| {\n\n eprintln!(\"ENVWARDEN_FOLDERID must be set\");\n\n std::process::exit(1);\n\n });\n\n\n\n let output = std::process::Command::new(\"bw\")\n\n .arg(\"list\")\n\n .arg(\"items\")\n\n .arg(&name)\n", 
"file_path": "rust/envwarden/src/main.rs", "rank": 4, "score": 145921.8679826554 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n for arg in std::env::args().skip(1) {\n\n let code = std::fs::read_to_string(&arg)?;\n\n let expr = jrsonnet_parser::parse(\n\n &code,\n\n &jrsonnet_parser::ParserSettings {\n\n loc_data: true,\n\n file_name: std::path::PathBuf::from(&arg).into(),\n\n },\n\n )?;\n\n let unused_variables = jrsonnet_lint::find_unused_variables(&expr);\n\n for variable in unused_variables {\n\n println!(\n\n \"{}:{}:{} is defined but unused\",\n\n variable.path.display(),\n\n variable.begin_offset_line()?,\n\n variable.name\n\n );\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "rust/jrsonnet-lint/src/main.rs", "rank": 5, "score": 143666.1666190183 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut args = std::env::args();\n\n let program = args.next().unwrap();\n\n\n\n let mut options = getopts::Options::new();\n\n options.optopt(\"p\", \"port\", \"Port number (default: 443)\", \"PORT\");\n\n let mut matches = match options.parse(args) {\n\n Ok(m) => m,\n\n Err(msg) => {\n\n eprintln!(\"{}\", msg);\n\n print_usage(&program, &options);\n\n std::process::exit(1);\n\n }\n\n };\n\n\n\n let host = if matches.free.is_empty() {\n\n print_usage(&program, &options);\n\n std::process::exit(2);\n\n } else {\n\n matches.free.remove(0)\n", "file_path": "rust/tls-cert-expiration/src/main.rs", "rank": 6, "score": 141517.9869883905 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let path = \"input.cue\";\n\n let code = std::fs::read_to_string(path)?;\n\n let exported = cue_export(path, &code)?;\n\n println!(\"{}\", exported);\n\n Ok(())\n\n}\n\n\n", "file_path": "rust/go-bridge-sample/src/main.rs", "rank": 7, "score": 141517.9869883905 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n prost_build::compile_protos(\n\n 
&[\"vendor/grpc/examples/protos/route_guide.proto\"],\n\n &[\"vendor/grpc/examples/protos\"],\n\n )?;\n\n Ok(())\n\n}\n", "file_path": "learn/grpc/build.rs", "rank": 8, "score": 139080.3572624062 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n if !std::process::Command::new(\"rake\")\n\n .env(\"MRUBY_CONFIG\", \"build_config.rb\")\n\n .status()?\n\n .success()\n\n {\n\n panic!(\"failed to build mruby\");\n\n }\n\n println!(\"cargo:rustc-link-lib=static=mruby\");\n\n println!(\"cargo:rustc-link-search=native=vendor/mruby/build/host/lib\");\n\n\n\n println!(\"cargo:rerun-if-changed=include/wrapper.h\");\n\n println!(\"cargo:rerun-if-changed=src/wrapper.c\");\n\n println!(\"cargo:rerun-if-changed=build_config.rb\");\n\n println!(\"cargo:rerun-if-changed=mrblib/miam.rb\");\n\n\n\n let bindings = bindgen::Builder::default()\n\n .clang_arg(\"-Ivendor/mruby/include\")\n\n .header(\"include/wrapper.h\")\n\n .generate()\n\n .expect(\"unable to generate bindings\");\n\n let out_path = std::path::PathBuf::from(std::env::var(\"OUT_DIR\")?);\n\n bindings.write_to_file(out_path.join(\"bindings.rs\"))?;\n\n Ok(())\n\n}\n", "file_path": "rust/miam2tf/build.rs", "rank": 9, "score": 139080.3572624062 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tonic_build::compile_protos(\"proto/agent.proto\")?;\n\n Ok(())\n\n}\n", "file_path": "rust/agent-proto/build.rs", "rank": 10, "score": 136623.41930329017 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let out_dir = std::path::PathBuf::from(std::env::var(\"OUT_DIR\").unwrap());\n\n\n\n println!(\"cargo:rerun-if-changed=bridge.go\");\n\n println!(\"cargo:rustc-link-lib=static=go-bridge-sample\");\n\n println!(\"cargo:rustc-link-search=native={}\", out_dir.display());\n\n let status = std::process::Command::new(\"go\")\n\n .arg(\"build\")\n\n .arg(\"-buildmode=c-archive\")\n\n .arg(format!(\n\n \"-o={}\",\n\n 
out_dir.join(\"libgo-bridge-sample.a\").display()\n\n ))\n\n .status()?;\n\n if !status.success() {\n\n panic!(\"Failed to build libgo-bridge-sample.a\");\n\n }\n\n\n\n let bindings = bindgen::Builder::default()\n\n .header(format!(\n\n \"{}\",\n\n out_dir.join(\"libgo-bridge-sample.h\").display()\n\n ))\n\n .generate()\n\n .expect(\"Failed to generate bindings.rs from libgo-bridge-sample.h\");\n\n bindings.write_to_file(out_dir.join(\"bindings.rs\"))?;\n\n Ok(())\n\n}\n", "file_path": "rust/go-bridge-sample/build.rs", "rank": 11, "score": 134299.2628969799 }, { "content": "fn main() {\n\n let xlib = x11_dl::xlib::Xlib::open().expect(\"Unable to load xlib\");\n\n let xft = x11_dl::xft::Xft::open().expect(\"Unable to load xft\");\n\n\n\n let args: Vec<String> = std::env::args().collect();\n\n if args.len() > 1 {\n\n let desc_cstr =\n\n std::ffi::CString::new(args[1].clone()).expect(\"Unable to allocate CString\");\n\n\n\n let dpy = unsafe { (xlib.XOpenDisplay)(std::ptr::null()) };\n\n let screen = unsafe { (xlib.XDefaultScreenOfDisplay)(dpy) };\n\n let screen_number = unsafe { (xlib.XScreenNumberOfScreen)(screen) };\n\n let font = unsafe { (xft.XftFontOpenName)(dpy, screen_number, desc_cstr.as_ptr()) };\n\n let height = unsafe { *font }.height;\n\n println!(\"{}\", height);\n\n unsafe { (xft.XftFontClose)(dpy, font) };\n\n unsafe { (xlib.XCloseDisplay)(dpy) };\n\n } else {\n\n eprintln!(\"Usage: {} PATTERN\", args[0]);\n\n }\n\n}\n", "file_path": "rust/xft-height/src/main.rs", "rank": 12, "score": 132298.35848109392 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let program = args[0].clone();\n\n\n\n let mut options = getopts::Options::new();\n\n options.optmulti(\"l\", \"label\", \"Select this label\", \"LABEL\");\n\n options.optflag(\"h\", \"help\", \"Print help\");\n\n let matches = match options.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(msg) => {\n\n println!(\"{}\", msg);\n\n print_usage(&program, &options);\n\n 
std::process::exit(1);\n\n }\n\n };\n\n\n\n if matches.opt_present(\"h\") {\n\n print_usage(&program, &options);\n\n std::process::exit(0);\n\n }\n", "file_path": "rust/ltsv-select/src/main.rs", "rank": 13, "score": 132298.35848109392 }, { "content": "fn main() {\n\n // Image\n\n let aspect_ratio = 16.0 / 9.0;\n\n let image_width = 1200;\n\n let image_height = (image_width as f64 / aspect_ratio) as i32;\n\n let samples_per_pixel = 100;\n\n let max_depth = 50;\n\n\n\n // World\n\n let mut rng = rand::thread_rng();\n\n let world = std::sync::Arc::new(random_scene(&mut rng));\n\n\n\n // Camera\n\n let lookfrom = Point3::new(14.0, 6.0, 3.0);\n\n let lookat = Point3::new(0.0, 0.0, 0.0);\n\n let vup = Vec3::new(0.0, 1.0, 0.0);\n\n let dist_to_focus = 12.0;\n\n let aperture = 0.1;\n\n\n\n // Render\n", "file_path": "rust/ray-tracing-one-weekend/src/main.rs", "rank": 14, "score": 128658.83083573816 }, { "content": "fn main() {\n\n tonic_build::compile_protos(\"proto/hello.proto\").unwrap();\n\n}\n", "file_path": "rust/grpc-sample/build.rs", "rank": 15, "score": 125825.79557630114 }, { "content": "fn unpack<P>(path: P) -> anyhow::Result<()>\n\nwhere\n\n P: AsRef<std::path::Path>,\n\n{\n\n let file = std::fs::File::open(path)?;\n\n let mut reader = std::io::BufReader::new(file);\n\n\n\n let mut magic = [0; 2];\n\n reader.read_exact(&mut magic)?;\n\n if &magic != b\"pf\" {\n\n return Err(anyhow::anyhow!(\"not an Artemis archive\"));\n\n }\n\n let mut version = [0; 1];\n\n reader.read_exact(&mut version)?;\n\n let version = version[0] - b'0';\n\n if version != 8 {\n\n return Err(anyhow::anyhow!(\"unknown version number: {}\", version));\n\n }\n\n\n\n let index_size = reader.read_u32::<byteorder::LittleEndian>()? 
as usize;\n", "file_path": "rust/pfs-unpack/src/main.rs", "rank": 16, "score": 122007.1816012088 }, { "content": "fn random_scene<R>(rng: &mut R) -> HittableList\n\nwhere\n\n R: rand::Rng,\n\n{\n\n let mut world = HittableList::default();\n\n\n\n let ground_material = Arc::new(Lambertian::new(Color::new(0.5, 0.5, 0.5), &RNG));\n\n world.add(Arc::new(Sphere::new(\n\n Point3::new(0.0, -1000.0, 0.0),\n\n 1000.0,\n\n ground_material,\n\n )));\n\n\n\n for a in -11..11 {\n\n for b in -11..11 {\n\n let choose_mat = rng.gen_range(0.0..1.0);\n\n let x = a as f64 + 0.9 * rng.gen_range(0.0..1.0);\n\n let z = b as f64 + 0.9 * rng.gen_range(0.0..1.0);\n\n let center = Point3::new(x, 0.2, z);\n\n\n", "file_path": "rust/ray-tracing-one-weekend/src/main.rs", "rank": 17, "score": 113747.29823626587 }, { "content": "pub fn print_as_hcl2<W>(writer: &mut W, miam: &crate::Miam) -> Result<(), std::io::Error>\n\nwhere\n\n W: std::io::Write,\n\n{\n\n for user in &miam.users {\n\n writeln!(writer, r#\"resource \"aws_iam_user\" \"{}\" {{\"#, user.user_name)?;\n\n writeln!(writer, r#\" name = \"{}\"\"#, user.user_name)?;\n\n if let Some(ref path) = user.path {\n\n writeln!(writer, r#\" path = \"{}\"\"#, path)?;\n\n }\n\n writeln!(writer, \"}}\")?;\n\n\n\n for policy in &user.policies {\n\n writeln!(\n\n writer,\n\n r#\"resource \"aws_iam_user_policy\" \"{}-{}\" {{\"#,\n\n user.user_name, policy.name\n\n )?;\n\n writeln!(writer, r#\" name = \"{}\"\"#, policy.name)?;\n\n writeln!(writer, \" user = aws_iam_user.{}.name\", user.user_name)?;\n", "file_path": "rust/miam2tf/src/printer.rs", "rank": 18, "score": 109748.12825998417 }, { "content": "fn unwrap_or_raise<T, E>(mrb: *mut crate::mruby_c::mrb_state, r: Result<T, E>) -> T\n\nwhere\n\n E: std::error::Error,\n\n{\n\n match r {\n\n Ok(v) => v,\n\n Err(e) => {\n\n let msg = std::ffi::CString::new(format!(\"{:?}\", e)).unwrap();\n\n unsafe {\n\n crate::mruby_c::mrb_raise(\n\n mrb,\n\n crate::mruby_c::wrapper_e_runtime_error(mrb),\n\n 
msg.as_ptr(),\n\n )\n\n };\n\n unreachable!();\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/miam2tf/src/mruby.rs", "rank": 19, "score": 109748.12825998417 }, { "content": "fn cue_export(filename: &str, code: &str) -> Result<String, String> {\n\n let filename_cstr = std::ffi::CString::new(filename).unwrap();\n\n let code_cstr = std::ffi::CString::new(code).unwrap();\n\n let mut e = 0;\n\n let result = unsafe {\n\n let ptr = go_bridge_sample::cue_export(filename_cstr.as_ptr(), code_cstr.as_ptr(), &mut e);\n\n let r = std::ffi::CStr::from_ptr(ptr).to_string_lossy().into_owned();\n\n libc::free(ptr as *mut libc::c_void);\n\n r\n\n };\n\n if e == 0 {\n\n Ok(result)\n\n } else {\n\n Err(result)\n\n }\n\n}\n", "file_path": "rust/go-bridge-sample/src/main.rs", "rank": 20, "score": 108646.75281454301 }, { "content": "#[cfg(windows)]\n\nfn exec(\n\n mut cmd: std::process::Command,\n\n) -> Result<std::process::ExitStatus, Box<dyn std::error::Error>> {\n\n Ok(cmd.status()?)\n\n}\n\n\n", "file_path": "rust/envwarden/src/main.rs", "rank": 22, "score": 95979.47686275662 }, { "content": "fn ltsv_select(\n\n labels: &std::collections::HashSet<String>,\n\n path: Option<&String>,\n\n) -> std::io::Result<()> {\n\n match path {\n\n None => {\n\n let stdin = std::io::stdin();\n\n let lock = stdin.lock();\n\n ltsv_select2(labels, lock)\n\n }\n\n Some(path) => {\n\n let file = std::fs::File::open(path)?;\n\n ltsv_select2(labels, std::io::BufReader::new(file))\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/ltsv-select/src/main.rs", "rank": 23, "score": 91751.15608308828 }, { "content": "fn random_in_unit_sphere<R>(rng: &mut R) -> Vec3\n\nwhere\n\n R: rand::Rng,\n\n{\n\n loop {\n\n let p = Vec3::random(rng, -1.0, 1.0);\n\n if p.length_squared() < 1.0 {\n\n return p;\n\n }\n\n }\n\n}\n\n\n\npub struct Metal<R: 'static> {\n\n albedo: Color,\n\n fuzz: f64,\n\n tls_rng: &'static std::thread::LocalKey<std::cell::RefCell<R>>,\n\n}\n\nimpl<R> Metal<R> {\n\n pub fn new(\n\n albedo: Color,\n", 
"file_path": "rust/ray-tracing-one-weekend/src/lib.rs", "rank": 24, "score": 89199.98567013617 }, { "content": "fn random_in_unit_disk<R>(rng: &mut R) -> Vec3\n\nwhere\n\n R: rand::Rng,\n\n{\n\n loop {\n\n let p = Vec3::new(rng.gen_range(-1.0..1.0), rng.gen_range(-1.0..1.0), 0.0);\n\n if p.length_squared() < 1.0 {\n\n return p;\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/ray-tracing-one-weekend/src/lib.rs", "rank": 25, "score": 89199.98567013617 }, { "content": "fn verify_signature(\n\n headers: &std::collections::HashMap<String, String>,\n\n body: &str,\n\n) -> Result<(), ApiGatewayV2Response> {\n\n // Verify signature https://api.slack.com/authentication/verifying-requests-from-slack\n\n let signing_secret = match std::env::var(\"SLACK_SIGNING_SECRET\") {\n\n Ok(secret) => secret,\n\n Err(_) => {\n\n return Err(ApiGatewayV2Response {\n\n status_code: 500,\n\n body: \"SLACK_SIGNING_SECRET is missing\".to_owned(),\n\n ..Default::default()\n\n });\n\n }\n\n };\n\n let slack_signature = match headers.get(\"x-slack-signature\") {\n\n Some(t) => t,\n\n None => {\n\n return Err(ApiGatewayV2Response {\n\n status_code: 400,\n", "file_path": "rust/slack-slash-command-sample/handler/src/main.rs", "rank": 26, "score": 86314.55682415921 }, { "content": "fn to_miam(root: &crate::mruby::Value) -> Result<crate::Miam, anyhow::Error> {\n\n let mut users = Vec::new();\n\n for user in root.read_attribute(\"users\").iter() {\n\n let user_name = user.read_attribute(\"user_name\").to_string();\n\n let path = user.read_attribute(\"path\").to_string_opt();\n\n let mut policies = Vec::new();\n\n for policy in user.read_attribute(\"policies\").iter() {\n\n policies.push(to_rust_policy_document(&policy));\n\n }\n\n let mut groups = Vec::new();\n\n for group in user.read_attribute(\"groups\").iter() {\n\n groups.push(group.to_string());\n\n }\n\n let mut attached_managed_policies = Vec::new();\n\n for policy in user.read_attribute(\"attached_managed_policies\").iter() {\n\n 
attached_managed_policies.push(policy.to_string());\n\n }\n\n users.push(crate::User {\n\n user_name,\n\n path,\n", "file_path": "rust/miam2tf/src/loader.rs", "rank": 27, "score": 81806.00130071971 }, { "content": "pub fn load_miam<P>(path: P) -> Result<crate::Miam, anyhow::Error>\n\nwhere\n\n P: AsRef<std::path::Path>,\n\n{\n\n let mruby = crate::mruby::MRuby::default();\n\n mruby.load(path)?;\n\n let root = mruby.instance_variable_get(\"@root\");\n\n to_miam(&root)\n\n}\n\n\n", "file_path": "rust/miam2tf/src/loader.rs", "rank": 28, "score": 80450.77145409357 }, { "content": "fn to_rust_string(mrb: *mut crate::mruby_c::mrb_state, s: crate::mruby_c::mrb_value) -> String {\n\n unsafe {\n\n std::ffi::CStr::from_ptr(crate::mruby_c::mrb_str_to_cstr(mrb, s))\n\n .to_string_lossy()\n\n .into_owned()\n\n }\n\n}\n\n\n\npub struct ValueIter<'a> {\n\n idx: i64,\n\n len: i64,\n\n ary: crate::mruby_c::mrb_value,\n\n mruby: &'a MRuby,\n\n}\n\nimpl<'a> Iterator for ValueIter<'a> {\n\n type Item = Value<'a>;\n\n\n\n fn next(&mut self) -> Option<<Self as Iterator>::Item> {\n\n if self.idx < self.len {\n\n let val = unsafe { crate::mruby_c::mrb_ary_entry(self.ary, self.idx) };\n", "file_path": "rust/miam2tf/src/mruby.rs", "rank": 29, "score": 76684.05378072872 }, { "content": "fn mrb_args_req(n: i64) -> crate::mruby_c::mrb_aspec {\n\n unsafe { crate::mruby_c::wrapper_mrb_args_req(n) }\n\n}\n\n\n", "file_path": "rust/miam2tf/src/mruby.rs", "rank": 30, "score": 74832.32534937396 }, { "content": "fn render_html(digest: &str, key: &str) -> String {\n\n let link = format!(\"{}/{}\", URL_PREFIX, key);\n\n\n\n let mut buf = String::new();\n\n buf.push_str(\"<!DOCTYPE html><html><head><meta charset='utf-8'><title>\");\n\n buf.push_str(digest);\n\n buf.push_str(\n\n \"</title><meta name='twitter:card' content='photo'><meta name='twitter:title' \\\n\n content='\",\n\n );\n\n buf.push_str(key);\n\n buf.push_str(\"'><meta name='twitter:description' content='\");\n\n buf.push_str(key);\n\n 
buf.push_str(\"'><meta name='twitter:image' content='\");\n\n buf.push_str(&link);\n\n buf.push_str(\"'><meta name='twitter:url' content='\");\n\n buf.push_str(&link);\n\n buf.push_str(\"'><meta name='og:image' content='\");\n\n buf.push_str(&link);\n\n buf.push_str(\"'></head><body><img alt='\");\n\n buf.push_str(key);\n\n buf.push_str(\"' src='\");\n\n buf.push_str(&link);\n\n buf.push_str(\"'></body></html>\");\n\n buf\n\n}\n\n\n", "file_path": "rust/gyazo/src/main.rs", "rank": 31, "score": 73915.77948046188 }, { "content": "fn ltsv_select2<R: std::io::BufRead>(\n\n labels: &std::collections::HashSet<String>,\n\n reader: R,\n\n) -> std::io::Result<()> {\n\n for line in reader.lines() {\n\n let line = line?;\n\n let mut record = std::collections::LinkedList::new();\n\n for label_and_value in line.split('\\t') {\n\n let xs: Vec<&str> = label_and_value.split(':').collect();\n\n if !xs.is_empty() {\n\n let label = xs[0];\n\n if labels.is_empty() || labels.contains(label) {\n\n record.push_back(label_and_value);\n\n }\n\n }\n\n }\n\n\n\n let mut first = true;\n\n for label_and_value in record {\n\n if first {\n", "file_path": "rust/ltsv-select/src/main.rs", "rank": 32, "score": 73837.55863027864 }, { "content": "fn print_usage(program: &str, options: &getopts::Options) {\n\n println!(\"{}\", options.short_usage(program));\n\n println!(\"{}\", options.usage(\"Filter LTSV records.\"));\n\n}\n\n\n", "file_path": "rust/ltsv-select/src/main.rs", "rank": 33, "score": 72391.95274847248 }, { "content": "fn print_usage(program: &str, options: &getopts::Options) {\n\n println!(\"{}\", options.short_usage(program));\n\n println!(\"{}\", options.usage(\"Check TLS certificate expiration\"));\n\n}\n", "file_path": "rust/tls-cert-expiration/src/main.rs", "rank": 34, "score": 70948.31219152732 }, { "content": "fn guess_content_type(ext: &std::ffi::OsStr) -> Option<String> {\n\n if ext == \"png\" {\n\n Some(\"image/png\".to_owned())\n\n } else if ext == \"jpg\" {\n\n 
Some(\"image/jpeg\".to_owned())\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "rust/gyazo/src/main.rs", "rank": 35, "score": 68337.82039213163 }, { "content": "fn ray_color<T>(r: &Ray, world: &T, depth: i32) -> Color\n\nwhere\n\n T: Hittable,\n\n{\n\n if depth <= 0 {\n\n return Color::new(0.0, 0.0, 0.0);\n\n }\n\n if let Some(rec) = world.hit(r, 0.001, f64::INFINITY) {\n\n if let Some((attenuation, scattered)) = rec.material.scatter(r, &rec) {\n\n return attenuation * ray_color(&scattered, world, depth - 1);\n\n } else {\n\n return Color::new(0.0, 0.0, 0.0);\n\n }\n\n }\n\n let unit_direction = r.direction().unit_vector();\n\n let t = 0.5 * (unit_direction.y() + 1.0);\n\n (1.0 - t) * Color::new(1.0, 1.0, 1.0) + t * Color::new(0.5, 0.7, 1.0)\n\n}\n\n\n\nstd::thread_local! {\n\n static RNG: std::cell::RefCell<rand_xorshift::XorShiftRng> = std::cell::RefCell::new(rand_xorshift::XorShiftRng::from_seed(rand::thread_rng().gen()));\n\n}\n\n\n", "file_path": "rust/ray-tracing-one-weekend/src/main.rs", "rank": 36, "score": 63779.04783630689 }, { "content": "func main() {\n", "file_path": "rust/go-bridge-sample/bridge.go", "rank": 37, "score": 62915.49259302243 }, { "content": "def find_errnos(arg)\n\n if arg =~ /\\A\\d+\\z/\n\n [ERRNOS[arg.to_i]]\n\n else\n\n [Errno.const_get(arg.upcase.to_sym).new]\n\n end\n\nrescue NameError\n\n ERRNOS.select { |e| e.message.include?(arg) }\n\nend\n\n\n\nARGV.each do |arg|\n\n find_errnos(arg).each do |e|\n\n puts \"#{e.errno}: #{e.class.name.sub(/^Errno::/, '')}: #{e.message}\"\n\n end\n\nend\n", "file_path": "ruby/errno.rb", "rank": 38, "score": 62806.59733066243 }, { "content": "int main(void) {\n\n char password[1024];\n\n read_password(password, sizeof(password));\n\n GnomeKeyringResult result = gnome_keyring_unlock_sync(NULL, password);\n\n switch (result) {\n\n case GNOME_KEYRING_RESULT_OK:\n\n return 0;\n\n default:\n\n fprintf(stderr, \"Unable to unlock: %s (result=%d)\\n\", gnome_keyring_result_to_message(result), 
result);\n\n return 1;\n\n }\n", "file_path": "cxx/gnome-keyring-unlock/gnome-keyring-unlock.c", "rank": 39, "score": 62090.56924496305 }, { "content": "fn simplify_func(\n\n loc_expr: &jrsonnet_parser::LocExpr,\n\n params: &jrsonnet_parser::ParamsDesc,\n\n body: &jrsonnet_parser::LocExpr,\n\n env: &std::collections::HashMap<String, isize>,\n\n counter: &mut isize,\n\n) -> Simplified {\n\n let mut next_env = env.clone();\n\n let mut binds = Vec::with_capacity(params.len());\n\n for param in params.iter() {\n\n let name = param.0.to_string();\n\n let index = *counter;\n\n *counter += 1;\n\n let child = if let Some(default_expr) = &param.1 {\n\n simplify_expr(default_expr, &next_env, counter)\n\n } else {\n\n Simplified::Lit\n\n };\n\n next_env.insert(name.clone(), index);\n\n binds.push((name, index, child));\n", "file_path": "rust/jrsonnet-lint/src/lib.rs", "rank": 40, "score": 61068.39936965894 }, { "content": "fn simplify_expr(\n\n loc_expr: &jrsonnet_parser::LocExpr,\n\n env: &std::collections::HashMap<String, isize>,\n\n counter: &mut isize,\n\n) -> Simplified {\n\n match loc_expr.0.as_ref() {\n\n jrsonnet_parser::Expr::Literal(_)\n\n | jrsonnet_parser::Expr::Str(_)\n\n | jrsonnet_parser::Expr::Num(_) => Simplified::Lit,\n\n jrsonnet_parser::Expr::Var(var_name) => {\n\n if let Some(index) = env.get(var_name as &str) {\n\n Simplified::Var { index: *index }\n\n } else {\n\n // unbound variable\n\n Simplified::Var { index: -1 }\n\n }\n\n }\n\n jrsonnet_parser::Expr::LocalExpr(bind_specs, child_expr) => {\n\n let mut next_env = env.clone();\n\n let binds = simplify_binds(bind_specs, &mut next_env, counter);\n", "file_path": "rust/jrsonnet-lint/src/lib.rs", "rank": 41, "score": 61068.39936965894 }, { "content": "fn find_unused(\n\n unused_variables: &mut Vec<(String, jrsonnet_parser::ExprLocation)>,\n\n expr: &Simplified,\n\n bound_indices: &mut std::collections::HashSet<isize>,\n\n) {\n\n match expr {\n\n Simplified::Expr { children } => {\n\n for child in children 
{\n\n find_unused(unused_variables, child, bound_indices);\n\n }\n\n }\n\n Simplified::Bind {\n\n location,\n\n name,\n\n index,\n\n expr,\n\n child,\n\n } => {\n\n find_unused(unused_variables, expr, bound_indices);\n\n bound_indices.insert(*index);\n", "file_path": "rust/jrsonnet-lint/src/lib.rs", "rank": 42, "score": 61068.39936965894 }, { "content": "fn simplify_binds(\n\n bind_specs: &[jrsonnet_parser::BindSpec],\n\n next_env: &mut std::collections::HashMap<String, isize>,\n\n counter: &mut isize,\n\n) -> Vec<(String, isize, Simplified)> {\n\n let mut binds = Vec::with_capacity(bind_specs.len());\n\n for bind_spec in bind_specs {\n\n if let Some(params) = &bind_spec.params {\n\n for param in params.iter() {\n\n let name = param.0.to_string();\n\n let index = *counter;\n\n *counter += 1;\n\n let child = if let Some(default_expr) = &param.1 {\n\n simplify_expr(default_expr, next_env, counter)\n\n } else {\n\n Simplified::Lit\n\n };\n\n next_env.insert(name.clone(), index);\n\n binds.push((name, index, child));\n\n }\n", "file_path": "rust/jrsonnet-lint/src/lib.rs", "rank": 43, "score": 61068.39936965894 }, { "content": "fn simplify_obj_body(\n\n loc_expr: &jrsonnet_parser::LocExpr,\n\n obj_body: &jrsonnet_parser::ObjBody,\n\n env: &std::collections::HashMap<String, isize>,\n\n counter: &mut isize,\n\n) -> Simplified {\n\n match obj_body {\n\n jrsonnet_parser::ObjBody::MemberList(members) => {\n\n let mut bind_specs = Vec::new();\n\n for member in members {\n\n if let jrsonnet_parser::Member::BindStmt(bind_spec) = member {\n\n bind_specs.push(bind_spec.to_owned());\n\n }\n\n }\n\n let mut next_env = env.clone();\n\n let binds = simplify_binds(&bind_specs, &mut next_env, counter);\n\n\n\n let mut children = Vec::with_capacity(members.len() - binds.len());\n\n for member in members {\n\n match member {\n", "file_path": "rust/jrsonnet-lint/src/lib.rs", "rank": 44, "score": 60075.97052001215 }, { "content": " def policy(name, &block)\n\n @role.policies << 
PolicyDocument.from_raw(name, block.call)\n\n end\n\n\n", "file_path": "rust/miam2tf/mrblib/miam.rb", "rank": 45, "score": 57573.1378051426 }, { "content": "def template(name, &block)\n\n @context.templates[name] = block\n\nend\n\n\n", "file_path": "rust/miam2tf/mrblib/miam.rb", "rank": 46, "score": 57573.1378051426 }, { "content": "fn print_policy_document<W>(\n\n writer: &mut W,\n\n name: &str,\n\n policy_document: &crate::PolicyDocument,\n\n) -> Result<(), std::io::Error>\n\nwhere\n\n W: std::io::Write,\n\n{\n\n writeln!(writer, r#\"data \"aws_iam_policy_document\" \"{}\" {{\"#, name)?;\n\n if let Some(ref version) = policy_document.version {\n\n writeln!(writer, r#\" version = \"{}\"\"#, version)?;\n\n }\n\n for statement in &policy_document.statements {\n\n writeln!(writer, r#\" statement {{\"#)?;\n\n writeln!(writer, r#\" effect = \"{}\"\"#, statement.effect)?;\n\n writeln!(writer, \" actions = {:?}\", statement.actions)?;\n\n writeln!(\n\n writer,\n\n \" resources = {:?}\",\n\n statement\n", "file_path": "rust/miam2tf/src/printer.rs", "rank": 47, "score": 56972.26123604244 }, { "content": " def self.from_raw(name, raw)\n\n policy = new\n\n policy.name = name.to_s\n\n policy.version = raw['Version']\n\n statements = raw['Statement']\n\n unless statements.is_a?(Array)\n\n statements = [statements]\n\n end\n\n policy.statements = statements.map do |raw_stmt|\n\n stmt = PolicyStatement.new\n\n stmt.effect = raw_stmt['Effect']\n\n stmt.actions = Array(raw_stmt['Action'])\n\n stmt.resources = Array(raw_stmt['Resource'])\n\n stmt.conditions = []\n\n if raw_stmt.key?('Condition')\n\n raw_stmt['Condition'].each do |test, raw_cond|\n\n raw_cond.each do |variable, values|\n\n cond = PolicyCondition.new\n\n cond.test = test\n\n cond.variable = variable\n", "file_path": "rust/miam2tf/mrblib/miam.rb", "rank": 48, "score": 56560.86508930134 }, { "content": "def instance_profile(name, path: nil)\n\n @root.instance_profiles << InstanceProfile.new(name, path)\n\nend\n\n\n", 
"file_path": "rust/miam2tf/mrblib/miam.rb", "rank": 49, "score": 53435.7193347523 }, { "content": "def group(name, path: nil, &block)\n\n group = Group.new\n\n group.name = name\n\n group.path = path\n\n GroupContext.new(group, context).instance_eval(&block)\n\n @root.groups << group\n\nend\n\n\n", "file_path": "rust/miam2tf/mrblib/miam.rb", "rank": 50, "score": 51621.74622356465 }, { "content": "def role(name, path: nil, &block)\n\n role = Role.new\n\n role.name = name\n\n role.path = path\n\n RoleContext.new(role, context).instance_eval(&block)\n\n @root.roles << role\n\nend\n\n\n", "file_path": "rust/miam2tf/mrblib/miam.rb", "rank": 51, "score": 51621.74622356465 }, { "content": "def user(name, path: nil, &block)\n\n user = User.new\n\n user.user_name = name\n\n user.path = path\n\n UserContext.new(user, context).instance_eval(&block)\n\n @root.users << user\n\nend\n\n\n", "file_path": "rust/miam2tf/mrblib/miam.rb", "rank": 52, "score": 51621.74622356465 }, { "content": "fn replace_iam_interpolation(s: &str) -> String {\n\n // https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/iam_policy_document#context-variable-interpolation\n\n s.replace(\"${\", \"&{\")\n\n}\n", "file_path": "rust/miam2tf/src/printer.rs", "rank": 53, "score": 51505.187933497 }, { "content": "def managed_policy(name, path: nil, &block)\n\n policy = ManagedPolicy.new\n\n policy.name = name\n\n policy.path = path\n\n raw = ManagedPolicyContext.new.instance_eval(&block)\n\n policy.policy_document = PolicyDocument.from_raw('ManagedPolicy', raw)\n\n @root.managed_policies << policy\n\nend\n\n\n", "file_path": "rust/miam2tf/mrblib/miam.rb", "rank": 54, "score": 50723.43737677735 }, { "content": "fn mrb_nil_value() -> crate::mruby_c::mrb_value {\n\n unsafe { crate::mruby_c::wrapper_mrb_nil_value() }\n\n}\n\n\n", "file_path": "rust/miam2tf/src/mruby.rs", "rank": 55, "score": 49792.707757151635 }, { "content": "fn degrees_to_radians(degrees: f64) -> f64 {\n\n degrees * 
std::f64::consts::PI / 180.0\n\n}\n\n\n", "file_path": "rust/ray-tracing-one-weekend/src/lib.rs", "rank": 56, "score": 49005.87899948912 }, { "content": "fn reflect(v: &Vec3, n: &Vec3) -> Vec3 {\n\n *v - 2.0 * v.dot(n) * *n\n\n}\n\n\n\npub struct Dielectric<R: 'static> {\n\n index_of_refraction: f64,\n\n tls_rng: &'static std::thread::LocalKey<std::cell::RefCell<R>>,\n\n}\n\nimpl<R> Dielectric<R> {\n\n pub fn new(\n\n index_of_refraction: f64,\n\n tls_rng: &'static std::thread::LocalKey<std::cell::RefCell<R>>,\n\n ) -> Self {\n\n Self {\n\n index_of_refraction,\n\n tls_rng,\n\n }\n\n }\n\n}\n\nimpl<R> Material for Dielectric<R>\n", "file_path": "rust/ray-tracing-one-weekend/src/lib.rs", "rank": 57, "score": 46641.53451717265 }, { "content": "fn mrb_nil_p(o: crate::mruby_c::mrb_value) -> bool {\n\n unsafe { crate::mruby_c::wrapper_mrb_nil_p(o) != 0 }\n\n}\n\n\n", "file_path": "rust/miam2tf/src/mruby.rs", "rank": 58, "score": 46641.53451717265 }, { "content": "fn rarray_len(ary: crate::mruby_c::mrb_value) -> i64 {\n\n unsafe { crate::mruby_c::wrapper_rarray_len(ary) }\n\n}\n\n\n", "file_path": "rust/miam2tf/src/mruby.rs", "rank": 59, "score": 45854.705759510136 }, { "content": "fn reflectance(cosine: f64, ref_idx: f64) -> f64 {\n\n // Use Schlick's approximation for reflectance\n\n let r0 = (1.0 - ref_idx) / (1.0 + ref_idx);\n\n let r0 = r0 * r0;\n\n r0 + (1.0 - r0) * (1.0 - cosine).powi(5)\n\n}\n", "file_path": "rust/ray-tracing-one-weekend/src/lib.rs", "rank": 60, "score": 44402.076421661906 }, { "content": "fn to_rust_policy_document(policy: &crate::mruby::Value) -> crate::PolicyDocument {\n\n let name = policy.read_attribute(\"name\").to_string();\n\n let version = policy.read_attribute(\"version\").to_string_opt();\n\n let mut statements = Vec::new();\n\n for statement in policy.read_attribute(\"statements\").iter() {\n\n let effect = statement.read_attribute(\"effect\").to_string();\n\n let mut actions = Vec::new();\n\n for action in 
statement.read_attribute(\"actions\").iter() {\n\n actions.push(action.to_string());\n\n }\n\n let mut resources = Vec::new();\n\n for resource in statement.read_attribute(\"resources\").iter() {\n\n resources.push(resource.to_string());\n\n }\n\n let mut conditions = Vec::new();\n\n for condition in statement.read_attribute(\"conditions\").iter() {\n\n let test = condition.read_attribute(\"test\").to_string();\n\n let variable = condition.read_attribute(\"variable\").to_string();\n\n let mut values = Vec::new();\n\n for value in condition.read_attribute(\"values\").iter() {\n", "file_path": "rust/miam2tf/src/loader.rs", "rank": 61, "score": 43161.20603004785 }, { "content": "fn build_assert_expr(assert_expr: AssertExpr) -> syn::export::TokenStream2 {\n\n let type_trait_object = assert_expr.type_trait_object;\n\n let expr = assert_expr.expr;\n\n if let Some(mut generics) = assert_expr.generics {\n\n generics\n\n .params\n\n .push(syn::GenericParam::Type(syn::TypeParam {\n\n attrs: Vec::new(),\n\n ident: syn::Ident::new(\"T\", type_trait_object.span()),\n\n colon_token: None,\n\n bounds: syn::punctuated::Punctuated::new(),\n\n eq_token: None,\n\n default: None,\n\n }));\n\n quote::quote! {\n\n ({\n\n fn assert#generics(x: T) -> T where T: #type_trait_object { x }\n\n assert\n\n })(#expr)\n\n }\n\n } else {\n\n quote::quote! 
{\n\n ({\n\n fn assert<T>(x: T) -> T where T: #type_trait_object { x }\n\n assert\n\n })(#expr)\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/assert_trait/src/lib.rs", "rank": 62, "score": 43091.14207275577 }, { "content": "pub fn write_color(pixel_color: &Color, samples_per_pixel: i32) {\n\n let r = pixel_color.x();\n\n let g = pixel_color.y();\n\n let b = pixel_color.z();\n\n\n\n // Divide the color by the number of samples and ganma-corrected for ganma=2.0\n\n let scale = 1.0 / samples_per_pixel as f64;\n\n let r = (scale * r).sqrt();\n\n let g = (scale * g).sqrt();\n\n let b = (scale * b).sqrt();\n\n\n\n println!(\n\n \"{} {} {}\",\n\n (256.0 * num::clamp(r, 0.0, 0.999)) as i32,\n\n (256.0 * num::clamp(g, 0.0, 0.999)) as i32,\n\n (256.0 * num::clamp(b, 0.0, 0.999)) as i32\n\n );\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "rust/ray-tracing-one-weekend/src/lib.rs", "rank": 63, "score": 42482.485936984915 }, { "content": "pub fn find_unused_variables(expr: &jrsonnet_parser::LocExpr) -> Vec<Variable> {\n\n let mut env = std::collections::HashMap::new();\n\n env.insert(\"std\".to_owned(), 0);\n\n let mut counter = env.len() as isize;\n\n let simplified = simplify_expr(expr, &env, &mut counter);\n\n let mut unused_variables = Vec::new();\n\n find_unused(\n\n &mut unused_variables,\n\n &simplified,\n\n &mut std::collections::HashSet::new(),\n\n );\n\n unused_variables\n\n .into_iter()\n\n .map(|(name, location)| Variable {\n\n name,\n\n path: location.0.to_path_buf(),\n\n begin_offset: location.1,\n\n end_offset: location.2,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "rust/jrsonnet-lint/src/lib.rs", "rank": 64, "score": 41850.27168114171 }, { "content": "fn refract(uv: &Vec3, n: &Vec3, etai_over_etat: f64) -> Vec3 {\n\n let cos_theta = (-*uv).dot(n).min(1.0);\n\n let r_out_perp = etai_over_etat * (*uv + cos_theta * *n);\n\n let r_out_parallel = -((1.0 - r_out_perp.length_squared()).abs().sqrt()) * *n;\n\n r_out_perp + r_out_parallel\n\n}\n\n\n", "file_path": 
"rust/ray-tracing-one-weekend/src/lib.rs", "rank": 65, "score": 41412.60265864299 }, { "content": "#[proc_macro]\n\npub fn assert_trait(tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input = syn::parse_macro_input!(tokens as AssertExpr);\n\n let expanded = build_assert_expr(input);\n\n proc_macro::TokenStream::from(expanded)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn assert_trait() {\n\n let tokens = quote::quote!(Iterator<Item = char>, \"abc\".chars());\n\n let input = syn::parse2(tokens).unwrap();\n\n let actual = super::build_assert_expr(input);\n\n let expected = quote::quote! {\n\n ({\n\n fn assert<T>(x: T) -> T where T: Iterator<Item = char> { x }\n\n assert\n\n })(\"abc\".chars())\n\n };\n\n assert_eq!(format!(\"{}\", expected), format!(\"{}\", actual));\n", "file_path": "rust/assert_trait/src/lib.rs", "rank": 66, "score": 41241.61554537085 }, { "content": "#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut resolver = trust_dns_resolver::AsyncResolver::tokio_from_system_conf()?;\n\n\n\n for arg in std::env::args().skip(1) {\n\n resolve_name(&mut resolver, arg).await?;\n\n }\n\n Ok(())\n\n}\n\n\n\nasync fn resolve_name(\n\n resolver: &mut trust_dns_resolver::TokioAsyncResolver,\n\n name: String,\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let type_style = ansi_term::Style::new().fg(ansi_term::Color::Yellow);\n\n let name_style = ansi_term::Style::new().fg(ansi_term::Color::Green);\n\n let addr_style = ansi_term::Style::new().fg(ansi_term::Color::Blue);\n\n\n\n let mx_handle = tokio::spawn(resolve_mx(\n\n resolver.clone(),\n", "file_path": "rust/rdig/src/main.rs", "rank": 68, "score": 33868.198976074214 }, { "content": " }\n\n _ => {\n\n eprintln!(\n\n \"{}: ignoring field {} in item {}\",\n\n me, field.name, item.name\n\n );\n\n }\n\n }\n\n }\n\n }\n\n _ => {\n\n eprintln!(\"{}: ignoring item {}\", me, item.name);\n\n }\n\n }\n\n }\n\n }\n\n\n\n let mut cmd = 
std::process::Command::new(&prog);\n\n cmd.envs(envs).args(args);\n\n let status = exec(cmd)?;\n\n if !status.success() {\n\n std::process::exit(status.code().unwrap_or(1));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rust/envwarden/src/main.rs", "rank": 69, "score": 33867.00337180894 }, { "content": " .arg(\"--folderid\")\n\n .arg(folderid)\n\n .output()?;\n\n if !output.status.success() {\n\n eprintln!(\"`bw list items` failed\");\n\n std::io::stdout().write_all(&output.stdout)?;\n\n std::io::stderr().write_all(&output.stderr)?;\n\n std::process::exit(output.status.code().unwrap_or(1));\n\n }\n\n\n\n let items: Vec<Item> = serde_json::from_slice(&output.stdout)?;\n\n let mut envs = Vec::new();\n\n for item in items.into_iter() {\n\n if item.name == name {\n\n match item.type_ {\n\n ItemType::SecureNote => {\n\n for field in item.fields {\n\n match field.type_ {\n\n FieldType::Text | FieldType::Hidden => {\n\n envs.push((field.name, field.value));\n", "file_path": "rust/envwarden/src/main.rs", "rank": 70, "score": 33866.39865963627 }, { "content": " )\n\n .await\n\n {\n\n for rdata in resp {\n\n let caa = rdata.as_caa().unwrap();\n\n use trust_dns_resolver::proto::rr::rdata::caa::{Property, Value};\n\n match (caa.tag(), caa.value()) {\n\n (Property::Issue, Value::Issuer(Some(domain), _)) => {\n\n println!(\n\n \"{} {} issue {} (critical={})\",\n\n name,\n\n type_style.paint(\"CAA\"),\n\n name_style.paint(domain.to_utf8()),\n\n caa.issuer_critical()\n\n );\n\n }\n\n (Property::IssueWild, Value::Issuer(Some(domain), _)) => {\n\n println!(\n\n \"{} {} issuewild {} (critical={})\",\n\n name,\n", "file_path": "rust/rdig/src/main.rs", "rank": 71, "score": 33866.13374854986 }, { "content": " type_style.paint(\"CAA\"),\n\n name_style.paint(domain.to_utf8()),\n\n caa.issuer_critical()\n\n );\n\n }\n\n (Property::Iodef, Value::Url(url)) => {\n\n println!(\n\n \"{} {} iodef {} (critical={})\",\n\n name,\n\n type_style.paint(\"CAA\"),\n\n name_style.paint(url.as_str()),\n\n 
caa.issuer_critical()\n\n );\n\n }\n\n (tag, value) => {\n\n println!(\n\n \"{} {} {:?} {:?} (critical={})\",\n\n name,\n\n type_style.paint(\"CAA\"),\n\n tag,\n", "file_path": "rust/rdig/src/main.rs", "rank": 73, "score": 33864.97999373661 }, { "content": "use bytes::Buf as _;\n\nuse bytes::BufMut as _;\n\nuse tokio::io::AsyncReadExt as _;\n\nuse tokio::io::AsyncWriteExt as _;\n\nuse tokio_rustls::rustls::Session as _;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let host = std::env::args()\n\n .nth(1)\n\n .unwrap_or_else(|| \"google.com\".to_owned());\n\n let tcp_stream = tokio::net::TcpStream::connect((host.clone(), 443)).await?;\n\n let mut tls_config = tokio_rustls::rustls::ClientConfig::default();\n\n tls_config\n\n .root_store\n\n .add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS);\n\n tls_config.alpn_protocols.push(b\"h2\".to_vec());\n\n tls_config.key_log = std::sync::Arc::new(tokio_rustls::rustls::KeyLogFile::new());\n\n let connector = tokio_rustls::TlsConnector::from(std::sync::Arc::new(tls_config));\n\n let tls_stream = connector\n", "file_path": "learn/http2/src/main.rs", "rank": 75, "score": 33864.84752412167 }, { "content": "const BUCKET_NAME: &str = \"gyazo.wanko.cc\";\n\nconst URL_PREFIX: &str = \"https://gyazo.wanko.cc\";\n\nconst REGION: &str = \"ap-northeast-1\";\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let shared_config = aws_config::from_env().region(REGION).load().await;\n\n let s3 = aws_sdk_s3::Client::new(&shared_config);\n\n\n\n for arg in std::env::args().skip(1) {\n\n upload(&s3, std::path::Path::new(&arg)).await?;\n\n }\n\n Ok(())\n\n}\n\n\n\nasync fn upload(\n\n s3_client: &aws_sdk_s3::Client,\n\n path: &std::path::Path,\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let image = tokio::fs::read(path).await?;\n", "file_path": "rust/gyazo/src/main.rs", "rank": 76, "score": 33864.47657023936 }, { "content": " name.clone(),\n\n 
type_style,\n\n name_style,\n\n ));\n\n let txt_handle = tokio::spawn(resolve_txt(resolver.clone(), name.clone(), type_style));\n\n let caa_handle = tokio::spawn(resolve_caa(\n\n resolver.clone(),\n\n name.clone(),\n\n type_style,\n\n name_style,\n\n ));\n\n\n\n mx_handle.await?;\n\n txt_handle.await?;\n\n caa_handle.await?;\n\n\n\n let mut name = name;\n\n loop {\n\n let mut resolved = false;\n\n if let Ok(resp) = resolver\n", "file_path": "rust/rdig/src/main.rs", "rank": 77, "score": 33862.969257014345 }, { "content": " break;\n\n }\n\n }\n\n let name = name;\n\n\n\n let mut addrs = Vec::new();\n\n if let Ok(resp) = resolver.ipv4_lookup(name.as_str()).await {\n\n for a in resp {\n\n println!(\n\n \"{} {} {}\",\n\n name,\n\n type_style.paint(\"A\"),\n\n addr_style.paint(a.to_string())\n\n );\n\n addrs.push(std::net::IpAddr::from(a));\n\n }\n\n }\n\n\n\n if let Ok(resp) = resolver.ipv6_lookup(name.as_str()).await {\n\n for aaaa in resp {\n", "file_path": "rust/rdig/src/main.rs", "rank": 78, "score": 33862.072663385115 }, { "content": " BUCKET_NAME,\n\n digest\n\n );\n\n let put_image_future = s3_client\n\n .put_object()\n\n .bucket(BUCKET_NAME)\n\n .storage_class(aws_sdk_s3::model::StorageClass::ReducedRedundancy)\n\n .key(image_key)\n\n .content_length(image.len() as i64)\n\n .body(image.into())\n\n .set_content_type(content_type)\n\n .send();\n\n let put_html_future = s3_client\n\n .put_object()\n\n .bucket(BUCKET_NAME)\n\n .storage_class(aws_sdk_s3::model::StorageClass::ReducedRedundancy)\n\n .key(digest)\n\n .content_length(html.len() as i64)\n\n .body(html.into())\n\n .content_type(\"text/html\")\n\n .send();\n\n let (put_image_result, put_html_result) = futures::join!(put_image_future, put_html_future);\n\n put_image_result?;\n\n put_html_result?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rust/gyazo/src/main.rs", "rank": 79, "score": 33861.48049021842 }, { "content": " let mut md5 = crypto::md5::Md5::new();\n\n use crypto::digest::Digest;\n\n 
md5.input(&image);\n\n let digest = md5.result_str();\n\n\n\n let image_key = format!(\n\n \"{}.{}\",\n\n digest,\n\n path.extension()\n\n .map(|ext| ext.to_str().unwrap())\n\n .unwrap_or(\"\")\n\n );\n\n let content_type = path.extension().and_then(guess_content_type);\n\n let html = render_html(&digest, &image_key).into_bytes();\n\n println!(\n\n \"{} -> {}/{} (https://s3-{}.amazonaws.com/{}/{})\",\n\n path.display(),\n\n URL_PREFIX,\n\n digest,\n\n REGION,\n", "file_path": "rust/gyazo/src/main.rs", "rank": 80, "score": 33861.300001649375 }, { "content": " handle.await?;\n\n }\n\n Ok(())\n\n}\n\n\n\nasync fn resolve_mx(\n\n resolver: trust_dns_resolver::TokioAsyncResolver,\n\n name: String,\n\n type_style: ansi_term::Style,\n\n name_style: ansi_term::Style,\n\n) {\n\n if let Ok(resp) = resolver.mx_lookup(name.as_str()).await {\n\n let mut records: Vec<_> = resp.into_iter().collect();\n\n records.sort_unstable_by(|x, y| {\n\n x.preference()\n\n .cmp(&y.preference())\n\n .then_with(|| x.exchange().cmp(y.exchange()))\n\n });\n\n for mx in records {\n\n println!(\n", "file_path": "rust/rdig/src/main.rs", "rank": 81, "score": 33861.02870772508 }, { "content": " println!(\n\n \"{} {} {}\",\n\n name,\n\n type_style.paint(\"AAAA\"),\n\n addr_style.paint(aaaa.to_string())\n\n );\n\n addrs.push(std::net::IpAddr::from(aaaa));\n\n }\n\n }\n\n\n\n let mut addr_handles = Vec::new();\n\n for addr in addrs {\n\n addr_handles.push(tokio::spawn(resolve_ptr(\n\n resolver.clone(),\n\n addr,\n\n type_style,\n\n name_style,\n\n )));\n\n }\n\n for handle in addr_handles {\n", "file_path": "rust/rdig/src/main.rs", "rank": 82, "score": 33860.973101399686 }, { "content": " .lookup(\n\n name.as_str(),\n\n trust_dns_resolver::proto::rr::RecordType::CNAME,\n\n Default::default(),\n\n )\n\n .await\n\n {\n\n for cname in resp {\n\n resolved = true;\n\n let next_name = cname.as_cname().unwrap().to_string();\n\n println!(\n\n \"{} {} {}\",\n\n name,\n\n type_style.paint(\"CNAME\"),\n\n 
name_style.paint(&next_name)\n\n );\n\n name = next_name;\n\n }\n\n }\n\n if !resolved {\n", "file_path": "rust/rdig/src/main.rs", "rank": 83, "score": 33860.396255853746 }, { "content": " let mut frame = bytes::BytesMut::new();\n\n frame.put_uint(4, 3); // Length\n\n frame.put_u8(0x3); // Type = RST_STREAM\n\n frame.put_u8(0x0); // Flags\n\n frame.put_u32(stream_identifier);\n\n frame.put_u32(0x5); // STREAM_CLOSED\n\n tls_writer.write_all(&frame).await?;\n\n break;\n\n }\n\n }\n\n 0x1 => {\n\n // HEADERS frame\n\n // https://httpwg.org/specs/rfc7540.html#rfc.section.6.2\n\n let mut decoder = hpack_codec::Decoder::new(table_size);\n\n let mut header_decoder = decoder.enter_header_block(&payload)?;\n\n while let Some(field) = header_decoder.decode_field()? {\n\n println!(\n\n \" {}: {}\",\n\n String::from_utf8_lossy(field.name()),\n\n String::from_utf8_lossy(field.value())\n", "file_path": "learn/http2/src/main.rs", "rank": 84, "score": 33860.38070041047 }, { "content": "use std::io::Write as _;\n\n\n", "file_path": "rust/envwarden/src/main.rs", "rank": 86, "score": 33859.933917724455 }, { "content": " \"{} {} {} {}\",\n\n name,\n\n type_style.paint(\"MX\"),\n\n name_style.paint(mx.exchange().to_utf8()),\n\n mx.preference()\n\n );\n\n }\n\n }\n\n}\n\n\n\nasync fn resolve_txt(\n\n resolver: trust_dns_resolver::TokioAsyncResolver,\n\n name: String,\n\n type_style: ansi_term::Style,\n\n) {\n\n if let Ok(resp) = resolver.txt_lookup(name.as_str()).await {\n\n for txt in resp {\n\n for data in txt.txt_data() {\n\n println!(\n\n \"{} {} {}\",\n", "file_path": "rust/rdig/src/main.rs", "rank": 87, "score": 33859.74509996407 }, { "content": " name,\n\n type_style.paint(\"TXT\"),\n\n String::from_utf8_lossy(data),\n\n );\n\n }\n\n }\n\n }\n\n}\n\n\n\nasync fn resolve_caa(\n\n resolver: trust_dns_resolver::TokioAsyncResolver,\n\n name: String,\n\n type_style: ansi_term::Style,\n\n name_style: ansi_term::Style,\n\n) {\n\n if let Ok(resp) = resolver\n\n .lookup(\n\n 
name.as_str(),\n\n trust_dns_resolver::proto::rr::RecordType::CAA,\n\n Default::default(),\n", "file_path": "rust/rdig/src/main.rs", "rank": 88, "score": 33859.701316418 }, { "content": " println!(\" frame.flags = 0x{:x}\", flags);\n\n println!(\" frame.stream_identifier = {}\", stream_identifier);\n\n window_size -= length as u32;\n\n\n\n let mut payload = bytes::BytesMut::new();\n\n if length > 0 {\n\n while payload.len() < length {\n\n let mut buf = bytes::BytesMut::new();\n\n buf.resize(length - payload.len(), 0);\n\n println!(\" Read payload for {} bytes\", buf.len());\n\n let read_bytes = tls_reader.read(&mut buf).await?;\n\n if read_bytes == 0 {\n\n eprintln!(\" Got EOF while reading frame payload\");\n\n break 'read_frame;\n\n } else {\n\n payload.put(&buf[0..read_bytes]);\n\n eprintln!(\n\n \" Read {}/{} bytes in total for payload\",\n\n payload.len(),\n\n length\n", "file_path": "learn/http2/src/main.rs", "rank": 89, "score": 33859.51442750321 }, { "content": " .connect(\n\n tokio_rustls::webpki::DNSNameRef::try_from_ascii_str(&host)?,\n\n tcp_stream,\n\n )\n\n .await?;\n\n let (_, session) = tls_stream.get_ref();\n\n println!(\"Protocol version: {:?}\", session.get_protocol_version());\n\n println!(\n\n \"ALPN protocol: {:?}\",\n\n session\n\n .get_alpn_protocol()\n\n .map(|p| String::from_utf8_lossy(p))\n\n );\n\n let (tls_reader, mut tls_writer) = tokio::io::split(tls_stream);\n\n const BUF_READER_CAP: u32 = 64 * 1024;\n\n let mut tls_reader = tokio::io::BufReader::with_capacity(BUF_READER_CAP as usize, tls_reader);\n\n\n\n // https://httpwg.org/specs/rfc7540.html#rfc.section.3.5\n\n let preface = b\"PRI * HTTP/2.0\\r\\n\\r\\nSM\\r\\n\\r\\n\";\n\n tls_writer.write_all(preface).await?;\n", "file_path": "learn/http2/src/main.rs", "rank": 90, "score": 33859.47618334691 }, { "content": " // https://httpwg.org/specs/rfc7540.html#rfc.section.4.1\n\n let mut header = bytes::BytesMut::new();\n\n while header.len() < 9 {\n\n let mut buf = 
bytes::BytesMut::new();\n\n buf.resize(9 - header.len(), 0);\n\n let read_bytes = tls_reader.read(&mut buf).await?;\n\n if read_bytes == 0 {\n\n eprintln!(\" Got EOF while reading frame header\");\n\n break 'read_frame;\n\n } else {\n\n header.put(&buf[0..read_bytes]);\n\n eprintln!(\" Read {}/9 bytes in total for header\", header.len());\n\n }\n\n }\n\n let length = header.get_uint(3) as usize;\n\n let type_ = header.get_u8();\n\n let flags = header.get_u8();\n\n let stream_identifier = header.get_u32();\n\n println!(\" frame.length = {}\", length);\n\n println!(\" frame.type = 0x{:x}\", type_);\n", "file_path": "learn/http2/src/main.rs", "rank": 91, "score": 33859.41013339714 }, { "content": "\n\n let mut window_size = BUF_READER_CAP;\n\n const HEADER_TABLE_SIZE: u16 = 4096;\n\n // Send SETTINGS frame\n\n {\n\n // https://httpwg.org/specs/rfc7540.html#rfc.section.6.5\n\n let mut payload = bytes::BytesMut::new();\n\n // Set SETTINGS_HEADER_TABLE_SIZE to HEADER_TABLE_SIZE\n\n payload.put_u16(0x1);\n\n payload.put_u32(HEADER_TABLE_SIZE as u32);\n\n // Set SETTINGS_ENABLE_PUSH to 0\n\n payload.put_u16(0x2);\n\n payload.put_u32(0);\n\n // Set SETTINGS_INITIAL_WINDOW_SIZE to window_size\n\n payload.put_u16(0x4);\n\n payload.put_u32(window_size);\n\n\n\n // https://httpwg.org/specs/rfc7540.html#rfc.section.4.1\n\n let mut frame = bytes::BytesMut::new();\n\n frame.put_uint(payload.len() as u64, 3); // Length\n", "file_path": "learn/http2/src/main.rs", "rank": 92, "score": 33859.21929006672 }, { "content": " header_encoder.encode_field(hpack_codec::table::StaticEntry::PathRoot)?;\n\n let header_block_flagment = header_encoder.finish();\n\n // No Padding (No PADDED flag)\n\n\n\n let mut frame = bytes::BytesMut::new();\n\n frame.put_uint(header_block_flagment.len() as u64, 3); // Length\n\n frame.put_u8(0x1); // Type = HEADERS (0x1)\n\n frame.put_u8(0x1 | 0x4); // Flags = END_STREAM | END_HEADERS\n\n frame.put_u32(0x00000001); // Stream Identifier = 1\n\n 
frame.extend_from_slice(&header_block_flagment);\n\n tls_writer.write_all(&frame).await?;\n\n }\n\n\n\n // Read responses\n\n let mut http_body = bytes::BytesMut::new();\n\n let mut table_size = 4096;\n\n 'read_frame: loop {\n\n if window_size < BUF_READER_CAP / 2 {\n\n const WINDOW_SIZE_INCREMENT: u32 = 16 * BUF_READER_CAP;\n\n for stream_identifier in 0..=1 {\n", "file_path": "learn/http2/src/main.rs", "rank": 93, "score": 33859.16867599296 }, { "content": " frame.put_u8(0x4); // Type = SETTINGS (0x4)\n\n frame.put_u8(0x00); // Flags = 0 (ACK = 0)\n\n frame.put_u32(0x00000000); // Stream Identifier = 0\n\n frame.extend_from_slice(&payload);\n\n tls_writer.write_all(&frame).await?;\n\n }\n\n // Send HEADERS frame\n\n {\n\n // https://httpwg.org/specs/rfc7540.html#rfc.section.6.2\n\n // No Pad Length (No PADDED flag)\n\n // No Stream Dependency (No PRIORITY flag)\n\n // No Weight (No PRIORITY flag)\n\n let mut encoder = hpack_codec::Encoder::new(HEADER_TABLE_SIZE);\n\n let mut header_encoder = encoder.enter_header_block(Vec::new())?;\n\n header_encoder.encode_field(hpack_codec::table::StaticEntry::SchemeHttps)?;\n\n header_encoder.encode_field(hpack_codec::field::LiteralHeaderField::with_indexed_name(\n\n hpack_codec::table::StaticEntry::Authority,\n\n host.as_bytes(),\n\n ))?;\n\n header_encoder.encode_field(hpack_codec::table::StaticEntry::MethodGet)?;\n", "file_path": "learn/http2/src/main.rs", "rank": 94, "score": 33858.869977443355 }, { "content": "#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let matches = clap::App::new(clap::crate_name!())\n\n .version(clap::crate_version!())\n\n .arg(\n\n clap::Arg::with_name(\"role-arn\")\n\n .short(\"r\")\n\n .long(\"role-arn\")\n\n .value_name(\"ROLE_ARN\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n .get_matches();\n\n let role_arn = matches.value_of(\"role-arn\").unwrap().to_owned();\n\n let role_session_name = format!(\n\n \"{}-{}\",\n\n std::env::var(\"USER\")?,\n\n 
std::time::SystemTime::now()\n\n .duration_since(std::time::SystemTime::UNIX_EPOCH)?\n\n .as_secs()\n", "file_path": "rust/aws-assume-role/src/main.rs", "rank": 95, "score": 9.403369655079647 }, { "content": " jrsonnet_parser::Expr::Apply(func, jrsonnet_parser::ArgsDesc { unnamed, named }, _) => {\n\n let mut children = Vec::with_capacity(unnamed.len() + named.len() + 1);\n\n children.push(simplify_expr(func, env, counter));\n\n for arg in unnamed {\n\n children.push(simplify_expr(arg, env, counter));\n\n }\n\n for (_, arg) in named {\n\n children.push(simplify_expr(arg, env, counter));\n\n }\n\n Simplified::Expr { children }\n\n }\n\n jrsonnet_parser::Expr::Index(obj, idx) => Simplified::Expr {\n\n children: vec![\n\n simplify_expr(obj, env, counter),\n\n simplify_expr(idx, env, counter),\n\n ],\n\n },\n\n jrsonnet_parser::Expr::ArrComp(child_expr, comp_specs) => {\n\n let mut children = Vec::new();\n\n enum Spec {\n", "file_path": "rust/jrsonnet-lint/src/lib.rs", "rank": 96, "score": 8.324300310075948 }, { "content": " log::debug!(\"index_size = {}\", index_size);\n\n\n\n let pos = reader.stream_position()?;\n\n let mut index = vec![0u8; index_size];\n\n reader.read_exact(&mut index)?;\n\n let key = ring::digest::digest(&ring::digest::SHA1_FOR_LEGACY_USE_ONLY, &index);\n\n let key = key.as_ref();\n\n reader.seek(std::io::SeekFrom::Start(pos))?;\n\n\n\n let file_count = reader.read_u32::<byteorder::LittleEndian>()?;\n\n log::debug!(\"file_count = {}\", file_count);\n\n let mut entries = Vec::new();\n\n for i in 0..file_count {\n\n let name_length = reader.read_u32::<byteorder::LittleEndian>()? 
as usize;\n\n log::debug!(\"[{}] name_length={}\", i, name_length);\n\n let mut name = vec![0u8; name_length];\n\n reader.read_exact(&mut name)?;\n\n let name = String::from_utf8(name)?;\n\n log::debug!(\" name={}\", name);\n\n reader.read_u32::<byteorder::LittleEndian>()?; // Skip 4 bytes\n", "file_path": "rust/pfs-unpack/src/main.rs", "rank": 97, "score": 8.072599603691724 }, { "content": "#[derive(Debug)]\n\npub struct Variable {\n\n pub name: String,\n\n pub path: std::path::PathBuf,\n\n pub begin_offset: usize,\n\n pub end_offset: usize,\n\n}\n\n\n\nimpl Variable {\n\n pub fn begin_offset_line(&self) -> std::io::Result<usize> {\n\n use std::io::BufRead as _;\n\n use std::io::Read as _;\n\n\n\n let file = std::fs::File::open(&self.path)?;\n\n let reader = std::io::BufReader::new(file);\n\n let mut lineno = 1;\n\n for line in reader.take(self.begin_offset as u64).lines() {\n\n line?;\n\n lineno += 1;\n\n }\n\n Ok(lineno)\n\n }\n\n}\n\n\n", "file_path": "rust/jrsonnet-lint/src/lib.rs", "rank": 98, "score": 7.081049417403765 }, { "content": "use futures::StreamExt as _;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n for c in assert_trait::assert_trait!(Iterator<Item = char>, \"assert\".chars()) {\n\n println!(\"{}\", c);\n\n }\n\n for line in assert_trait::assert_trait!(<'a>, Iterator<Item = &'a str>, \"a\\nb\\nc\".lines()) {\n\n println!(\"{}\", line);\n\n }\n\n\n\n let interval = assert_trait::assert_trait!(\n\n futures::Stream + Unpin,\n\n tokio::time::interval(tokio::time::Duration::from_secs(2))\n\n .map(|i| futures::future::Either::Left(i))\n\n .take(4)\n\n );\n\n let status = futures::stream::once(tokio::process::Command::new(\"sleep\").arg(\"6\").status())\n\n .map(|s| futures::future::Either::Right(s));\n\n // OK\n", "file_path": "rust/assert_trait/examples/assert.rs", "rank": 99, "score": 6.681257963809763 } ]
Rust
src/mbart/encoder.rs
eonm-abes/rust-bert
24fdb2dfb41e7cad6367f77e905570649bb7aefe
use crate::bart::{BartEncoderOutput, _expand_mask}; use crate::common::activations::TensorFunction; use crate::common::dropout::Dropout; use crate::mbart::attention::MBartAttention; use crate::mbart::embeddings::MBartLearnedPositionalEmbedding; use crate::mbart::MBartConfig; use crate::Activation; use std::borrow::{Borrow, BorrowMut}; use tch::{nn, Tensor}; pub struct MBartEncoderLayer { self_attention: MBartAttention, self_attention_layer_norm: nn::LayerNorm, dropout: Dropout, activation_dropout: Dropout, activation: TensorFunction, fc1: nn::Linear, fc2: nn::Linear, final_layer_norm: nn::LayerNorm, } impl MBartEncoderLayer { pub fn new<'p, P>(p: P, config: &MBartConfig) -> MBartEncoderLayer where P: Borrow<nn::Path<'p>>, { let p = p.borrow(); let layer_norm_config = nn::LayerNormConfig { eps: 1e-5, ..Default::default() }; let output_attention = config.output_attentions.unwrap_or(false); let self_attention = MBartAttention::new( p / "self_attn", config.d_model, config.encoder_attention_heads, config.attention_dropout, false, false, output_attention, ); let self_attention_layer_norm = nn::layer_norm( p / "self_attn_layer_norm", vec![config.d_model], layer_norm_config, ); let dropout = Dropout::new(config.dropout); let activation_dropout = Dropout::new(config.activation_dropout); let activation_function = match &config.activation_function { Some(act_function) => act_function, None => &Activation::gelu, }; let activation = activation_function.get_function(); let fc1 = nn::linear( p / "fc1", config.d_model, config.encoder_ffn_dim, Default::default(), ); let fc2 = nn::linear( p / "fc2", config.encoder_ffn_dim, config.d_model, Default::default(), ); let final_layer_norm = nn::layer_norm( p / "final_layer_norm", vec![config.d_model], layer_norm_config, ); MBartEncoderLayer { self_attention, self_attention_layer_norm, dropout, activation_dropout, activation, fc1, fc2, final_layer_norm, } } pub fn forward_t( &self, x: &Tensor, encoder_attention_mask: Option<&Tensor>, train: 
bool, ) -> (Tensor, Option<Tensor>) { let output = x.apply(&self.self_attention_layer_norm); let (output, attention_weights, _) = self.self_attention .forward_t(&output, None, encoder_attention_mask, None, train); let output: Tensor = output.apply_t(&self.dropout, train) + x; let residual = output.copy(); let output = output.apply(&self.final_layer_norm); let output = (self.activation.get_fn())(&output.apply(&self.fc1)); let output = output .apply_t(&self.activation_dropout, train) .apply(&self.fc2) .apply_t(&self.dropout, train); let output = output + residual; (output, attention_weights) } } pub struct MBartEncoder { dropout: Dropout, layer_norm_embedding: nn::LayerNorm, layer_norm: nn::LayerNorm, layers: Vec<MBartEncoderLayer>, embed_positions: MBartLearnedPositionalEmbedding, output_attentions: bool, output_hidden_states: bool, scale_embedding: f64, } impl MBartEncoder { pub fn new<'p, P>(p: P, config: &MBartConfig) -> MBartEncoder where P: Borrow<nn::Path<'p>>, { let p = p.borrow(); let output_attentions = config.output_attentions.unwrap_or(false); let output_hidden_states = config.output_hidden_states.unwrap_or(false); let scale_embedding = if let Some(scale_embeddings) = config.scale_embedding { if scale_embeddings { (config.d_model as f64).sqrt() } else { 1.0 } } else { 1.0 }; let dropout = Dropout::new(config.dropout); let layer_norm_embedding = nn::layer_norm( p / "layernorm_embedding", vec![config.d_model], Default::default(), ); let layer_norm = nn::layer_norm(p / "layer_norm", vec![config.d_model], Default::default()); let embed_positions = MBartLearnedPositionalEmbedding::new( p / "embed_positions", config.max_position_embeddings, config.d_model, ); let mut layers: Vec<MBartEncoderLayer> = vec![]; let p_layers = p / "layers"; for layer_index in 0..config.encoder_layers { layers.push(MBartEncoderLayer::new(&p_layers / layer_index, config)); } MBartEncoder { dropout, layer_norm_embedding, layer_norm, layers, embed_positions, output_attentions, 
output_hidden_states, scale_embedding, } } pub fn forward_t( &self, input_ids: &Tensor, attention_mask: Option<&Tensor>, embeddings: &nn::Embedding, train: bool, ) -> MBartEncoderOutput { let attention_mask = attention_mask.map(|mask| _expand_mask(mask, None)); let x = input_ids.apply(embeddings) * self.scale_embedding; let x = x + &self.embed_positions.forward(input_ids, 0); let mut hidden_state = x .apply(&self.layer_norm_embedding) .apply_t(&self.dropout, train); let mut all_hidden_states: Option<Vec<Tensor>> = if self.output_hidden_states { Some(vec![]) } else { None }; let mut all_attentions: Option<Vec<Tensor>> = if self.output_attentions { Some(vec![]) } else { None }; let mut attention_weights: Option<Tensor>; for layer in &self.layers { if let Some(hidden_states) = all_hidden_states.borrow_mut() { hidden_states.push(hidden_state.as_ref().copy()); }; let temp = layer.forward_t(&hidden_state, attention_mask.as_ref(), train); hidden_state = temp.0; attention_weights = temp.1; if let Some(attentions) = all_attentions.borrow_mut() { attentions.push(attention_weights.as_ref().unwrap().copy()); }; } if let Some(hidden_states) = all_hidden_states.borrow_mut() { hidden_states.push(hidden_state.as_ref().copy()); }; hidden_state = hidden_state.apply(&self.layer_norm); MBartEncoderOutput { hidden_state, all_hidden_states, all_attentions, } } } pub type MBartEncoderOutput = BartEncoderOutput;
use crate::bart::{BartEncoderOutput, _expand_mask}; use crate::common::activations::TensorFunction; use crate::common::dropout::Dropout; use crate::mbart::attention::MBartAttention; use crate::mbart::embeddings::MBartLearnedPositionalEmbedding; use crate::mbart::MBartConfig; use crate::Activation; use std::borrow::{Borrow, BorrowMut}; use tch::{nn, Tensor}; pub struct MBartEncoderLayer { self_attention: MBartAttention, self_attention_layer_norm: nn::LayerNorm, dropout: Dropout, activation_dropout: Dropout, activation: TensorFunction, fc1: nn::Linear, fc2: nn::Linear, final_layer_norm: nn::LayerNorm, } impl MBartEncoderLayer { pub fn new<'p, P>(p: P, config: &MBartConfig) -> MBartEncoderLayer where P: Borrow<nn::Path<'p>>, { let p = p.borrow(); let layer_norm_config = nn::LayerNormConfig { eps: 1e-5, ..Default::default() }; let output_attention = config.output_attentions.unwrap_or(false); let self_attention = MBartAttention::new( p / "self_attn", config.d_model, config.encoder_attention_heads, config.attention_dropout, false, false, output_attention, ); let self_attention_layer_norm = nn::layer_norm( p / "self_attn_layer_norm", vec![config.d_model], layer_norm_config, ); let dropout = Dropout::new(config.dropout); let activation_dropout = Dropout::new(config.activation_dropout); let activation_function = match &config.activation_function { Some(act_function) => act_function, None => &Activation::gelu, }; let activation = activation_function.get_function(); let fc1 = nn::linear( p / "fc1", config.d_model, config.encoder_ffn_dim, Default::default(), ); let fc2 = nn::linear( p / "fc2", config.encoder_ffn_dim, config.d_model, Default::default(), ); let final_layer_norm = nn::layer_norm( p / "final_layer_norm", vec![config.d_model], layer_norm_config, ); MBartEncoderLayer { self_attention, self_attention_layer_norm, dropout, activation_dropout, activation, fc1, fc2, final_layer_norm, } } pub fn forward_t( &self, x: &Tensor, encoder_attention_mask: Option<&Tensor>, train: 
bool, ) -> (Tensor, Option<Tensor>) { let output = x.apply(&self.self_attention_layer_norm); let (output, attention_weights, _) =
} pub struct MBartEncoder { dropout: Dropout, layer_norm_embedding: nn::LayerNorm, layer_norm: nn::LayerNorm, layers: Vec<MBartEncoderLayer>, embed_positions: MBartLearnedPositionalEmbedding, output_attentions: bool, output_hidden_states: bool, scale_embedding: f64, } impl MBartEncoder { pub fn new<'p, P>(p: P, config: &MBartConfig) -> MBartEncoder where P: Borrow<nn::Path<'p>>, { let p = p.borrow(); let output_attentions = config.output_attentions.unwrap_or(false); let output_hidden_states = config.output_hidden_states.unwrap_or(false); let scale_embedding = if let Some(scale_embeddings) = config.scale_embedding { if scale_embeddings { (config.d_model as f64).sqrt() } else { 1.0 } } else { 1.0 }; let dropout = Dropout::new(config.dropout); let layer_norm_embedding = nn::layer_norm( p / "layernorm_embedding", vec![config.d_model], Default::default(), ); let layer_norm = nn::layer_norm(p / "layer_norm", vec![config.d_model], Default::default()); let embed_positions = MBartLearnedPositionalEmbedding::new( p / "embed_positions", config.max_position_embeddings, config.d_model, ); let mut layers: Vec<MBartEncoderLayer> = vec![]; let p_layers = p / "layers"; for layer_index in 0..config.encoder_layers { layers.push(MBartEncoderLayer::new(&p_layers / layer_index, config)); } MBartEncoder { dropout, layer_norm_embedding, layer_norm, layers, embed_positions, output_attentions, output_hidden_states, scale_embedding, } } pub fn forward_t( &self, input_ids: &Tensor, attention_mask: Option<&Tensor>, embeddings: &nn::Embedding, train: bool, ) -> MBartEncoderOutput { let attention_mask = attention_mask.map(|mask| _expand_mask(mask, None)); let x = input_ids.apply(embeddings) * self.scale_embedding; let x = x + &self.embed_positions.forward(input_ids, 0); let mut hidden_state = x .apply(&self.layer_norm_embedding) .apply_t(&self.dropout, train); let mut all_hidden_states: Option<Vec<Tensor>> = if self.output_hidden_states { Some(vec![]) } else { None }; let mut all_attentions: 
Option<Vec<Tensor>> = if self.output_attentions { Some(vec![]) } else { None }; let mut attention_weights: Option<Tensor>; for layer in &self.layers { if let Some(hidden_states) = all_hidden_states.borrow_mut() { hidden_states.push(hidden_state.as_ref().copy()); }; let temp = layer.forward_t(&hidden_state, attention_mask.as_ref(), train); hidden_state = temp.0; attention_weights = temp.1; if let Some(attentions) = all_attentions.borrow_mut() { attentions.push(attention_weights.as_ref().unwrap().copy()); }; } if let Some(hidden_states) = all_hidden_states.borrow_mut() { hidden_states.push(hidden_state.as_ref().copy()); }; hidden_state = hidden_state.apply(&self.layer_norm); MBartEncoderOutput { hidden_state, all_hidden_states, all_attentions, } } } pub type MBartEncoderOutput = BartEncoderOutput;
self.self_attention .forward_t(&output, None, encoder_attention_mask, None, train); let output: Tensor = output.apply_t(&self.dropout, train) + x; let residual = output.copy(); let output = output.apply(&self.final_layer_norm); let output = (self.activation.get_fn())(&output.apply(&self.fc1)); let output = output .apply_t(&self.activation_dropout, train) .apply(&self.fc2) .apply_t(&self.dropout, train); let output = output + residual; (output, attention_weights) }
function_block-function_prefix_line
[ { "content": "pub fn _tanh(x: &Tensor) -> Tensor {\n\n x.tanh()\n\n}\n\n\n\npub struct TensorFunction(Box<fn(&Tensor) -> Tensor>);\n\n\n\nimpl TensorFunction {\n\n pub fn new(fun: Box<fn(&Tensor) -> Tensor>) -> Self {\n\n Self(fun)\n\n }\n\n\n\n pub fn get_fn(&self) -> &fn(&Tensor) -> Tensor {\n\n &self.0\n\n }\n\n}\n\nimpl std::fmt::Debug for TensorFunction {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {\n\n write!(f, \"TensorFunction\")\n\n }\n\n}\n", "file_path": "src/common/activations.rs", "rank": 0, "score": 274841.5651562009 }, { "content": "pub fn _swish(x: &Tensor) -> Tensor {\n\n x * x.sigmoid()\n\n}\n\n\n", "file_path": "src/common/activations.rs", "rank": 1, "score": 274841.5651562009 }, { "content": "pub fn _relu(x: &Tensor) -> Tensor {\n\n x.relu()\n\n}\n\n\n", "file_path": "src/common/activations.rs", "rank": 2, "score": 274841.5651562009 }, { "content": "pub fn _gelu(x: &Tensor) -> Tensor {\n\n x * 0.5 * (1.0 + (x / ((2.0_f64).sqrt())).erf())\n\n}\n\n\n", "file_path": "src/common/activations.rs", "rank": 3, "score": 274841.5651562009 }, { "content": "pub fn _mish(x: &Tensor) -> Tensor {\n\n x * (x.softplus().tanh())\n\n}\n\n\n", "file_path": "src/common/activations.rs", "rank": 4, "score": 274841.5651562009 }, { "content": "pub fn _gelu_new(x: &Tensor) -> Tensor {\n\n x * 0.5 * (((x.pow(3.0f64) * 0.044715 + x) * ((2f64 / PI).sqrt())).tanh() + 1)\n\n}\n\n\n", "file_path": "src/common/activations.rs", "rank": 5, "score": 269752.3779357532 }, { "content": "pub fn stable_argsort(input_tensor: &Tensor, dim: i64) -> Tensor {\n\n let scaling_dim = input_tensor.size()[dim as usize];\n\n let scaled_offset = Tensor::arange(scaling_dim, (Kind::Int, input_tensor.device()))\n\n .view([1, 1, -1])\n\n .expand(&input_tensor.size(), true);\n\n let scaled_tensor = scaling_dim * input_tensor + (scaled_offset / scaling_dim);\n\n scaled_tensor.argsort(dim, false)\n\n}\n\n\n", "file_path": "src/reformer/attention_utils.rs", "rank": 6, 
"score": 188758.6866408197 }, { "content": "pub fn look_adjacent(vectors: Tensor, num_chunks_before: i64, num_chunks_after: i64) -> Tensor {\n\n if (num_chunks_before == 0) & (num_chunks_after == 0) {\n\n vectors\n\n } else {\n\n let mut calc_slices =\n\n Vec::with_capacity((num_chunks_before + num_chunks_after + 1) as usize);\n\n let mut ref_slices =\n\n Vec::with_capacity((num_chunks_before + num_chunks_after + 1) as usize);\n\n for i in -num_chunks_before..num_chunks_after + 1 {\n\n calc_slices.push(Tensor::cat(\n\n &[\n\n &vectors.slice(2, i, vectors.size()[2], 1),\n\n &vectors.slice(2, 0, i, 1),\n\n ],\n\n 2,\n\n ))\n\n }\n\n for i in -num_chunks_before..num_chunks_after + 1 {\n\n if i == 0 {\n\n ref_slices.push(&vectors)\n\n } else {\n\n ref_slices.push(&calc_slices[(i + num_chunks_before) as usize])\n\n }\n\n }\n\n Tensor::cat(ref_slices.as_slice(), 3)\n\n }\n\n}\n\n\n", "file_path": "src/reformer/attention_utils.rs", "rank": 7, "score": 166912.8441904271 }, { "content": "pub fn main() -> Result<(), RustBertError> {\n\n let args: Vec<_> = std::env::args().collect();\n\n assert_eq!(\n\n args.len(),\n\n 3,\n\n \"usage: {} source.npz destination.ot\",\n\n args[0].as_str()\n\n );\n\n\n\n let source_file = &args[1];\n\n let destination_file = &args[2];\n\n let tensors = tch::Tensor::read_npz(source_file)?;\n\n tch::Tensor::save_multi(&tensors, destination_file)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/convert-tensor.rs", "rank": 8, "score": 164095.83659360875 }, { "content": "/// # Utility to deserialize JSON config files\n\npub trait Config<T>\n\nwhere\n\n for<'de> T: Deserialize<'de>,\n\n{\n\n /// Loads a `Config` object from a JSON file. 
The format is expected to be aligned with the [Transformers library](https://github.com/huggingface/transformers) configuration files for each model.\n\n /// The parsing will fail if non-optional keys expected by the model are missing.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `path` - `Path` to the configuration JSON file.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// use rust_bert::gpt2::Gpt2Config;\n\n /// use rust_bert::Config;\n\n /// use std::path::Path;\n\n ///\n\n /// let config_path = Path::new(\"path/to/config.json\");\n\n /// let config = Gpt2Config::from_file(config_path);\n\n /// ```\n\n fn from_file<P: AsRef<Path>>(path: P) -> T {\n\n let f = File::open(path).expect(\"Could not open configuration file.\");\n\n let br = BufReader::new(f);\n\n let config: T = serde_json::from_reader(br).expect(\"could not parse configuration\");\n\n config\n\n }\n\n}\n", "file_path": "src/common/config.rs", "rank": 9, "score": 151732.2621472323 }, { "content": "pub fn reverse_sort(\n\n out_vectors: &Tensor,\n\n logits: &Tensor,\n\n undo_sorted_bucket_idx: &Tensor,\n\n) -> (Tensor, Tensor) {\n\n let expanded_undo_sort_indices = undo_sorted_bucket_idx\n\n .unsqueeze(-1)\n\n .expand(out_vectors.size().as_slice(), true);\n\n let out_vectors = out_vectors.gather(2, &expanded_undo_sort_indices, true);\n\n let logits = logits.gather(2, &undo_sorted_bucket_idx, true);\n\n (out_vectors, logits)\n\n}\n\n\n", "file_path": "src/reformer/attention_utils.rs", "rank": 10, "score": 138431.89125713208 }, { "content": "pub fn merge_hidden_size_dim(\n\n input: &Tensor,\n\n num_attention_heads: i64,\n\n attention_head_size: i64,\n\n) -> Tensor {\n\n let new_shape = [\n\n input.size()[0],\n\n -1,\n\n num_attention_heads * attention_head_size,\n\n ];\n\n input.permute(&[0, 2, 1, 3]).reshape(&new_shape)\n\n}\n\n\n", "file_path": "src/reformer/attention_utils.rs", "rank": 11, "score": 133298.84544936987 }, { "content": "pub fn get_min_chunk_len(\n\n attention_types: 
&[AttentionType],\n\n lsh_attn_chunk_length: Option<i64>,\n\n local_attn_chunk_length: Option<i64>,\n\n) -> i64 {\n\n let num_unique_attention_type = attention_types\n\n .iter()\n\n .collect::<HashSet<&AttentionType>>()\n\n .len();\n\n match num_unique_attention_type {\n\n 1 => {\n\n if attention_types[0] == AttentionType::lsh {\n\n lsh_attn_chunk_length.unwrap_or(64)\n\n } else {\n\n local_attn_chunk_length.unwrap_or(64)\n\n }\n\n }\n\n 2 => min(\n\n lsh_attn_chunk_length.unwrap_or(64),\n\n local_attn_chunk_length.unwrap_or(64),\n\n ),\n\n _ => panic!(\"Impossible scenario - only 2 attention types supported\"),\n\n }\n\n}\n\n\n", "file_path": "src/reformer/attention_utils.rs", "rank": 12, "score": 133298.84544936987 }, { "content": "pub fn retrieve_relevant_hidden_states(\n\n previous_hidden_states: &Tensor,\n\n chunk_length: i64,\n\n num_chunks_before: i64,\n\n) -> Tensor {\n\n let end_position = previous_hidden_states.size()[1];\n\n let start_position = ((end_position / chunk_length) - num_chunks_before) * chunk_length;\n\n previous_hidden_states.slice(1, start_position, end_position, 1)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::reformer::attention::AttentionType;\n\n use crate::reformer::attention_utils::{\n\n get_least_common_mult_chunk_len, get_min_chunk_len, lcm,\n\n };\n\n\n\n #[test]\n\n fn test_lcm_calculation() {\n\n let test_pairs = [(7, 3), (1, 1), (8, 9), (48, 32), (-1, -1), (1, 0), (0, 1)];\n", "file_path": "src/reformer/attention_utils.rs", "rank": 13, "score": 133298.84544936987 }, { "content": "pub fn split_seq_length_dim_to(\n\n vectors: &Tensor,\n\n dim_factor_1: i64,\n\n dim_factor_2: i64,\n\n num_attention_heads: i64,\n\n attention_head_size: Option<i64>,\n\n) -> Result<Tensor, RustBertError> {\n\n let input_size = vectors.size();\n\n let batch_size = input_size[0];\n\n let mut split_dim_shape = vec![batch_size, num_attention_heads, dim_factor_1, dim_factor_2];\n\n\n\n if input_size.len() == 4 {\n\n let attention_head_size = if let 
Some(attention_head_size_value) = attention_head_size {\n\n attention_head_size_value\n\n } else {\n\n return Err(RustBertError::ValueError(\n\n \"attention_head_size must be provided for inputs of dimension 4\".to_string(),\n\n ));\n\n };\n\n split_dim_shape.push(attention_head_size);\n\n };\n\n Ok(vectors.reshape(split_dim_shape.as_slice()))\n\n}\n\n\n", "file_path": "src/reformer/attention_utils.rs", "rank": 14, "score": 133298.84544936987 }, { "content": "pub fn split_hidden_size_dim(\n\n input: &Tensor,\n\n num_attention_heads: i64,\n\n attention_head_size: i64,\n\n) -> Tensor {\n\n let mut new_x_shape = input.size();\n\n let _ = new_x_shape.pop();\n\n new_x_shape.push(num_attention_heads);\n\n new_x_shape.push(attention_head_size);\n\n input.view(new_x_shape.as_slice()).transpose(2, 1)\n\n}\n\n\n", "file_path": "src/reformer/attention_utils.rs", "rank": 15, "score": 133298.84544936987 }, { "content": "fn matrix_multiply(iters: u64, input: &Tensor, weights: &Tensor) -> Duration {\n\n let mut duration = Duration::new(0, 0);\n\n for _i in 0..iters {\n\n let start = Instant::now();\n\n let _ = input.matmul(weights);\n\n duration = duration.checked_add(start.elapsed()).unwrap();\n\n }\n\n duration\n\n}\n\n\n", "file_path": "benches/tensor_operations_benchmark.rs", "rank": 16, "score": 132462.2430555966 }, { "content": "pub fn get_least_common_mult_chunk_len(\n\n attention_types: &[AttentionType],\n\n lsh_attn_chunk_length: Option<i64>,\n\n local_attn_chunk_length: Option<i64>,\n\n) -> i64 {\n\n let num_unique_attention_type = attention_types\n\n .iter()\n\n .collect::<HashSet<&AttentionType>>()\n\n .len();\n\n match num_unique_attention_type {\n\n 1 => {\n\n if attention_types[0] == AttentionType::lsh {\n\n lsh_attn_chunk_length.unwrap_or(64)\n\n } else {\n\n local_attn_chunk_length.unwrap_or(64)\n\n }\n\n }\n\n 2 => lcm(\n\n lsh_attn_chunk_length.unwrap_or(64),\n\n local_attn_chunk_length.unwrap_or(64),\n\n ),\n\n _ => panic!(\"Impossible scenario - only 2 
attention types supported\"),\n\n }\n\n}\n\n\n", "file_path": "src/reformer/attention_utils.rs", "rank": 17, "score": 128741.35060168928 }, { "content": "fn _shift_tokens_right(input_ids: &Tensor, pad_token_id: i64) -> Tensor {\n\n let output = input_ids.masked_fill(&input_ids.eq(-100), pad_token_id);\n\n let index_eos: Tensor = input_ids.ne(pad_token_id).sum1(&[1], true, Int64) - 1;\n\n output\n\n .select(1, 0)\n\n .copy_(&input_ids.gather(1, &index_eos, true).squeeze());\n\n output\n\n .slice(1, 1, *output.size().last().unwrap(), 1)\n\n .copy_(&input_ids.slice(1, 0, *output.size().last().unwrap() - 1, 1));\n\n output\n\n}\n\n\n\npub struct MBartClassificationHead {\n\n dense: nn::Linear,\n\n dropout: Dropout,\n\n out_proj: nn::Linear,\n\n}\n\n\n\nimpl MBartClassificationHead {\n\n pub fn new<'p, P>(p: P, config: &MBartConfig) -> MBartClassificationHead\n", "file_path": "src/mbart/mbart_model.rs", "rank": 18, "score": 127280.44611034537 }, { "content": "fn _shift_tokens_right(input_ids: &Tensor, pad_token_id: i64) -> Tensor {\n\n let index_eos: Tensor = input_ids.ne(pad_token_id).sum1(&[-1], true, Int64) - 1;\n\n let output = input_ids.empty_like().to_kind(Int64);\n\n output\n\n .select(1, 0)\n\n .copy_(&input_ids.gather(1, &index_eos, true).squeeze());\n\n output\n\n .slice(1, 1, *output.size().last().unwrap(), 1)\n\n .copy_(&input_ids.slice(1, 0, *output.size().last().unwrap() - 1, 1));\n\n output\n\n}\n\n\n\n/// # BART Base model\n\n/// Base architecture for BART model. 
Usually complemented with a task-specific head, such as a language model head.\n\n/// It is made of the following blocks:\n\n/// - `encoder`: `BartEncoder` (transformer) made of a vector of encoding layers\n\n/// - `decoder`: `BartDecoder` (transformer) made of a vector of decoding layers with self attention and encoder cross-attention.\n\n/// caching is implemented for the decoder to avoid recalculating static states (encoder key/values and previously calculated decoder key/values)\n\n/// - `pad_token_id`: padding token id\n\npub struct BartModel {\n", "file_path": "src/bart/bart_model.rs", "rank": 19, "score": 127280.44611034537 }, { "content": "fn get_question_end_index(input_ids: &Tensor, sep_token_id: i64) -> Tensor {\n\n input_ids\n\n .eq(sep_token_id)\n\n .nonzero()\n\n .view([input_ids.size()[0], 3, 2])\n\n .select(2, 1)\n\n .select(1, 0)\n\n}\n\n\n", "file_path": "src/longformer/longformer_model.rs", "rank": 20, "score": 125607.31954081639 }, { "content": "fn create_sinusoidal_embeddings(config: &DistilBertConfig, device: Device) -> nn::Embedding {\n\n let mut sinusoidal_embedding: Vec<Tensor> =\n\n Vec::with_capacity(config.max_position_embeddings as usize);\n\n for pos in 0..config.max_position_embeddings {\n\n let mut temp_vec: Vec<f64> = Vec::with_capacity(config.dim as usize);\n\n for j in 0..config.dim {\n\n if j % 2 == 0 {\n\n temp_vec.push(\n\n (pos as f64 / 10000f64.powf((2 * (j / 2)) as f64 / config.dim as f64)).sin(),\n\n );\n\n } else {\n\n temp_vec.push(\n\n (pos as f64 / 10000f64.powf((2 * (j / 2)) as f64 / config.dim as f64)).cos(),\n\n );\n\n }\n\n }\n\n let temp_vec = Tensor::of_slice(&temp_vec);\n\n sinusoidal_embedding.push(temp_vec);\n\n }\n\n let sinusoidal_embedding = Tensor::stack(&sinusoidal_embedding, 0)\n", "file_path": "src/distilbert/embeddings.rs", "rank": 21, "score": 117126.45658244347 }, { "content": "fn bench_tensor_ops(c: &mut Criterion) {\n\n // Set-up summarization model\n\n unsafe {\n\n 
torch_sys::dummy_cuda_dependency();\n\n }\n\n let input = Tensor::rand(&[32, 128, 512], (Float, Device::cuda_if_available()));\n\n let weights = Tensor::rand(&[512, 512], (Float, Device::cuda_if_available()));\n\n\n\n let _ = &input.matmul(&weights);\n\n c.bench_function(\"Matrix multiply \", |b| {\n\n b.iter_custom(|iters| black_box(matrix_multiply(iters, &input, &weights)))\n\n });\n\n}\n\n\n\ncriterion_group! {\n\nname = benches;\n\nconfig = Criterion::default().sample_size(100);\n\ntargets = bench_tensor_ops\n\n}\n\n\n\ncriterion_main!(benches);\n", "file_path": "benches/tensor_operations_benchmark.rs", "rank": 22, "score": 113919.20780055161 }, { "content": "pub fn linear_no_bias<'a, T: Borrow<Path<'a>>>(\n\n vs: T,\n\n in_dim: i64,\n\n out_dim: i64,\n\n c: LinearNoBiasConfig,\n\n) -> LinearNoBias {\n\n let vs = vs.borrow();\n\n LinearNoBias {\n\n ws: vs.var(\"weight\", &[out_dim, in_dim], c.ws_init),\n\n }\n\n}\n\n\n\nimpl Module for LinearNoBias {\n\n fn forward(&self, xs: &Tensor) -> Tensor {\n\n xs.matmul(&self.ws.tr())\n\n }\n\n}\n", "file_path": "src/common/linear.rs", "rank": 23, "score": 113897.78949436735 }, { "content": "pub fn lcm(a: i64, b: i64) -> i64 {\n\n a * (b / gcd(a, b))\n\n}\n\n\n", "file_path": "src/reformer/attention_utils.rs", "rank": 24, "score": 113897.78949436735 }, { "content": "pub fn squad_processor(file_path: PathBuf) -> Vec<QaInput> {\n\n let file = fs::File::open(file_path).expect(\"unable to open file\");\n\n let json: serde_json::Value =\n\n serde_json::from_reader(file).expect(\"JSON not properly formatted\");\n\n let data = json\n\n .get(\"data\")\n\n .expect(\"SQuAD file does not contain data field\")\n\n .as_array()\n\n .expect(\"Data array not properly formatted\");\n\n\n\n let mut qa_inputs: Vec<QaInput> = Vec::with_capacity(data.len());\n\n for qa_input in data.iter() {\n\n let qa_input = qa_input.as_object().unwrap();\n\n let paragraphs = qa_input.get(\"paragraphs\").unwrap().as_array().unwrap();\n\n for paragraph in 
paragraphs.iter() {\n\n let paragraph = paragraph.as_object().unwrap();\n\n let context = paragraph.get(\"context\").unwrap().as_str().unwrap();\n\n let qas = paragraph.get(\"qas\").unwrap().as_array().unwrap();\n\n for qa in qas.iter() {\n\n let question = qa\n", "file_path": "src/pipelines/question_answering.rs", "rank": 25, "score": 106016.72643189685 }, { "content": "#[test]\n\nfn bert_pre_trained_ner() -> anyhow::Result<()> {\n\n // Set-up model\n\n let ner_model = NERModel::new(Default::default())?;\n\n\n\n // Define input\n\n let input = [\n\n \"My name is Amy. I live in Paris.\",\n\n \"Paris is a city in France.\",\n\n ];\n\n\n\n // Run model\n\n let output = ner_model.predict(&input);\n\n\n\n assert_eq!(output.len(), 2);\n\n assert_eq!(output[0].len(), 2);\n\n assert_eq!(output[1].len(), 2);\n\n\n\n assert_eq!(output[0][0].word, \"Amy\");\n\n assert!((output[0][0].score - 0.9986).abs() < 1e-4);\n\n assert_eq!(output[0][0].label, \"I-PER\");\n", "file_path": "tests/bert.rs", "rank": 26, "score": 105575.87084729454 }, { "content": "/// # (Download) the resource and return a path to its local path\n\n/// This function will download remote resource to their local path if they do not exist yet.\n\n/// Then for both `LocalResource` and `RemoteResource`, it will the local path to the resource.\n\n/// For `LocalResource` only the resource path is returned.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `resource` - Pointer to the `&Resource` to optionally download and get the local path.\n\n///\n\n/// # Returns\n\n///\n\n/// * `&PathBuf` Local path for the resource\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// use rust_bert::resources::{RemoteResource, Resource};\n\n/// let model_resource = Resource::Remote(RemoteResource::from_pretrained((\n\n/// \"distilbert-sst2/model.ot\",\n\n/// \"https://huggingface.co/distilbert-base-uncased-finetuned-sst-2-english/resolve/main/rust_model.ot\",\n\n/// )));\n\n/// let local_path = model_resource.get_local_path();\n\n/// 
```\n\npub fn download_resource(resource: &Resource) -> Result<PathBuf, RustBertError> {\n\n resource.get_local_path()\n\n}\n", "file_path": "src/common/resources.rs", "rank": 27, "score": 103397.03945316238 }, { "content": "fn ngram_attention_bias(sequence_length: i64, ngram: i64, device: Device) -> Tensor {\n\n let left_block = Tensor::ones(\n\n &[ngram, sequence_length, sequence_length],\n\n (Kind::Float, device),\n\n ) * f64::NEG_INFINITY;\n\n let right_block = left_block.copy();\n\n for stream_idx in 0..ngram {\n\n let _ = right_block.get(stream_idx).fill_diagonal_(0, false);\n\n let _ = left_block.get(stream_idx).triu_(-stream_idx + 1);\n\n }\n\n let _ = left_block.slice(2, 0, sequence_length, 1).fill_(0);\n\n Tensor::cat(&[left_block, right_block], 2)\n\n}\n\n\n\npub struct ProphetNetDecoderLayer {\n\n self_attention: ProphetNetNgramAttention,\n\n self_attention_layer_norm: nn::LayerNorm,\n\n cross_attention: Option<ProphetNetAttention>,\n\n cross_attention_layer_norm: Option<nn::LayerNorm>,\n\n feed_forward: ProphetNetFeedForward,\n", "file_path": "src/prophetnet/decoder.rs", "rank": 28, "score": 101263.91980343882 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Record {\n\n sentence: String,\n\n label: i8,\n\n}\n\n\n", "file_path": "benches/sst2_benchmark.rs", "rank": 29, "score": 79706.76763990909 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Record {\n\n sentence: String,\n\n label: i8,\n\n}\n\n\n", "file_path": "examples/sentiment_analysis_sst2.rs", "rank": 30, "score": 78091.4754939439 }, { "content": "struct PaddedInput {\n\n input_ids: Option<Tensor>,\n\n position_ids: Option<Tensor>,\n\n inputs_embeds: Option<Tensor>,\n\n attention_mask: Option<Tensor>,\n\n token_type_ids: Option<Tensor>,\n\n}\n\n\n\n/// # LongformerModel Base model\n\n/// Base architecture for LongformerModel models. 
Task-specific models will be built from this common base model\n\n/// It is made of the following blocks:\n\n/// - `embeddings`: LongformerEmbeddings containing word, position and segment id embeddings\n\n/// - `encoder`: LongformerEncoder\n\n/// - `pooler`: Optional pooling layer extracting the representation of the first token for each batch item\n\npub struct LongformerModel {\n\n embeddings: LongformerEmbeddings,\n\n encoder: LongformerEncoder,\n\n pooler: Option<LongformerPooler>,\n\n max_attention_window: i64,\n\n pad_token_id: i64,\n", "file_path": "src/longformer/longformer_model.rs", "rank": 31, "score": 76581.50343229354 }, { "content": "#[derive(Debug)]\n\nstruct QaFeature {\n\n pub input_ids: Vec<i64>,\n\n pub offsets: Vec<Option<Offset>>,\n\n pub p_mask: Vec<i8>,\n\n pub example_index: i64,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\n/// # Output for Question Answering\n\npub struct Answer {\n\n /// Confidence score\n\n pub score: f64,\n\n /// Start position of answer span\n\n pub start: usize,\n\n /// End position of answer span\n\n pub end: usize,\n\n /// Answer span\n\n pub answer: String,\n\n}\n\n\n\nimpl PartialEq for Answer {\n\n fn eq(&self, other: &Self) -> bool {\n\n (self.start == other.start) && (self.end == other.end) && (self.answer == other.answer)\n\n }\n\n}\n\n\n", "file_path": "src/pipelines/question_answering.rs", "rank": 32, "score": 76581.50343229354 }, { "content": "#[derive(Debug)]\n\nstruct BeamHypotheses {\n\n max_length: i64,\n\n length_penalty: f64,\n\n early_stopping: bool,\n\n num_beams: i64,\n\n beams: Vec<(f64, Tensor)>,\n\n worst_score: f64,\n\n}\n\n\n\nimpl Clone for BeamHypotheses {\n\n fn clone(&self) -> Self {\n\n BeamHypotheses {\n\n max_length: self.max_length,\n\n length_penalty: self.length_penalty,\n\n early_stopping: self.early_stopping,\n\n num_beams: self.num_beams,\n\n beams: self\n\n .beams\n\n .iter()\n\n .map(|(score, tensor)| (*score, tensor.copy()))\n", "file_path": "src/pipelines/generation_utils.rs", 
"rank": 33, "score": 76581.50343229354 }, { "content": "struct RemoteTranslationResources {\n\n model_resource: (&'static str, &'static str),\n\n config_resource: (&'static str, &'static str),\n\n vocab_resource: (&'static str, &'static str),\n\n merges_resource: (&'static str, &'static str),\n\n prefix: Option<&'static str>,\n\n model_type: ModelType,\n\n}\n\n\n\nimpl RemoteTranslationResources {\n\n pub const ENGLISH2FRENCH: RemoteTranslationResources = Self {\n\n model_resource: MarianModelResources::ENGLISH2ROMANCE,\n\n config_resource: MarianConfigResources::ENGLISH2ROMANCE,\n\n vocab_resource: MarianVocabResources::ENGLISH2ROMANCE,\n\n merges_resource: MarianSpmResources::ENGLISH2ROMANCE,\n\n prefix: MarianPrefix::ENGLISH2FRENCH,\n\n model_type: ModelType::Marian,\n\n };\n\n pub const ENGLISH2FRENCH_V2: RemoteTranslationResources = Self {\n\n model_resource: T5ModelResources::T5_BASE,\n", "file_path": "src/pipelines/translation.rs", "rank": 34, "score": 76581.50343229354 }, { "content": "struct GlobalAttentionIndices {\n\n max_num_global_attention_indices: i64,\n\n is_index_global_attn_nonzero: Vec<Option<Tensor>>,\n\n is_local_index_global_attention_nonzero: Vec<Option<Tensor>>,\n\n is_local_index_no_global_attention_nonzero: Vec<Option<Tensor>>,\n\n}\n", "file_path": "src/longformer/attention.rs", "rank": 35, "score": 76581.50343229354 }, { "content": "/// # BertEmbedding trait (for use in BertModel or RoBERTaModel)\n\n/// Defines an interface for the embedding layers in BERT-based models\n\npub trait BertEmbedding {\n\n fn new<'p, P>(p: P, config: &BertConfig) -> Self\n\n where\n\n P: Borrow<nn::Path<'p>>;\n\n\n\n fn forward_t(\n\n &self,\n\n input_ids: Option<Tensor>,\n\n token_type_ids: Option<Tensor>,\n\n position_ids: Option<Tensor>,\n\n input_embeds: Option<Tensor>,\n\n train: bool,\n\n ) -> Result<Tensor, RustBertError>;\n\n}\n\n\n\n#[derive(Debug)]\n\n/// # BertEmbeddings implementation for BERT model\n\n/// Implementation of the `BertEmbedding` 
trait for BERT models\n\npub struct BertEmbeddings {\n\n word_embeddings: nn::Embedding,\n", "file_path": "src/bert/embeddings.rs", "rank": 36, "score": 72445.37268713687 }, { "content": "fn squad_forward_pass(\n\n iters: u64,\n\n model: &QuestionAnsweringModel,\n\n squad_data: &[QaInput],\n\n) -> Duration {\n\n let mut duration = Duration::new(0, 0);\n\n let batch_size = BATCH_SIZE;\n\n let mut output = vec![];\n\n for _i in 0..iters {\n\n let start = Instant::now();\n\n for batch in squad_data.chunks(batch_size) {\n\n output.push(model.predict(batch, 1, 64));\n\n }\n\n duration = duration.checked_add(start.elapsed()).unwrap();\n\n }\n\n duration\n\n}\n\n\n", "file_path": "benches/squad_benchmark.rs", "rank": 37, "score": 72096.40405529713 }, { "content": "fn _shift_tokens_right(\n\n input_ids: &Tensor,\n\n pad_token_id: i64,\n\n decoder_start_token_id: i64,\n\n) -> Tensor {\n\n let input_ids_length = input_ids.size()[1];\n\n let mut shifted_input_ids = Tensor::zeros(\n\n input_ids.size().as_slice(),\n\n (input_ids.kind(), input_ids.device()),\n\n );\n\n let _ = shifted_input_ids\n\n .slice(1, 1, input_ids_length, 1)\n\n .copy_(&input_ids.slice(1, 0, input_ids_length - 1, 1));\n\n\n\n let _ = shifted_input_ids.select(1, 0).fill_(decoder_start_token_id);\n\n let _ = shifted_input_ids.masked_fill_(&shifted_input_ids.eq(-100), pad_token_id);\n\n\n\n shifted_input_ids\n\n}\n\n\n", "file_path": "src/pegasus/pegasus_model.rs", "rank": 38, "score": 70692.26583562043 }, { "content": "/// # Language Model trait\n\n/// Shared trait between language generation models (e.g. GPT2, GPT, BART) used in language generation pipelines.\n\npub trait LMHeadModel {\n\n /// Forward pass through the model. Example provided for GPT2.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `input_ids` - Optional input tensor of shape (*batch size*, *sequence_length*). 
If None, pre-computed embeddings must be provided (see `input_embeds`)\n\n /// * `layer_past` - Optional vector of size *n_layer* containing the past keys and values of each layer of shape (*2*, *batch size*, *number of heads*, *past_sequence_length*, *hidden size per head*). When provided, these are concatenated with the current input keys and values.\n\n /// * `attention_mask` - Optional mask of shape (*batch size*, *sequence_length*). Masked position have value 0, non-masked value 1. If None set to 1\n\n /// * `input_embeds` - Optional pre-computed input embeddings of shape (*batch size*, *sequence_length*, *hidden_size*). If None, input ids must be provided (see `input_ids`)\n\n /// * `token_type_ids` - Optional token type ids used to indicate the portion of the input the token belongs to. If not None, token type embeddings will be added to the token and position embeddings.\n\n /// * `position_ids` - Optional position ids of shape (*batch size*, *sequence_length*). If None, will be incremented starting from the length of the past input.\n\n /// * `train` - boolean flag to turn on/off the dropout layers in the model. 
Should be set to false for inference.\n\n ///\n\n /// # Returns\n\n ///\n\n /// * `output` - `Tensor` of shape (*batch size*, *sequence_length*, *vocab_size*) representing the logits for each vocab item and position\n\n /// * `past` - `Option<Vec<Tensor>>` of length *n_layer* containing the past keys and values of each layer of shape (*2*, *batch size*, *number of heads*, *past_sequence_length*, *hidden size per head*)\n\n /// * `hidden_states` - `Option<Vec<Tensor>>` of length *num_hidden_layers* with shape (*batch size*, *sequence_length*, *hidden_size*)\n\n /// * `attentions` - `Option<Vec<Tensor>>` of length *num_hidden_layers* with shape (*batch size*, *sequence_length*, *hidden_size*)\n\n ///\n", "file_path": "src/pipelines/generation_utils.rs", "rank": 39, "score": 69705.14882626725 }, { "content": "fn compute_global_attention_mask(\n\n input_ids: &Tensor,\n\n sep_token_id: i64,\n\n before_sep_token: bool,\n\n) -> Tensor {\n\n let question_end_index = get_question_end_index(input_ids, sep_token_id).unsqueeze(1);\n\n let attention_mask = Tensor::arange(input_ids.size()[1], (Kind::Int64, input_ids.device()));\n\n\n\n if before_sep_token {\n\n attention_mask.expand_as(input_ids).lt1(&question_end_index)\n\n } else {\n\n attention_mask\n\n .expand_as(input_ids)\n\n .gt1(&(question_end_index + 1))\n\n * attention_mask\n\n .expand_as(input_ids)\n\n .lt(*input_ids.size().last().unwrap())\n\n }\n\n}\n\n\n", "file_path": "src/longformer/longformer_model.rs", "rank": 40, "score": 69374.07606678204 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let config = ConversationConfig {\n\n do_sample: false,\n\n num_beams: 3,\n\n ..Default::default()\n\n };\n\n let conversation_model = ConversationModel::new(config)?;\n\n let mut conversation_manager = ConversationManager::new();\n\n\n\n let conversation_1_id =\n\n conversation_manager.create(\"Going to the movies tonight - any suggestions?\");\n\n let _conversation_2_id = conversation_manager.create(\"What's the last 
book you have read?\");\n\n\n\n let output = conversation_model.generate_responses(&mut conversation_manager);\n\n\n\n println!(\"{:?}\", output);\n\n\n\n let _ = conversation_manager\n\n .get(&conversation_1_id)\n\n .unwrap()\n", "file_path": "examples/conversation.rs", "rank": 41, "score": 67720.14839056767 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ProphetNetConfigResources::PROPHETNET_LARGE_CNN_DM,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ProphetNetVocabResources::PROPHETNET_LARGE_CNN_DM,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ProphetNetModelResources::PROPHETNET_LARGE_CNN_DM,\n\n ));\n\n\n\n let summarization_config = SummarizationConfig {\n\n model_type: ModelType::ProphetNet,\n\n model_resource: weights_resource,\n\n config_resource,\n\n vocab_resource: vocab_resource.clone(),\n\n merges_resource: vocab_resource,\n\n length_penalty: 1.2,\n\n num_beams: 4,\n\n no_repeat_ngram_size: 3,\n", "file_path": "examples/summarization_prophetnet.rs", "rank": 42, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up model\n\n let generate_config = TextGenerationConfig {\n\n model_type: ModelType::GPT2,\n\n max_length: 30,\n\n do_sample: true,\n\n num_beams: 5,\n\n temperature: 1.1,\n\n num_return_sequences: 3,\n\n ..Default::default()\n\n };\n\n let model = TextGenerationModel::new(generate_config)?;\n\n\n\n let input_context = \"The dog\";\n\n let second_input_context = \"The cat was\";\n\n let output = model.generate(&[input_context, second_input_context], None);\n\n\n\n for sentence in output {\n\n println!(\"{:?}\", sentence);\n\n }\n\n Ok(())\n\n}\n", "file_path": "examples/generation_gpt2.rs", "rank": 43, "score": 66221.37244053482 }, { "content": "#[test]\n\nfn mbart_translation() -> anyhow::Result<()> {\n\n // Resources paths\n\n let 
generate_config = GenerateConfig {\n\n max_length: 56,\n\n model_resource: Resource::Remote(RemoteResource::from_pretrained(\n\n MBartModelResources::MBART50_MANY_TO_MANY,\n\n )),\n\n config_resource: Resource::Remote(RemoteResource::from_pretrained(\n\n MBartConfigResources::MBART50_MANY_TO_MANY,\n\n )),\n\n vocab_resource: Resource::Remote(RemoteResource::from_pretrained(\n\n MBartVocabResources::MBART50_MANY_TO_MANY,\n\n )),\n\n merges_resource: Resource::Remote(RemoteResource::from_pretrained(\n\n MBartVocabResources::MBART50_MANY_TO_MANY,\n\n )),\n\n do_sample: false,\n\n num_beams: 3,\n\n ..Default::default()\n\n };\n", "file_path": "tests/mbart.rs", "rank": 44, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartConfigResources::DISTILBART_CNN_6_6,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartVocabResources::DISTILBART_CNN_6_6,\n\n ));\n\n let merges_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartMergesResources::DISTILBART_CNN_6_6,\n\n ));\n\n let model_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartModelResources::DISTILBART_CNN_6_6,\n\n ));\n\n\n\n let summarization_config = SummarizationConfig {\n\n model_resource,\n\n config_resource,\n\n vocab_resource,\n\n merges_resource,\n\n num_beams: 1,\n", "file_path": "examples/summarization_bart.rs", "rank": 45, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up model\n\n let sequence_classification_model = SequenceClassificationModel::new(Default::default())?;\n\n\n\n // Define input\n\n let input = [\n\n \"Probably my all-time favorite movie, a story of selflessness, sacrifice and dedication to a noble cause, but it's not preachy or boring.\",\n\n \"This film tried to be too many things all at once: stinging political satire, Hollywood blockbuster, sappy romantic comedy, 
family values promo...\",\n\n \"If you like original gut wrenching laughter you will like this movie. If you are young or old then you will love this movie, hell even my mom liked it.\",\n\n ];\n\n\n\n // Run model\n\n let output = sequence_classification_model.predict(&input);\n\n for label in output {\n\n println!(\"{:?}\", label);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/sequence_classification.rs", "rank": 46, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(T5ConfigResources::T5_BASE));\n\n let vocab_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(T5VocabResources::T5_BASE));\n\n let weights_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(T5ModelResources::T5_BASE));\n\n\n\n let generate_config = GenerateConfig {\n\n model_resource: weights_resource,\n\n vocab_resource,\n\n config_resource,\n\n max_length: 40,\n\n do_sample: false,\n\n num_beams: 4,\n\n ..Default::default()\n\n };\n\n\n\n // Set-up model\n", "file_path": "examples/translation_t5.rs", "rank": 47, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let translation_config =\n\n TranslationConfig::new(Language::EnglishToGerman, Device::cuda_if_available());\n\n let model = TranslationModel::new(translation_config)?;\n\n\n\n let input_context_1 = \"The quick brown fox jumps over the lazy dog\";\n\n let input_context_2 = \"The dog did not wake up\";\n\n\n\n let output = model.translate(&[input_context_1, input_context_2]);\n\n\n\n for sentence in output {\n\n println!(\"{}\", sentence);\n\n }\n\n Ok(())\n\n}\n", "file_path": "examples/translation_marian.rs", "rank": 48, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n PegasusConfigResources::CNN_DAILYMAIL,\n\n ));\n\n let vocab_resource = 
Resource::Remote(RemoteResource::from_pretrained(\n\n PegasusVocabResources::CNN_DAILYMAIL,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n PegasusModelResources::CNN_DAILYMAIL,\n\n ));\n\n\n\n let summarization_config = SummarizationConfig {\n\n model_type: ModelType::Pegasus,\n\n model_resource: weights_resource,\n\n config_resource,\n\n vocab_resource: vocab_resource.clone(),\n\n merges_resource: vocab_resource,\n\n length_penalty: 1.0,\n\n num_beams: 4,\n\n no_repeat_ngram_size: 3,\n", "file_path": "examples/summarization_pegasus.rs", "rank": 49, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let generate_config = GenerateConfig {\n\n max_length: 56,\n\n model_resource: Resource::Remote(RemoteResource::from_pretrained(\n\n MBartModelResources::MBART50_MANY_TO_MANY,\n\n )),\n\n config_resource: Resource::Remote(RemoteResource::from_pretrained(\n\n MBartConfigResources::MBART50_MANY_TO_MANY,\n\n )),\n\n vocab_resource: Resource::Remote(RemoteResource::from_pretrained(\n\n MBartVocabResources::MBART50_MANY_TO_MANY,\n\n )),\n\n merges_resource: Resource::Remote(RemoteResource::from_pretrained(\n\n MBartVocabResources::MBART50_MANY_TO_MANY,\n\n )),\n\n do_sample: false,\n\n num_beams: 1,\n\n ..Default::default()\n\n };\n\n let model = MBartGenerator::new(generate_config)?;\n", "file_path": "examples/translation_mbart.rs", "rank": 50, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up model\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n XLNetConfigResources::XLNET_BASE_CASED,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n XLNetVocabResources::XLNET_BASE_CASED,\n\n ));\n\n let merges_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n XLNetVocabResources::XLNET_BASE_CASED,\n\n ));\n\n let model_resource = 
Resource::Remote(RemoteResource::from_pretrained(\n\n XLNetModelResources::XLNET_BASE_CASED,\n\n ));\n\n\n\n let generate_config = TextGenerationConfig {\n\n model_type: ModelType::XLNet,\n\n model_resource,\n\n config_resource,\n", "file_path": "examples/generation_xlnet.rs", "rank": 51, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up classifier\n\n let sentiment_classifier = SentimentModel::new(Default::default())?;\n\n\n\n // Define input\n\n let input = [\n\n \"Probably my all-time favorite movie, a story of selflessness, sacrifice and dedication to a noble cause, but it's not preachy or boring.\",\n\n \"This film tried to be too many things all at once: stinging political satire, Hollywood blockbuster, sappy romantic comedy, family values promo...\",\n\n \"If you like original gut wrenching laughter you will like this movie. If you are young or old then you will love this movie, hell even my mom liked it.\",\n\n ];\n\n\n\n // Run model\n\n let output = sentiment_classifier.predict(&input);\n\n for sentiment in output {\n\n println!(\"{:?}\", sentiment);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/sentiment_analysis.rs", "rank": 52, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Load a configuration\n\n let config = TokenClassificationConfig::new(\n\n ModelType::Bert,\n\n Resource::Remote(RemoteResource::from_pretrained(\n\n BertModelResources::BERT_NER,\n\n )),\n\n Resource::Remote(RemoteResource::from_pretrained(\n\n BertConfigResources::BERT_NER,\n\n )),\n\n Resource::Remote(RemoteResource::from_pretrained(\n\n BertVocabResources::BERT_NER,\n\n )),\n\n None, //merges resource only relevant with ModelType::Roberta\n\n false, //lowercase\n\n false,\n\n None,\n\n LabelAggregationOption::Mode,\n\n );\n\n\n", "file_path": "examples/token_classification.rs", "rank": 53, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // let 
summarization_model = SummarizationModel::new(Default::default())?;\n\n\n\n let config_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(T5ConfigResources::T5_SMALL));\n\n let vocab_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(T5VocabResources::T5_SMALL));\n\n let weights_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(T5ModelResources::T5_SMALL));\n\n let summarization_config = SummarizationConfig::new(\n\n ModelType::T5,\n\n weights_resource,\n\n config_resource,\n\n vocab_resource.clone(),\n\n vocab_resource,\n\n );\n\n let summarization_model = SummarizationModel::new(summarization_config)?;\n\n\n\n let input = [\"In findings published Tuesday in Cornell University's arXiv by a team of scientists \\\n\nfrom the University of Montreal and a separate report published Wednesday in Nature Astronomy by a team \\\n", "file_path": "examples/summarization_t5.rs", "rank": 54, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up model\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ReformerConfigResources::CRIME_AND_PUNISHMENT,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ReformerVocabResources::CRIME_AND_PUNISHMENT,\n\n ));\n\n let merges_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ReformerVocabResources::CRIME_AND_PUNISHMENT,\n\n ));\n\n let model_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ReformerModelResources::CRIME_AND_PUNISHMENT,\n\n ));\n\n let generate_config = TextGenerationConfig {\n\n model_type: ModelType::Reformer,\n\n model_resource,\n\n config_resource,\n\n vocab_resource,\n", "file_path": "examples/generation_reformer.rs", "rank": 55, "score": 66221.37244053482 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up Question Answering model\n\n let qa_model = QuestionAnsweringModel::new(Default::default())?;\n\n\n\n // 
Define input\n\n let question_1 = String::from(\"Where does Amy live ?\");\n\n let context_1 = String::from(\"Amy lives in Amsterdam\");\n\n let question_2 = String::from(\"Where does Eric live\");\n\n let context_2 = String::from(\"While Amy lives in Amsterdam, Eric is in The Hague.\");\n\n let qa_input_1 = QaInput {\n\n question: question_1,\n\n context: context_1,\n\n };\n\n let qa_input_2 = QaInput {\n\n question: question_2,\n\n context: context_2,\n\n };\n\n\n\n // Get answer\n\n let answers = qa_model.predict(&[qa_input_1, qa_input_2], 1, 32);\n\n println!(\"{:?}\", answers);\n\n Ok(())\n\n}\n", "file_path": "examples/question_answering.rs", "rank": 56, "score": 66221.37244053482 }, { "content": "#[test]\n\nfn electra_discriminator() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ElectraConfigResources::BASE_DISCRIMINATOR,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ElectraVocabResources::BASE_DISCRIMINATOR,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ElectraModelResources::BASE_DISCRIMINATOR,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n\n let device = Device::Cpu;\n\n let mut vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer =\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n", "file_path": "tests/electra.rs", "rank": 57, "score": 66221.37244053482 }, { "content": "// #[cfg_attr(not(feature = \"all-tests\"), ignore)]\n\nfn test_translation() -> anyhow::Result<()> {\n\n // Set-up translation model\n\n let translation_config = TranslationConfig::new(Language::EnglishToFrench, Device::Cpu);\n\n let model = TranslationModel::new(translation_config)?;\n\n\n\n let input_context_1 = \"The 
quick brown fox jumps over the lazy dog\";\n\n let input_context_2 = \"The dog did not wake up\";\n\n\n\n let output = model.translate(&[input_context_1, input_context_2]);\n\n\n\n assert_eq!(output.len(), 2);\n\n assert_eq!(\n\n output[0],\n\n \" Le rapide renard brun saute sur le chien paresseux\"\n\n );\n\n assert_eq!(output[1], \" Le chien ne s'est pas réveillé\");\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/marian.rs", "rank": 58, "score": 66221.37244053482 }, { "content": "fn _get_cache_directory() -> PathBuf {\n\n match env::var(\"RUSTBERT_CACHE\") {\n\n Ok(value) => PathBuf::from(value),\n\n Err(_) => {\n\n let mut home = dirs::home_dir().unwrap();\n\n home.push(\".cache\");\n\n home.push(\".rustbert\");\n\n home\n\n }\n\n }\n\n}\n\n\n\n#[deprecated(\n\n since = \"0.9.1\",\n\n note = \"Please use `Resource.get_local_path()` instead\"\n\n)]\n", "file_path": "src/common/resources.rs", "rank": 59, "score": 66086.98540565911 }, { "content": "fn create_sentiment_model() -> SentimentModel {\n\n let config = SequenceClassificationConfig {\n\n device: Device::cuda_if_available(),\n\n ..Default::default()\n\n };\n\n SentimentModel::new(config).unwrap()\n\n}\n\n\n", "file_path": "benches/sst2_benchmark.rs", "rank": 60, "score": 66086.98540565911 }, { "content": "fn create_summarization_model() -> SummarizationModel {\n\n let config = SummarizationConfig {\n\n device: Device::cuda_if_available(),\n\n ..Default::default()\n\n };\n\n SummarizationModel::new(config).unwrap()\n\n}\n\n\n", "file_path": "benches/summarization_benchmark.rs", "rank": 61, "score": 66086.98540565911 }, { "content": "fn create_translation_model() -> TranslationModel {\n\n let config = TranslationConfig::new(Language::EnglishToFrenchV2, Device::cuda_if_available());\n\n // let config = TranslationConfig::new_from_resources(\n\n // Resource::Local(LocalResource {\n\n // local_path: \"E:/Coding/cache/rustbert/marian-mt-en-es/model.ot\".into(),\n\n // }),\n\n // Resource::Local(LocalResource {\n\n 
// local_path: \"E:/Coding/cache/rustbert/marian-mt-en-es/config.json\".into(),\n\n // }),\n\n // Resource::Local(LocalResource {\n\n // local_path: \"E:/Coding/cache/rustbert/marian-mt-en-es/vocab.json\".into(),\n\n // }),\n\n // Resource::Local(LocalResource {\n\n // local_path: \"E:/Coding/cache/rustbert/marian-mt-en-es/spiece.model\".into(),\n\n // }),\n\n // None,\n\n // Device::cuda_if_available(),\n\n // ModelType::Marian,\n\n // );\n\n TranslationModel::new(config).unwrap()\n\n}\n\n\n", "file_path": "benches/translation_benchmark.rs", "rank": 62, "score": 66086.98540565911 }, { "content": "fn create_qa_model() -> QuestionAnsweringModel {\n\n let config = QuestionAnsweringConfig::new(\n\n ModelType::Bert,\n\n Resource::Remote(RemoteResource::from_pretrained(BertModelResources::BERT_QA)),\n\n Resource::Remote(RemoteResource::from_pretrained(\n\n BertConfigResources::BERT_QA,\n\n )),\n\n Resource::Remote(RemoteResource::from_pretrained(BertVocabResources::BERT_QA)),\n\n None, //merges resource only relevant with ModelType::Roberta\n\n false, //lowercase\n\n false,\n\n None,\n\n );\n\n QuestionAnsweringModel::new(config).unwrap()\n\n}\n\n\n", "file_path": "benches/squad_benchmark.rs", "rank": 63, "score": 64847.086974115584 }, { "content": "#[test]\n\nfn albert_masked_lm() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertConfigResources::ALBERT_BASE_V2,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertVocabResources::ALBERT_BASE_V2,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertModelResources::ALBERT_BASE_V2,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n\n let device = Device::Cpu;\n\n let mut vs = 
nn::VarStore::new(device);\n\n let tokenizer: AlbertTokenizer =\n\n AlbertTokenizer::from_file(vocab_path.to_str().unwrap(), true, false)?;\n", "file_path": "tests/albert.rs", "rank": 64, "score": 64817.234220858125 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up model\n\n let sequence_classification_model = SequenceClassificationModel::new(Default::default())?;\n\n\n\n // Define input\n\n let input = [\n\n \"Probably my all-time favorite movie, a story of selflessness, sacrifice and dedication to a noble cause, but it's not preachy or boring.\",\n\n \"This is a neutral sentence.\",\n\n \"If you like original gut wrenching laughter you will like this movie. If you are young or old then you will love this movie, hell even my mom liked it.\",\n\n ];\n\n\n\n // Run model\n\n let output = sequence_classification_model.predict_multilabel(&input, 0.05);\n\n if let Ok(labels) = output {\n\n for label in labels {\n\n println!(\"{:?}\", label);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/sequence_classification_multilabel.rs", "rank": 65, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn bart_lm_model() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartConfigResources::DISTILBART_CNN_6_6,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartVocabResources::DISTILBART_CNN_6_6,\n\n ));\n\n let merges_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartMergesResources::DISTILBART_CNN_6_6,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartModelResources::DISTILBART_CNN_6_6,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let merges_path = merges_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n", "file_path": 
"tests/bart.rs", "rank": 66, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn distilbert_question_answering() -> anyhow::Result<()> {\n\n // Set-up question answering model\n\n let qa_model = QuestionAnsweringModel::new(Default::default())?;\n\n\n\n // Define input\n\n let question = String::from(\"Where does Amy live ?\");\n\n let context = String::from(\"Amy lives in Amsterdam\");\n\n let qa_input = QaInput { question, context };\n\n\n\n let answers = qa_model.predict(&[qa_input], 1, 32);\n\n\n\n assert_eq!(answers.len(), 1usize);\n\n assert_eq!(answers[0].len(), 1usize);\n\n assert_eq!(answers[0][0].start, 13);\n\n assert_eq!(answers[0][0].end, 22);\n\n assert!((answers[0][0].score - 0.9978).abs() < 1e-4);\n\n assert_eq!(answers[0][0].answer, \"Amsterdam\");\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/distilbert.rs", "rank": 67, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn bert_question_answering() -> anyhow::Result<()> {\n\n // Set-up question answering model\n\n let config = QuestionAnsweringConfig::new(\n\n ModelType::Bert,\n\n Resource::Remote(RemoteResource::from_pretrained(BertModelResources::BERT_QA)),\n\n Resource::Remote(RemoteResource::from_pretrained(\n\n BertConfigResources::BERT_QA,\n\n )),\n\n Resource::Remote(RemoteResource::from_pretrained(BertVocabResources::BERT_QA)),\n\n None, //merges resource only relevant with ModelType::Roberta\n\n false,\n\n false,\n\n None,\n\n );\n\n\n\n let qa_model = QuestionAnsweringModel::new(config)?;\n\n\n\n // Define input\n\n let question = String::from(\"Where does Amy live ?\");\n\n let context = String::from(\"Amy lives in Amsterdam\");\n", "file_path": "tests/bert.rs", "rank": 68, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn pegasus_summarization_greedy() -> anyhow::Result<()> {\n\n // Set-up model\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n PegasusConfigResources::CNN_DAILYMAIL,\n\n ));\n\n let vocab_resource = 
Resource::Remote(RemoteResource::from_pretrained(\n\n PegasusVocabResources::CNN_DAILYMAIL,\n\n ));\n\n let model_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n PegasusModelResources::CNN_DAILYMAIL,\n\n ));\n\n\n\n let summarization_config = SummarizationConfig {\n\n model_type: ModelType::Pegasus,\n\n model_resource,\n\n config_resource,\n\n vocab_resource: vocab_resource.clone(),\n\n merges_resource: vocab_resource,\n\n num_beams: 4,\n\n no_repeat_ngram_size: 3,\n", "file_path": "tests/pegasus.rs", "rank": 69, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn bert_for_question_answering() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertConfigResources::BERT));\n\n let vocab_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertVocabResources::BERT));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up model\n\n let device = Device::Cpu;\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer =\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n let mut config = BertConfig::from_file(config_path);\n\n config.output_attentions = Some(true);\n\n config.output_hidden_states = Some(true);\n\n let bert_model = BertForQuestionAnswering::new(&vs.root(), &config);\n\n\n\n // Define input\n", "file_path": "tests/bert.rs", "rank": 70, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn distilgpt2_lm_model() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n Gpt2ConfigResources::DISTIL_GPT2,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n Gpt2VocabResources::DISTIL_GPT2,\n\n ));\n\n let merges_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n Gpt2MergesResources::DISTIL_GPT2,\n\n ));\n\n let 
weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n Gpt2ModelResources::DISTIL_GPT2,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let merges_path = merges_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n // Set-up model\n", "file_path": "tests/distilgpt2.rs", "rank": 71, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn bert_for_token_classification() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertConfigResources::BERT));\n\n let vocab_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertVocabResources::BERT));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up model\n\n let device = Device::Cpu;\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer =\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n let mut config = BertConfig::from_file(config_path);\n\n let mut dummy_label_mapping = HashMap::new();\n\n dummy_label_mapping.insert(0, String::from(\"O\"));\n\n dummy_label_mapping.insert(1, String::from(\"LOC\"));\n\n dummy_label_mapping.insert(2, String::from(\"PER\"));\n\n dummy_label_mapping.insert(3, String::from(\"ORG\"));\n", "file_path": "tests/bert.rs", "rank": 72, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn mobilebert_masked_model() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n MobileBertConfigResources::MOBILEBERT_UNCASED,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n MobileBertVocabResources::MOBILEBERT_UNCASED,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n MobileBertModelResources::MOBILEBERT_UNCASED,\n\n 
));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n\n let device = Device::cuda_if_available();\n\n let mut vs = nn::VarStore::new(device);\n\n let tokenizer = BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n let mut config = MobileBertConfig::from_file(config_path);\n", "file_path": "tests/mobilebert.rs", "rank": 73, "score": 64817.234220858125 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up Question Answering model\n\n let config = QuestionAnsweringConfig::new(\n\n ModelType::Bert,\n\n Resource::Remote(RemoteResource::from_pretrained(BertModelResources::BERT_QA)),\n\n Resource::Remote(RemoteResource::from_pretrained(\n\n BertConfigResources::BERT_QA,\n\n )),\n\n Resource::Remote(RemoteResource::from_pretrained(BertVocabResources::BERT_QA)),\n\n None, //merges resource only relevant with ModelType::Roberta\n\n false,\n\n false,\n\n None,\n\n );\n\n\n\n let qa_model = QuestionAnsweringModel::new(config)?;\n\n\n\n // Define input\n\n let question_1 = String::from(\"Where does Amy live ?\");\n\n let context_1 = String::from(\"Amy lives in Amsterdam\");\n", "file_path": "examples/question_answering_bert.rs", "rank": 74, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn distilbert_for_token_classification() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n DistilBertConfigResources::DISTIL_BERT,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n DistilBertVocabResources::DISTIL_BERT,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n\n let device = Device::cuda_if_available();\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer 
=\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n let mut config = DistilBertConfig::from_file(config_path);\n\n config.output_attentions = Some(true);\n\n config.output_hidden_states = Some(true);\n\n let mut dummy_label_mapping = HashMap::new();\n", "file_path": "tests/distilbert.rs", "rank": 75, "score": 64817.234220858125 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up classifier\n\n let sentiment_classifier = SentimentModel::new(Default::default())?;\n\n\n\n // Define input\n\n let mut sst2_path = PathBuf::from(env::var(\"SST2_PATH\")\n\n .expect(\"Please set the \\\"squad_dataset\\\" environment variable pointing to the SQuAD dataset folder\"));\n\n sst2_path.push(\"train.tsv\");\n\n let inputs = ss2_processor(sst2_path).unwrap();\n\n\n\n // Run model\n\n let batch_size = 64;\n\n let mut output = vec![];\n\n for batch in inputs.chunks(batch_size) {\n\n output.push(\n\n sentiment_classifier.predict(\n\n batch\n\n .iter()\n\n .map(|v| v.as_str())\n\n .collect::<Vec<&str>>()\n", "file_path": "examples/sentiment_analysis_sst2.rs", "rank": 76, "score": 64817.234220858125 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up model resources\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n GptNeoConfigResources::GPT_NEO_1_3B,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n GptNeoVocabResources::GPT_NEO_1_3B,\n\n ));\n\n let merges_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n GptNeoMergesResources::GPT_NEO_1_3B,\n\n ));\n\n let model_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n GptNeoModelResources::GPT_NEO_1_3B,\n\n ));\n\n let generate_config = TextGenerationConfig {\n\n model_type: ModelType::GPTNeo,\n\n model_resource,\n\n config_resource,\n\n vocab_resource,\n\n merges_resource,\n", "file_path": "examples/generation_gpt_neo.rs", "rank": 77, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn 
albert_for_multiple_choice() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertConfigResources::ALBERT_BASE_V2,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertVocabResources::ALBERT_BASE_V2,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up model\n\n let device = Device::Cpu;\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: AlbertTokenizer =\n\n AlbertTokenizer::from_file(vocab_path.to_str().unwrap(), true, false)?;\n\n let mut config = AlbertConfig::from_file(config_path);\n\n config.output_attentions = Some(true);\n\n config.output_hidden_states = Some(true);\n\n let albert_model = AlbertForMultipleChoice::new(&vs.root(), &config);\n", "file_path": "tests/albert.rs", "rank": 78, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn distilbert_sentiment_classifier() -> anyhow::Result<()> {\n\n // Set-up classifier\n\n let sentiment_classifier = SentimentModel::new(Default::default())?;\n\n\n\n // Get sentiments\n\n let input = [\n\n \"Probably my all-time favorite movie, a story of selflessness, sacrifice and dedication to a noble cause, but it's not preachy or boring.\",\n\n \"This film tried to be too many things all at once: stinging political satire, Hollywood blockbuster, sappy romantic comedy, family values promo...\",\n\n \"If you like original gut wrenching laughter you will like this movie. 
If you are young or old then you will love this movie, hell even my mom liked it.\",\n\n ];\n\n\n\n let output = sentiment_classifier.predict(&input);\n\n\n\n assert_eq!(output.len(), 3usize);\n\n assert_eq!(output[0].polarity, SentimentPolarity::Positive);\n\n assert!((output[0].score - 0.9981).abs() < 1e-4);\n\n assert_eq!(output[1].polarity, SentimentPolarity::Negative);\n\n assert!((output[1].score - 0.9927).abs() < 1e-4);\n\n assert_eq!(output[2].polarity, SentimentPolarity::Positive);\n\n assert!((output[2].score - 0.9997).abs() < 1e-4);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/distilbert.rs", "rank": 79, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn electra_masked_lm() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ElectraConfigResources::BASE_GENERATOR,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ElectraVocabResources::BASE_GENERATOR,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ElectraModelResources::BASE_GENERATOR,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n\n let device = Device::Cpu;\n\n let mut vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer =\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n", "file_path": "tests/electra.rs", "rank": 80, "score": 64817.234220858125 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up model\n\n let ner_model = NERModel::new(Default::default())?;\n\n\n\n // Define input\n\n let input = [\n\n \"My name is Amélie. 
I live in Москва.\",\n\n \"Chongqing is a city in China.\",\n\n ];\n\n\n\n // Run model\n\n let output = ner_model.predict(&input);\n\n for entity in output {\n\n println!(\"{:?}\", entity);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/named_entities_recognition.rs", "rank": 81, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn bert_for_sequence_classification() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertConfigResources::BERT));\n\n let vocab_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertVocabResources::BERT));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up model\n\n let device = Device::Cpu;\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer =\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n let mut config = BertConfig::from_file(config_path);\n\n let mut dummy_label_mapping = HashMap::new();\n\n dummy_label_mapping.insert(0, String::from(\"Positive\"));\n\n dummy_label_mapping.insert(1, String::from(\"Negative\"));\n\n dummy_label_mapping.insert(3, String::from(\"Neutral\"));\n\n config.id2label = Some(dummy_label_mapping);\n", "file_path": "tests/bert.rs", "rank": 82, "score": 64817.234220858125 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up Question Answering model\n\n let qa_model = QuestionAnsweringModel::new(Default::default())?;\n\n\n\n // Define input\n\n let mut squad_path = PathBuf::from(env::var(\"squad_dataset\")\n\n .expect(\"Please set the \\\"squad_dataset\\\" environment variable pointing to the SQuAD dataset folder\"));\n\n squad_path.push(\"dev-v2.0.json\");\n\n let qa_inputs = squad_processor(squad_path);\n\n\n\n // Get answer\n\n let answers = qa_model.predict(&qa_inputs, 1, 64);\n\n println!(\"Sample answer: {:?}\", answers.first().unwrap());\n\n println!(\"{}\", 
answers.len());\n\n Ok(())\n\n}\n", "file_path": "examples/question_answering_squad.rs", "rank": 83, "score": 64817.234220858125 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up Question Answering model\n\n let config = QuestionAnsweringConfig::new(\n\n ModelType::Longformer,\n\n Resource::Remote(RemoteResource::from_pretrained(\n\n LongformerModelResources::LONGFORMER_BASE_SQUAD1,\n\n )),\n\n Resource::Remote(RemoteResource::from_pretrained(\n\n LongformerConfigResources::LONGFORMER_BASE_SQUAD1,\n\n )),\n\n Resource::Remote(RemoteResource::from_pretrained(\n\n LongformerVocabResources::LONGFORMER_BASE_SQUAD1,\n\n )),\n\n Some(Resource::Remote(RemoteResource::from_pretrained(\n\n LongformerMergesResources::LONGFORMER_BASE_SQUAD1,\n\n ))),\n\n false,\n\n None,\n\n false,\n\n );\n", "file_path": "examples/question_answering_longformer.rs", "rank": 84, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn albert_for_sequence_classification() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertConfigResources::ALBERT_BASE_V2,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertVocabResources::ALBERT_BASE_V2,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up model\n\n let device = Device::Cpu;\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: AlbertTokenizer =\n\n AlbertTokenizer::from_file(vocab_path.to_str().unwrap(), true, false)?;\n\n let mut config = AlbertConfig::from_file(config_path);\n\n let mut dummy_label_mapping = HashMap::new();\n\n dummy_label_mapping.insert(0, String::from(\"Positive\"));\n\n dummy_label_mapping.insert(1, String::from(\"Negative\"));\n", "file_path": "tests/albert.rs", "rank": 85, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn mobilebert_for_sequence_classification() -> anyhow::Result<()> 
{\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n MobileBertConfigResources::MOBILEBERT_UNCASED,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n MobileBertVocabResources::MOBILEBERT_UNCASED,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up model\n\n let device = Device::cuda_if_available();\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer = BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n let mut config = MobileBertConfig::from_file(config_path);\n\n let mut dummy_label_mapping = HashMap::new();\n\n dummy_label_mapping.insert(0, String::from(\"Positive\"));\n\n dummy_label_mapping.insert(1, String::from(\"Negative\"));\n\n dummy_label_mapping.insert(3, String::from(\"Neutral\"));\n", "file_path": "tests/mobilebert.rs", "rank": 86, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn prophetnet_summarization_greedy() -> anyhow::Result<()> {\n\n // Set-up model\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ProphetNetConfigResources::PROPHETNET_LARGE_CNN_DM,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ProphetNetVocabResources::PROPHETNET_LARGE_CNN_DM,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n ProphetNetModelResources::PROPHETNET_LARGE_CNN_DM,\n\n ));\n\n\n\n let summarization_config = SummarizationConfig {\n\n model_type: ModelType::ProphetNet,\n\n model_resource: weights_resource,\n\n config_resource,\n\n vocab_resource: vocab_resource.clone(),\n\n merges_resource: vocab_resource,\n\n length_penalty: 1.2,\n\n num_beams: 4,\n", "file_path": "tests/prophetnet.rs", "rank": 87, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn gpt2_generation_greedy() -> anyhow::Result<()> {\n\n // Resources definition\n\n let 
config_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(Gpt2ConfigResources::GPT2));\n\n let vocab_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(Gpt2VocabResources::GPT2));\n\n let merges_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(Gpt2MergesResources::GPT2));\n\n let model_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(Gpt2ModelResources::GPT2));\n\n\n\n let generate_config = TextGenerationConfig {\n\n model_type: ModelType::GPT2,\n\n model_resource,\n\n config_resource,\n\n vocab_resource,\n\n merges_resource,\n\n max_length: 40,\n\n do_sample: false,\n\n num_beams: 1,\n", "file_path": "tests/gpt2.rs", "rank": 88, "score": 64817.234220858125 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up model\n\n let pos_model = POSModel::new(Default::default())?;\n\n\n\n // Define input\n\n let input = [\"My name is Bob\"];\n\n\n\n // Run model\n\n let output = pos_model.predict(&input);\n\n for pos_tag in output {\n\n println!(\"{:?}\", pos_tag);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/part_of_speech_tagging.rs", "rank": 89, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn albert_for_question_answering() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertConfigResources::ALBERT_BASE_V2,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertVocabResources::ALBERT_BASE_V2,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up model\n\n let device = Device::Cpu;\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: AlbertTokenizer =\n\n AlbertTokenizer::from_file(vocab_path.to_str().unwrap(), true, false)?;\n\n let mut config = AlbertConfig::from_file(config_path);\n\n config.output_attentions = Some(true);\n\n config.output_hidden_states = Some(true);\n\n let 
albert_model = AlbertForQuestionAnswering::new(&vs.root(), &config);\n", "file_path": "tests/albert.rs", "rank": 90, "score": 64817.234220858125 }, { "content": "fn main() -> anyhow::Result<()> {\n\n // Set-up model\n\n let sequence_classification_model = ZeroShotClassificationModel::new(Default::default())?;\n\n\n\n let input_sentence = \"Who are you voting for in 2020?\";\n\n let input_sequence_2 = \"The prime minister has announced a stimulus package which was widely criticized by the opposition.\";\n\n let candidate_labels = &[\"politics\", \"public health\", \"economy\", \"sports\"];\n\n\n\n let output = sequence_classification_model.predict_multilabel(\n\n &[input_sentence, input_sequence_2],\n\n candidate_labels,\n\n Some(Box::new(|label: &str| {\n\n format!(\"This example is about {}.\", label)\n\n })),\n\n 128,\n\n );\n\n\n\n println!(\"{:?}\", output);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/zero_shot_classification.rs", "rank": 91, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn gpt2_lm_model() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(Gpt2ConfigResources::GPT2));\n\n let vocab_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(Gpt2VocabResources::GPT2));\n\n let merges_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(Gpt2MergesResources::GPT2));\n\n let weights_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(Gpt2ModelResources::GPT2));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let merges_path = merges_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n let device = Device::Cpu;\n\n let mut vs = nn::VarStore::new(device);\n\n let tokenizer: Gpt2Tokenizer = Gpt2Tokenizer::from_file(\n\n vocab_path.to_str().unwrap(),\n\n merges_path.to_str().unwrap(),\n", "file_path": "tests/gpt2.rs", "rank": 92, 
"score": 64817.234220858125 }, { "content": "#[test]\n\nfn distilbert_for_question_answering() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n DistilBertConfigResources::DISTIL_BERT_SQUAD,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n DistilBertVocabResources::DISTIL_BERT_SQUAD,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n\n let device = Device::cuda_if_available();\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer =\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n let mut config = DistilBertConfig::from_file(config_path);\n\n config.output_attentions = Some(true);\n\n config.output_hidden_states = Some(true);\n\n let distil_bert_model = DistilBertForQuestionAnswering::new(&vs.root(), &config);\n", "file_path": "tests/distilbert.rs", "rank": 93, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn bert_masked_lm() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertConfigResources::BERT));\n\n let vocab_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertVocabResources::BERT));\n\n let weights_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertModelResources::BERT));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n\n let device = Device::Cpu;\n\n let mut vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer =\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n let config = BertConfig::from_file(config_path);\n\n let bert_model = BertForMaskedLM::new(&vs.root(), &config);\n\n 
vs.load(weights_path)?;\n", "file_path": "tests/bert.rs", "rank": 94, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn mbart_lm_model() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n MBartConfigResources::MBART50_MANY_TO_MANY,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n MBartVocabResources::MBART50_MANY_TO_MANY,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n MBartModelResources::MBART50_MANY_TO_MANY,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n\n let device = Device::Cpu;\n\n let mut vs = nn::VarStore::new(device);\n\n let tokenizer = MBart50Tokenizer::from_file(vocab_path.to_str().unwrap(), false)?;\n\n let config = MBartConfig::from_file(config_path);\n", "file_path": "tests/mbart.rs", "rank": 95, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn bert_for_multiple_choice() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertConfigResources::BERT));\n\n let vocab_resource =\n\n Resource::Remote(RemoteResource::from_pretrained(BertVocabResources::BERT));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up model\n\n let device = Device::Cpu;\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer =\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n\n let mut config = BertConfig::from_file(config_path);\n\n config.output_attentions = Some(true);\n\n config.output_hidden_states = Some(true);\n\n let bert_model = BertForMultipleChoice::new(&vs.root(), &config);\n\n\n\n // Define input\n", "file_path": "tests/bert.rs", "rank": 
96, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn distilbert_masked_lm() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n DistilBertConfigResources::DISTIL_BERT,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n DistilBertVocabResources::DISTIL_BERT,\n\n ));\n\n let weights_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n DistilBertModelResources::DISTIL_BERT,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n let weights_path = weights_resource.get_local_path()?;\n\n\n\n // Set-up masked LM model\n\n let device = Device::cuda_if_available();\n\n let mut vs = nn::VarStore::new(device);\n\n let tokenizer: BertTokenizer =\n\n BertTokenizer::from_file(vocab_path.to_str().unwrap(), true, true)?;\n", "file_path": "tests/distilbert.rs", "rank": 97, "score": 64817.234220858125 }, { "content": "#[test]\n\nfn albert_for_token_classification() -> anyhow::Result<()> {\n\n // Resources paths\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertConfigResources::ALBERT_BASE_V2,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n AlbertVocabResources::ALBERT_BASE_V2,\n\n ));\n\n let config_path = config_resource.get_local_path()?;\n\n let vocab_path = vocab_resource.get_local_path()?;\n\n\n\n // Set-up model\n\n let device = Device::Cpu;\n\n let vs = nn::VarStore::new(device);\n\n let tokenizer: AlbertTokenizer =\n\n AlbertTokenizer::from_file(vocab_path.to_str().unwrap(), true, false)?;\n\n let mut config = AlbertConfig::from_file(config_path);\n\n let mut dummy_label_mapping = HashMap::new();\n\n dummy_label_mapping.insert(0, String::from(\"O\"));\n\n dummy_label_mapping.insert(1, String::from(\"LOC\"));\n", "file_path": "tests/albert.rs", "rank": 98, "score": 64817.234220858125 }, { "content": 
"#[test]\n\nfn bart_summarization_greedy() -> anyhow::Result<()> {\n\n let config_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartConfigResources::DISTILBART_CNN_6_6,\n\n ));\n\n let vocab_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartVocabResources::DISTILBART_CNN_6_6,\n\n ));\n\n let merges_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartMergesResources::DISTILBART_CNN_6_6,\n\n ));\n\n let model_resource = Resource::Remote(RemoteResource::from_pretrained(\n\n BartModelResources::DISTILBART_CNN_6_6,\n\n ));\n\n let summarization_config = SummarizationConfig {\n\n model_resource,\n\n config_resource,\n\n vocab_resource,\n\n merges_resource,\n\n num_beams: 1,\n\n length_penalty: 1.0,\n", "file_path": "tests/bart.rs", "rank": 99, "score": 64817.234220858125 } ]
Rust
crates/rome_console/src/markup.rs
RustPhilly/tools
a5c89104e6623b2eb51e2fc1881ddc551fde34d2
use std::{ fmt::{self, Debug}, io, }; use termcolor::{Color, ColorSpec}; use crate::fmt::{Display, Formatter, MarkupElements, Write}; #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum MarkupElement { Emphasis, Dim, Italic, Underline, Error, Success, Warn, Info, } impl MarkupElement { pub(crate) fn update_color(&self, color: &mut ColorSpec) { match self { MarkupElement::Emphasis => { color.set_bold(true); } MarkupElement::Dim => { color.set_dimmed(true); } MarkupElement::Italic => { color.set_italic(true); } MarkupElement::Underline => { color.set_underline(true); } MarkupElement::Error => { color.set_fg(Some(Color::Red)); } MarkupElement::Success => { color.set_fg(Some(Color::Green)); } MarkupElement::Warn => { color.set_fg(Some(Color::Yellow)); } MarkupElement::Info => { #[cfg(windows)] const BLUE: Color = Color::Cyan; #[cfg(not(windows))] const BLUE: Color = Color::Blue; color.set_fg(Some(BLUE)); } } } } #[derive(Copy, Clone)] pub struct MarkupNode<'fmt> { pub elements: &'fmt [MarkupElement], pub content: &'fmt dyn Display, } #[derive(Clone, PartialEq, Eq)] pub struct MarkupNodeBuf { pub elements: Vec<MarkupElement>, pub content: String, } impl Debug for MarkupNodeBuf { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { for element in &self.elements { write!(fmt, "<{element:?}>")?; } write!(fmt, "{:?}", self.content)?; for element in self.elements.iter().rev() { write!(fmt, "</{element:?}>")?; } if fmt.alternate() && self.content.contains('\n') { writeln!(fmt)?; } Ok(()) } } #[derive(Copy, Clone)] pub struct Markup<'fmt>(pub &'fmt [MarkupNode<'fmt>]); impl<'fmt> Markup<'fmt> { pub fn to_owned(&self) -> MarkupBuf { let mut result = MarkupBuf(Vec::new()); Formatter::new(&mut result).write_markup(*self).unwrap(); result } } #[derive(Clone, Default, PartialEq, Eq)] pub struct MarkupBuf(pub Vec<MarkupNodeBuf>); impl MarkupBuf { pub(crate) fn is_empty(&self) -> bool { self.0.is_empty() } } impl Write for MarkupBuf { fn write_str(&mut self, elements: 
&MarkupElements, content: &str) -> io::Result<()> { let mut styles = Vec::new(); elements.for_each(&mut |elements| { styles.extend_from_slice(elements); }); if let Some(last) = self.0.last_mut() { if last.elements == styles { last.content.push_str(content); return Ok(()); } } self.0.push(MarkupNodeBuf { elements: styles, content: content.into(), }); Ok(()) } fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()> { let mut styles = Vec::new(); elements.for_each(&mut |elements| { styles.extend_from_slice(elements); }); if let Some(last) = self.0.last_mut() { if last.elements == styles { last.content.push_str(&content.to_string()); return Ok(()); } } self.0.push(MarkupNodeBuf { elements: styles, content: content.to_string(), }); Ok(()) } } impl Display for MarkupBuf { fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { let nodes: Vec<_> = self .0 .iter() .map(|node| MarkupNode { elements: &node.elements, content: &node.content, }) .collect(); fmt.write_markup(Markup(&nodes)) } } impl Debug for MarkupBuf { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { for node in &self.0 { write!(fmt, "{node:?}")?; } Ok(()) } }
use std::{ fmt::{self, Debug}, io, }; use termcolor::{Color, ColorSpec}; use crate::fmt::{Display, Formatter, MarkupElements, Write}; #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum MarkupElement { Emphasis, Dim, Italic, Underline, Error, Success, Warn, Info, } impl MarkupElement { pub(crate) fn update_color(&self, color: &mut ColorSpec) { match self { MarkupElement::Emphasis => { color.set_bold(true); } MarkupElement::Dim => { color.set_dimmed(true); } MarkupElement::Italic => { color.set_italic(true); } MarkupElement::Underline => { color.set_underline(true); } MarkupElement::Error => { color.set_fg(Some(Color::Red)); } MarkupElement::Success => { color.set_fg(Some(Color::Green)); } MarkupElement::Warn => { color.set_fg(Some(Color::Yellow)); } MarkupElement::Info => { #[cfg(windows)] const BLUE: Color = Color::Cyan; #[cfg(not(windows))] const BLUE: Color = Color::Blue; color.set_fg(Some(BLUE)); } } } } #[derive(Copy, Clone)] pub struct MarkupNode<'fmt> { pub elements: &'fmt [MarkupElement], pub content: &'fmt dyn Display, } #[derive(Clone, PartialEq, Eq)] pub struct MarkupNodeBuf { pub elements: Vec<MarkupElement>, pub content: String, } impl Debug for MarkupNodeBuf { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { for element in &self.elements {
} #[derive(Copy, Clone)] pub struct Markup<'fmt>(pub &'fmt [MarkupNode<'fmt>]); impl<'fmt> Markup<'fmt> { pub fn to_owned(&self) -> MarkupBuf { let mut result = MarkupBuf(Vec::new()); Formatter::new(&mut result).write_markup(*self).unwrap(); result } } #[derive(Clone, Default, PartialEq, Eq)] pub struct MarkupBuf(pub Vec<MarkupNodeBuf>); impl MarkupBuf { pub(crate) fn is_empty(&self) -> bool { self.0.is_empty() } } impl Write for MarkupBuf { fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()> { let mut styles = Vec::new(); elements.for_each(&mut |elements| { styles.extend_from_slice(elements); }); if let Some(last) = self.0.last_mut() { if last.elements == styles { last.content.push_str(content); return Ok(()); } } self.0.push(MarkupNodeBuf { elements: styles, content: content.into(), }); Ok(()) } fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()> { let mut styles = Vec::new(); elements.for_each(&mut |elements| { styles.extend_from_slice(elements); }); if let Some(last) = self.0.last_mut() { if last.elements == styles { last.content.push_str(&content.to_string()); return Ok(()); } } self.0.push(MarkupNodeBuf { elements: styles, content: content.to_string(), }); Ok(()) } } impl Display for MarkupBuf { fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { let nodes: Vec<_> = self .0 .iter() .map(|node| MarkupNode { elements: &node.elements, content: &node.content, }) .collect(); fmt.write_markup(Markup(&nodes)) } } impl Debug for MarkupBuf { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { for node in &self.0 { write!(fmt, "{node:?}")?; } Ok(()) } }
write!(fmt, "<{element:?}>")?; } write!(fmt, "{:?}", self.content)?; for element in self.elements.iter().rev() { write!(fmt, "</{element:?}>")?; } if fmt.alternate() && self.content.contains('\n') { writeln!(fmt)?; } Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn prepend_generated_preamble(content: impl Display) -> String {\n\n format!(\"//! {}\\n\\n{}\", PREAMBLE, content)\n\n}\n\n\n", "file_path": "xtask/src/lib.rs", "rank": 0, "score": 448341.5380411264 }, { "content": "pub trait Language: Sized + Clone + Copy + fmt::Debug + Eq + Ord + std::hash::Hash {\n\n type Kind: SyntaxKind;\n\n}\n\n\n\n/// A list of `SyntaxNode`s and/or `SyntaxToken`s\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash)]\n\npub struct SyntaxList<L: Language> {\n\n list: SyntaxNode<L>,\n\n}\n\n\n\nimpl<L: Language> SyntaxList<L> {\n\n /// Creates a new list wrapping a List `SyntaxNode`\n\n fn new(node: SyntaxNode<L>) -> Self {\n\n Self { list: node }\n\n }\n\n\n\n /// Iterates over the elements in the list.\n\n pub fn iter(&self) -> SyntaxSlots<L> {\n\n self.list.slots()\n\n }\n", "file_path": "crates/rome_rowan/src/syntax.rs", "rank": 1, "score": 373366.89663790824 }, { "content": "pub fn reformat(text: impl Display) -> Result<String> {\n\n reformat_without_preamble(text).map(prepend_generated_preamble)\n\n}\n\n\n\nconst PREAMBLE: &str = \"Generated file, do not edit by hand, see `xtask/codegen`\";\n", "file_path": "xtask/src/lib.rs", "rank": 2, "score": 369398.5227274509 }, { "content": "fn err_to_string<E: std::fmt::Debug>(e: E) -> String {\n\n format!(\"{:?}\", e)\n\n}\n\n\n\npub struct RunArgs {\n\n pub filter: String,\n\n pub criterion: bool,\n\n pub baseline: Option<String>,\n\n pub feature: FeatureToBenchmark,\n\n pub suites: String,\n\n}\n\n\n", "file_path": "xtask/bench/src/lib.rs", "rank": 3, "score": 366639.69165635225 }, { "content": "pub fn reformat_without_preamble(text: impl Display) -> Result<String> {\n\n let _e = pushenv(\"RUSTUP_TOOLCHAIN\", \"stable\");\n\n ensure_rustfmt()?;\n\n let output = run!(\n\n \"rustfmt --config fn_single_line=true\";\n\n <text.to_string().as_bytes()\n\n )?;\n\n\n\n Ok(format!(\"{}\\n\", output))\n\n}\n\n\n", "file_path": "xtask/src/lib.rs", "rank": 4, "score": 359868.1649495898 }, { 
"content": "#[inline]\n\npub fn comment(element: impl Into<FormatElement>) -> FormatElement {\n\n FormatElement::Comment(Box::new(element.into()))\n\n}\n\n\n\n/// Inserts a single space. Allows to separate different tokens.\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// use rome_formatter::{FormatOptions, token, format_element, space_token, format_elements};\n\n///\n\n/// // the tab must be encoded as \\\\t to not literally print a tab character (\"Hello{tab}World\" vs \"Hello\\tWorld\")\n\n/// let elements = format_elements![token(\"a\"), space_token(), token(\"b\")];\n\n///\n\n/// assert_eq!(\"a b\", format_element(&elements, FormatOptions::default()).as_code());\n\n/// ```\n\n#[inline]\n\npub const fn space_token() -> FormatElement {\n\n FormatElement::Space\n\n}\n\n\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 5, "score": 355022.77082310617 }, { "content": "#[inline]\n\npub fn line_suffix(element: impl Into<FormatElement>) -> FormatElement {\n\n FormatElement::LineSuffix(Box::new(element.into()))\n\n}\n\n\n\n/// Mark a [FormatElement] as being a piece of trivia\n\n///\n\n/// This does not directly influence how this content will be printed, but some\n\n/// parts of the formatter may chose to handle this element in a specific way\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n\n/// use rome_formatter::{FormatOptions, token, format_element, comment, format_elements, group_elements, empty_line, soft_line_break_or_space};\n\n///\n\n/// let elements = group_elements(format_elements![comment(empty_line()), token(\"a\"), soft_line_break_or_space(), token(\"b\")]);\n\n///\n\n/// assert_eq!(\"\\na b\", format_element(&elements, FormatOptions::default()).as_code());\n\n/// ```\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 6, "score": 351215.49901471415 }, { "content": "#[inline]\n\npub fn group_elements<T: Into<FormatElement>>(content: T) -> FormatElement {\n\n let content: FormatElement = content.into();\n\n let 
(leading, content, trailing) = content.split_trivia();\n\n format_elements![leading, Group::new(content), trailing]\n\n}\n\n\n\n/// Creates a group that forces all elements inside it to be printed on a\n\n/// single line. This behavior can in turn be escaped by introducing an inner\n\n/// `Group` element that will resume the normal breaking behavior of the printer.\n\n///\n\n/// This is useful for constructs that have a non-breaking head and a breaking\n\n/// body, such class declarations:\n\n/// ```js\n\n/// abstract /* comment */ class Example\n\n/// // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ non-breaking part\n\n/// { // <\n\n/// } // < breaking part\n\n/// ```\n\n///\n\n/// # Example\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 7, "score": 325029.34187257796 }, { "content": "#[inline]\n\npub fn indent<T: Into<FormatElement>>(content: T) -> FormatElement {\n\n let content = content.into();\n\n\n\n if content.is_empty() {\n\n content\n\n } else {\n\n format_elements![Indent::new(format_elements![content])]\n\n }\n\n}\n\n\n\n/// Inserts a hard line break before and after the content and increases the indention level for the content by one.\n\n///\n\n/// Block indents indent a block of code, such as in a function body, and therefore insert a line\n\n/// break before and after the content.\n\n///\n\n/// Doesn't create an indention if the passed in content is [FormatElement.is_empty].\n\n///\n\n/// ## Examples\n\n///\n\n/// ```\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 8, "score": 324697.79127188394 }, { "content": "/// Concatenates a list of [FormatElement]s with spaces and line breaks to fit\n\n/// them on as few lines as possible\n\n///\n\n/// ## Examples\n\n///\n\n/// ```rust\n\n/// use std::str::from_utf8;\n\n/// use rome_formatter::{fill_elements, FormatElement, space_token, token, format_element, FormatOptions};\n\n/// let a = from_utf8(&[b'a'; 30]).unwrap();\n\n/// let b = from_utf8(&[b'b'; 
30]).unwrap();\n\n/// let c = from_utf8(&[b'c'; 30]).unwrap();\n\n/// let d = from_utf8(&[b'd'; 30]).unwrap();\n\n/// let expr = fill_elements([token(a), token(b), token(c), token(d)]);\n\n///\n\n/// assert_eq!(format!(\"{a} {b}\\n{c} {d}\"), format_element(&expr, FormatOptions::default()).into_code())\n\n/// ```\n\npub fn fill_elements(elements: impl IntoIterator<Item = FormatElement>) -> FormatElement {\n\n let mut list: Vec<_> = elements.into_iter().collect();\n\n match list.len() {\n\n 0 => empty_element(),\n\n 1 => list.pop().unwrap(),\n\n _ => FormatElement::Fill(List::new(list)),\n\n }\n\n}\n\n\n\n/// Joins the elements by placing a given separator between elements.\n\n///\n\n/// ## Examples\n\n///\n\n/// Joining different tokens by separating them with a comma and a space.\n\n///\n\n/// ```\n\n/// use rome_formatter::{concat_elements, FormatOptions, join_elements, space_token, token, format_element};\n\n///\n\n/// let separator = concat_elements(vec![token(\",\"), space_token()]);\n\n/// let elements = join_elements(separator, vec![token(\"1\"), token(\"2\"), token(\"3\"), token(\"4\")]);\n\n///\n\n/// assert_eq!(\"1, 2, 3, 4\", format_element(&elements, FormatOptions::default()).as_code());\n\n/// ```\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 9, "score": 324222.9844982327 }, { "content": "#[inline]\n\npub fn hard_group_elements<T: Into<FormatElement>>(content: T) -> FormatElement {\n\n let content = content.into();\n\n\n\n if content.is_empty() {\n\n content\n\n } else {\n\n let (leading, content, trailing) = content.split_trivia();\n\n format_elements![\n\n leading,\n\n FormatElement::HardGroup(Group::new(format_elements![content, trailing])),\n\n ]\n\n }\n\n}\n\n\n\n/// Adds a conditional content that is emitted only if it isn't inside an enclosing `Group` that\n\n/// is printed on a single line. 
The element allows, for example, to insert a trailing comma after the last\n\n/// array element only if the array doesn't fit on a single line.\n\n///\n\n/// The element has no special meaning if used outside of a `Group`. In that case, the content is always emitted.\n\n///\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 10, "score": 321441.24652057 }, { "content": "#[inline]\n\npub fn if_group_breaks<T: Into<FormatElement>>(content: T) -> FormatElement {\n\n let content = content.into();\n\n\n\n if content.is_empty() {\n\n content\n\n } else {\n\n FormatElement::from(ConditionalGroupContent::new(\n\n content,\n\n GroupPrintMode::Multiline,\n\n ))\n\n }\n\n}\n\n\n\n/// Adds a conditional content specific for `Group`s that fit on a single line. The content isn't\n\n/// emitted for `Group`s spanning multiple lines.\n\n///\n\n/// See [if_group_breaks] if you're looking for a way to print content only for groups spanning multiple lines.\n\n///\n\n/// ## Examples\n\n///\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 11, "score": 320911.92955610267 }, { "content": "#[inline]\n\npub fn block_indent<T: Into<FormatElement>>(content: T) -> FormatElement {\n\n let content = content.into();\n\n\n\n if content.is_empty() {\n\n content\n\n } else {\n\n format_elements![\n\n Indent::new(format_elements![hard_line_break(), content]),\n\n hard_line_break(),\n\n ]\n\n }\n\n}\n\n\n\n/// Indents the content by inserting a line break before and after the content and increasing\n\n/// the indention level for the content by one if the enclosing group doesn't fit on a single line.\n\n/// Doesn't change the formatting if the enclosing group fits on a single line.\n\n///\n\n/// ## Examples\n\n///\n\n/// Indents the content by one level and puts in new lines if the enclosing `Group` doesn't fit on a single line\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 12, "score": 320911.92955610267 }, { "content": 
"#[inline]\n\npub fn soft_block_indent<T: Into<FormatElement>>(content: T) -> FormatElement {\n\n let content = content.into();\n\n\n\n if content.is_empty() {\n\n content\n\n } else {\n\n format_elements![\n\n Indent::new(format_elements![soft_line_break(), content]),\n\n soft_line_break(),\n\n ]\n\n }\n\n}\n\n\n\n/// If the enclosing `Group` doesn't fit on a single line, inserts a line break and indent.\n\n/// Otherwise, just inserts a space.\n\n///\n\n/// Line indents are used to break a single line of code, and therefore only insert a line\n\n/// break before the content and not after the content.\n\n///\n\n/// ## Examples\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 13, "score": 317235.9548788632 }, { "content": "#[inline]\n\npub fn soft_line_indent_or_space<T: Into<FormatElement>>(content: T) -> FormatElement {\n\n let content = content.into();\n\n\n\n if content.is_empty() {\n\n content\n\n } else {\n\n format_elements![Indent::new(format_elements![\n\n soft_line_break_or_space(),\n\n content\n\n ])]\n\n }\n\n}\n\n\n\n/// Creates a logical `Group` around the content that should either consistently be printed on a single line\n\n/// or broken across multiple lines.\n\n///\n\n/// The printer will try to print the content of the `Group` on a single line, ignoring all soft line breaks and\n\n/// emitting spaces for soft line breaks or spaces. 
The printer tracks back if it isn't successful either\n\n/// because it encountered a hard line break, or because printing the `Group` on a single line exceeds\n\n/// the configured line width, and thus it must print all its content on multiple lines,\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 14, "score": 313664.96949160105 }, { "content": "#[inline]\n\npub fn process(sink: &mut impl TreeSink, mut events: Vec<Event>, errors: Vec<ParseDiagnostic>) {\n\n sink.errors(errors);\n\n let mut forward_parents = Vec::new();\n\n\n\n for i in 0..events.len() {\n\n match &mut events[i] {\n\n Event::Start {\n\n kind: TOMBSTONE, ..\n\n } => (),\n\n\n\n Event::Start {\n\n kind,\n\n forward_parent,\n\n ..\n\n } => {\n\n // For events[A, B, C], B is A's forward_parent, C is B's forward_parent,\n\n // in the normal control flow, the parent-child relation: `A -> B -> C`,\n\n // while with the magic forward_parent, it writes: `C <- B <- A`.\n\n\n\n // append `A` into parents.\n", "file_path": "crates/rome_js_parser/src/event.rs", "rank": 15, "score": 304605.301176981 }, { "content": "/// Colors a piece of source code using ANSI.\n\n/// The string returned will be unaltered if stdout and stderr are not terminals.\n\npub fn color(source: &str) -> String {\n\n Highlighter::new(source).color()\n\n}\n", "file_path": "crates/rome_js_parser/src/lexer/highlight.rs", "rank": 16, "score": 303700.1313750533 }, { "content": "fn print_debug_str<S: AsRef<str>>(text: S, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let text = text.as_ref();\n\n return if text.len() < 25 {\n\n write!(f, \"{:?}\", text)\n\n } else {\n\n for idx in 21..25 {\n\n if text.is_char_boundary(idx) {\n\n let text = format!(\"{} ...\", &text[..idx]);\n\n return write!(f, \"{:?}\", text);\n\n }\n\n }\n\n write!(f, \"\")\n\n };\n\n}\n", "file_path": "crates/rome_rowan/src/syntax/trivia.rs", "rank": 17, "score": 297260.03623028344 }, { "content": "/// Format element that doesn't represent any 
content.\n\n///\n\n/// Can be helpful if you need to return a `FormatElement` (e.g. in an else branch) but don't want\n\n/// to show any content.\n\npub fn empty_element() -> FormatElement {\n\n FormatElement::Empty\n\n}\n\n\n\n/// A line break that only gets printed if the enclosing `Group` doesn't fit on a single line.\n\n/// It's omitted if the enclosing `Group` fits on a single line.\n\n/// A soft line break is identical to a hard line break when not enclosed inside of a `Group`.\n\n///\n\n/// ## Examples\n\n///\n\n/// Soft line breaks are omitted if the enclosing `Group` fits on a single line\n\n///\n\n/// ```\n\n/// use rome_formatter::{group_elements, format_element, format_elements, token, soft_line_break, FormatOptions};\n\n///\n\n/// let elements = group_elements(format_elements![\n\n/// token(\"a,\"),\n\n/// soft_line_break(),\n\n/// token(\"b\"),\n\n/// ]);\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 18, "score": 292845.6596337607 }, { "content": "#[inline]\n\npub fn if_group_fits_on_single_line<TFlat>(flat_content: TFlat) -> FormatElement\n\nwhere\n\n TFlat: Into<FormatElement>,\n\n{\n\n let flat_content = flat_content.into();\n\n\n\n if flat_content.is_empty() {\n\n flat_content\n\n } else {\n\n FormatElement::from(ConditionalGroupContent::new(\n\n flat_content,\n\n GroupPrintMode::Flat,\n\n ))\n\n }\n\n}\n\n\n\n/// Specialized version of [join_elements] for joining SyntaxNodes separated by a space, soft\n\n/// line break or empty line depending on the input file.\n\n///\n\n/// This functions inspects the input source and separates consecutive elements with either\n\n/// a [soft_line_break_or_space] or [empty_line] depending on how many line breaks were\n\n/// separating the elements in the original file.\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 19, "score": 292832.0118327712 }, { "content": "/// Type tag for each node or token of a language\n\npub trait SyntaxKind: fmt::Debug + PartialEq + 
Copy {\n\n /// Returns `true` if this is an unknown node kind.\n\n fn is_unknown(&self) -> bool;\n\n\n\n /// Converts this into to the best matching unknown node kind.\n\n fn to_unknown(&self) -> Self;\n\n\n\n /// Converts this kind to a raw syntax kind.\n\n fn to_raw(&self) -> RawSyntaxKind;\n\n\n\n /// Creates a syntax kind from a raw kind.\n\n fn from_raw(raw: RawSyntaxKind) -> Self;\n\n}\n\n\n", "file_path": "crates/rome_rowan/src/syntax.rs", "rank": 20, "score": 291887.878834674 }, { "content": "/// Outputs formatter IR for a JavaScript (and its super languages) file\n\n///\n\n/// It returns a [FormatElement] result. Mostly for debugging purposes.\n\npub fn to_format_element(\n\n options: FormatOptions,\n\n syntax: &JsSyntaxNode,\n\n) -> FormatResult<FormatElement> {\n\n Formatter::new(options).format_root(syntax)\n\n}\n\n\n", "file_path": "crates/rome_js_formatter/src/lib.rs", "rank": 21, "score": 289669.85901198146 }, { "content": "fn add_file_if_supported(files: &mut TestCaseFiles, name: String, content: String) {\n\n let path = Path::new(&name);\n\n // Skip files that aren't JS/TS files (JSON, CSS...)\n\n if let Ok(mut source_type) = SourceType::try_from(path) {\n\n let is_module_regex = Regex::new(\"(import|export)\\\\s\").unwrap();\n\n // A very basic heuristic to determine if a module is a `Script` or a `Module`.\n\n // The TypeScript parser automatically detects whatever a file is a module or a script\n\n // by the presence of any module syntax. Rome's parser doesn't support this today\n\n // because it would require moving any \"strict mode\" or \"module\" specific checks\n\n // into a second compiler pass. 
The reason this is needed is that the module syntax\n\n // may appear at the very end of the file after the parser has already processed\n\n // some syntax that is invalid in strict mode (for example, an \"arguments\" variable).\n\n if !is_module_regex.is_match(&content) {\n\n source_type = source_type.with_module_kind(ModuleKind::Script);\n\n }\n\n\n\n files.add(name, content, source_type)\n\n }\n\n}\n\n\n", "file_path": "xtask/coverage/src/ts/ts_microsoft.rs", "rank": 22, "score": 287865.4402176384 }, { "content": "pub fn apply_indels(indels: &[Indel], text: &mut String) {\n\n match indels.len() {\n\n 0 => return,\n\n 1 => {\n\n indels[0].apply(text);\n\n return;\n\n }\n\n _ => (),\n\n }\n\n\n\n let mut total_len = TextSize::of(&*text);\n\n for indel in indels.iter() {\n\n total_len += TextSize::of(&indel.insert);\n\n total_len -= indel.delete.end() - indel.delete.start();\n\n }\n\n let mut buf = String::with_capacity(total_len.into());\n\n let mut prev = 0;\n\n for indel in indels.iter() {\n\n let start: usize = indel.delete.start().into();\n\n let end: usize = indel.delete.end().into();\n", "file_path": "crates/rome_text_edit/src/lib.rs", "rank": 23, "score": 280493.29896737816 }, { "content": "#[inline]\n\npub fn join_elements_with<I, L>(elements: I, separator: fn() -> FormatElement) -> FormatElement\n\nwhere\n\n I: IntoIterator<Item = (SyntaxNode<L>, FormatElement)>,\n\n L: Language,\n\n{\n\n /// Get the number of line breaks between two consecutive SyntaxNodes in the tree\n\n fn get_lines_between_nodes<L: Language>(\n\n prev_node: &SyntaxNode<L>,\n\n next_node: &SyntaxNode<L>,\n\n ) -> usize {\n\n // Ensure the two nodes are actually siblings on debug\n\n debug_assert_eq!(prev_node.next_sibling().as_ref(), Some(next_node));\n\n debug_assert_eq!(next_node.prev_sibling().as_ref(), Some(prev_node));\n\n\n\n // Count the lines separating the two statements,\n\n // starting with the trailing trivia of the previous node\n\n let mut line_count = prev_node\n\n 
.last_trailing_trivia()\n\n .and_then(|prev_token| {\n\n // Newline pieces can only come last in trailing trivias, skip to it directly\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 24, "score": 278007.20735679666 }, { "content": "pub fn format_element(element: &FormatElement, options: FormatOptions) -> Formatted {\n\n let printer = Printer::new(options);\n\n printer.print(element)\n\n}\n", "file_path": "crates/rome_formatter/src/lib.rs", "rank": 25, "score": 277724.78349400836 }, { "content": "/// Concatenates the content of multiple [FormatElement]s.\n\n///\n\n/// ## Examples\n\n///\n\n/// ```rust\n\n/// use rome_formatter::{concat_elements, FormatElement, space_token, token, format_element, FormatOptions};\n\n/// let expr = concat_elements(vec![token(\"a\"), space_token(), token(\"+\"), space_token(), token(\"b\")]);\n\n///\n\n/// assert_eq!(\"a + b\", format_element(&expr, FormatOptions::default()).as_code())\n\n/// ```\n\npub fn concat_elements<I>(elements: I) -> FormatElement\n\nwhere\n\n I: IntoIterator<Item = FormatElement>,\n\n{\n\n let mut elements = elements.into_iter();\n\n\n\n let (lower_bound, upper_bound) = elements.size_hint();\n\n let size_hint = upper_bound.unwrap_or(lower_bound);\n\n\n\n // If the first non empty element is a vec, use it,\n\n // otherwise create a new one with the current element\n\n let mut concatenated = loop {\n\n match elements.next() {\n\n Some(FormatElement::Empty) => continue,\n\n Some(FormatElement::List(list)) => {\n\n let mut v = list.content;\n\n v.reserve(size_hint);\n\n break v;\n\n }\n\n Some(element) => {\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 26, "score": 276463.41256561736 }, { "content": "/// Replace the line terminators matching the provided list with \"\\n\"\n\n/// since its the only line break type supported by the printer\n\npub fn normalize_newlines<const N: usize>(text: &str, terminators: [char; N]) -> Cow<str> {\n\n let mut result = 
String::new();\n\n let mut last_end = 0;\n\n\n\n for (start, part) in text.match_indices(terminators) {\n\n result.push_str(&text[last_end..start]);\n\n result.push('\\n');\n\n\n\n last_end = start + part.len();\n\n // If the current character is \\r and the\n\n // next is \\n, skip over the entire sequence\n\n if part == \"\\r\" && text[last_end..].starts_with('\\n') {\n\n last_end += 1;\n\n }\n\n }\n\n\n\n // If the result is empty no line terminators were matched,\n\n // return the entire input text without allocating a new String\n\n if result.is_empty() {\n\n Cow::Borrowed(text)\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 27, "score": 275424.4960520372 }, { "content": "fn assert_disjoint(indels: &mut [impl std::borrow::Borrow<Indel>]) {\n\n assert!(check_disjoint(indels));\n\n}\n", "file_path": "crates/rome_text_edit/src/lib.rs", "rank": 28, "score": 271287.15855257 }, { "content": "#[inline]\n\npub fn join_elements_hard_line<I, L>(elements: I) -> FormatElement\n\nwhere\n\n I: IntoIterator<Item = (SyntaxNode<L>, FormatElement)>,\n\n L: Language,\n\n{\n\n join_elements_with(elements, hard_line_break)\n\n}\n\n\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 29, "score": 263724.7038985842 }, { "content": "#[inline]\n\npub fn join_elements_soft_line<I, L>(elements: I) -> FormatElement\n\nwhere\n\n I: IntoIterator<Item = (SyntaxNode<L>, FormatElement)>,\n\n L: Language,\n\n{\n\n join_elements_with(elements, soft_line_break_or_space)\n\n}\n\n\n\n/// Specialized version of [join_elements] for joining SyntaxNodes separated by one or more\n\n/// line breaks depending on the input file.\n\n///\n\n/// This functions inspects the input source and separates consecutive elements with either\n\n/// a [hard_line_break] or [empty_line] depending on how many line breaks were separating the\n\n/// elements in the original file.\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 30, "score": 
263724.7038985842 }, { "content": "fn check_disjoint(indels: &mut [impl std::borrow::Borrow<Indel>]) -> bool {\n\n indels.sort_by_key(|indel| (indel.borrow().delete.start(), indel.borrow().delete.end()));\n\n indels\n\n .iter()\n\n .zip(indels.iter().skip(1))\n\n .all(|(l, r)| l.borrow().delete.end() <= r.borrow().delete.start())\n\n}\n", "file_path": "crates/rome_text_edit/src/lib.rs", "rank": 31, "score": 262956.9637216875 }, { "content": "#[inline]\n\npub fn join_elements<TSep, I>(separator: TSep, elements: I) -> FormatElement\n\nwhere\n\n TSep: Into<FormatElement>,\n\n I: IntoIterator<Item = FormatElement>,\n\n{\n\n concat_elements(Intersperse::new(\n\n elements.into_iter().filter(|e| !e.is_empty()),\n\n separator.into(),\n\n ))\n\n}\n\n\n\n/// It adds a level of indentation to the given content\n\n///\n\n/// It doesn't add any line breaks at the edges of the content, meaning that\n\n/// the line breaks have to be manually added.\n\n///\n\n/// This helper should be used only in rare cases, instead you should rely more on\n\n/// [block_indent] and [soft_block_indent]\n\n///\n\n/// ## Examples\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 32, "score": 252565.80607725843 }, { "content": "struct DisplayDebug<T>(T);\n\nimpl<T: fmt::Debug> fmt::Display for DisplayDebug<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Debug::fmt(&self.0, f)\n\n }\n\n}\n\n\n\nimpl<L: Language> Serialize for SyntaxNode<L> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut state = serializer.serialize_map(Some(3))?;\n\n state.serialize_entry(\"kind\", &SerDisplay(DisplayDebug(self.kind())))?;\n\n state.serialize_entry(\"text_range\", &self.text_range())?;\n\n state.serialize_entry(\"children\", &Children(self))?;\n\n state.end()\n\n }\n\n}\n\n\n", "file_path": "crates/rome_rowan/src/serde_impls.rs", "rank": 33, "score": 252122.73445714155 }, { "content": "pub fn 
generate_formatter() {\n\n let repo = GitRepo::open();\n\n\n\n let ast = load_js_ast();\n\n\n\n // Store references to all the files created by the codegen\n\n // script to build the module import files\n\n let mut modules = ModuleIndex::new(project_root().join(\"crates/rome_js_formatter/src\"));\n\n\n\n // Build an unified iterator over all the AstNode types\n\n let names = ast\n\n .nodes\n\n .into_iter()\n\n .map(|node| (NodeKind::Node, node.name))\n\n .chain(ast.lists.into_iter().map(|(name, node)| {\n\n (\n\n NodeKind::List {\n\n separated: node.separator.is_some(),\n\n },\n\n name,\n", "file_path": "xtask/codegen/src/formatter.rs", "rank": 34, "score": 251205.69247879807 }, { "content": "pub fn get_code(lib: &str) -> Result<(String, String), String> {\n\n let url = url::Url::from_str(lib).map_err(err_to_string)?;\n\n let segments = url\n\n .path_segments()\n\n .ok_or_else(|| \"lib url has no segments\".to_string())?;\n\n let filename = segments\n\n .last()\n\n .ok_or_else(|| \"lib url has no segments\".to_string())?;\n\n\n\n let file = Path::new(\n\n &env::var(\"CARGO_MANIFEST_DIR\").unwrap_or_else(|_| env!(\"CARGO_MANIFEST_DIR\").to_owned()),\n\n )\n\n .ancestors()\n\n .nth(2)\n\n .unwrap()\n\n .join(\"target\")\n\n .join(filename);\n\n\n\n match std::fs::read_to_string(&file) {\n\n Ok(code) => {\n", "file_path": "xtask/bench/src/utils.rs", "rank": 35, "score": 248614.9169211455 }, { "content": "pub fn date_iso() -> Result<String> {\n\n run!(\"date --iso --utc\")\n\n}\n\n\n", "file_path": "xtask/src/glue.rs", "rank": 36, "score": 247600.12111768223 }, { "content": "#[derive(Debug, Eq, PartialEq, Clone)]\n\nstruct PrintElementCall<'element> {\n\n element: &'element FormatElement,\n\n args: PrintElementArgs,\n\n}\n\n\n\nimpl<'element> PrintElementCall<'element> {\n\n pub fn new(element: &'element FormatElement, args: PrintElementArgs) -> Self {\n\n Self { element, args }\n\n }\n\n}\n\n\n\n/// Small helper that manages the order in which the elements should be 
visited.\n", "file_path": "crates/rome_formatter/src/printer.rs", "rank": 37, "score": 246863.4307821924 }, { "content": "/// Formatting trait for types to be displayed as markup, the `rome_console`\n\n/// equivalent to [std::fmt::Display]\n\n///\n\n/// # Example\n\n/// Implementing `Display` on a custom struct\n\n/// ```\n\n/// use std::io;\n\n/// use rome_console::{fmt::{Display, Formatter}, markup};\n\n///\n\n/// struct Warning(String);\n\n///\n\n/// impl Display for Warning {\n\n/// fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {\n\n/// fmt.write_markup(markup! {\n\n/// <Warn>{self.0}</Warn>\n\n/// })\n\n/// }\n\n/// }\n\n///\n\n/// let warning = Warning(String::from(\"content\"));\n\n/// markup! {\n\n/// <Emphasis>{warning}</Emphasis>\n\n/// };\n\n/// ```\n\npub trait Display {\n\n fn fmt(&self, fmt: &mut Formatter) -> io::Result<()>;\n\n}\n\n\n\n// Blanket implementations of Display for reference types\n\nimpl<'a, T> Display for &'a T\n\nwhere\n\n T: Display + ?Sized,\n\n{\n\n fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {\n\n T::fmt(self, fmt)\n\n }\n\n}\n\n\n\nimpl<'a, T> Display for Cow<'a, T>\n\nwhere\n\n T: Display + ToOwned + ?Sized,\n\n{\n\n fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {\n\n T::fmt(self, fmt)\n", "file_path": "crates/rome_console/src/fmt.rs", "rank": 38, "score": 243204.20537158605 }, { "content": "pub trait Write {\n\n fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()>;\n\n fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()>;\n\n}\n\n\n", "file_path": "crates/rome_console/src/fmt.rs", "rank": 39, "score": 243135.59319970358 }, { "content": "/// Factory for creating syntax nodes of a particular kind.\n\npub trait SyntaxFactory: fmt::Debug {\n\n /// The syntax kind used by the nodes constructed by this syntax factory.\n\n type Kind: SyntaxKind;\n\n\n\n /// Creates a new syntax node of the passed `kind` with the given children.\n\n ///\n\n 
/// The `children` contains the parsed direct children of the node. There may be fewer children\n\n /// in case there's a syntax error and a required child or an optional child isn't present in the source code.\n\n /// The `make_syntax` implementation must then fill in empty slots to match the slots as they're defined in the grammar.\n\n ///\n\n /// The implementation is free to change the `kind` of the node but that has the consequence that\n\n /// such a node will not be cached. The reason for not caching these nodes is that the cache lookup is performed\n\n /// before calling `make_syntax`, thus querying the cache with the old kind.\n\n ///\n\n /// It's important that the factory function is idempotent, meaning, calling the function\n\n /// multiple times with the same `kind` and `children` returns syntax nodes with the same structure.\n\n /// This is important because the returned nodes may be cached by `kind` and what `children` are present.\n\n fn make_syntax(\n\n kind: Self::Kind,\n\n children: ParsedChildren<Self::Kind>,\n", "file_path": "crates/rome_rowan/src/syntax_factory.rs", "rank": 40, "score": 239202.6610724729 }, { "content": "#[test]\n\npub fn serialization() {\n\n let mut builder: crate::TreeBuilder<crate::syntax::RawLanguage> = crate::TreeBuilder::new();\n\n builder.start_node(crate::RawSyntaxKind(0));\n\n builder.token(crate::RawSyntaxKind(0), \"\\n\\tlet \");\n\n builder.finish_node();\n\n let root = builder.finish();\n\n\n\n assert!(serde_json::to_string(&root).is_ok());\n\n}\n", "file_path": "crates/rome_rowan/src/serde_impls.rs", "rank": 41, "score": 239186.99723600724 }, { "content": "pub fn to_lower_snake_case(s: &str) -> String {\n\n let mut buf = String::with_capacity(s.len());\n\n let mut prev = false;\n\n for c in s.chars() {\n\n if c.is_ascii_uppercase() && prev {\n\n buf.push('_')\n\n }\n\n prev = true;\n\n\n\n buf.push(c.to_ascii_lowercase());\n\n }\n\n buf\n\n}\n", "file_path": "xtask/codegen/src/lib.rs", "rank": 42, "score": 
233289.93271158487 }, { "content": "pub fn to_upper_snake_case(s: &str) -> String {\n\n let mut buf = String::with_capacity(s.len());\n\n let mut prev = false;\n\n for c in s.chars() {\n\n if c.is_ascii_uppercase() && prev {\n\n buf.push('_')\n\n }\n\n prev = true;\n\n\n\n buf.push(c.to_ascii_uppercase());\n\n }\n\n buf\n\n}\n\n\n", "file_path": "xtask/codegen/src/lib.rs", "rank": 43, "score": 233289.93271158487 }, { "content": "/// Formats a range within a file, supported by Rome\n\n///\n\n/// This runs a simple heuristic to determine the initial indentation\n\n/// level of the node based on the provided [FormatOptions], which\n\n/// must match currently the current initial of the file. Additionally,\n\n/// because the reformatting happens only locally the resulting code\n\n/// will be indented with the same level as the original selection,\n\n/// even if it's a mismatch from the rest of the block the selection is in\n\n///\n\n/// It returns a [Formatted] result with a range corresponding to the\n\n/// range of the input that was effectively overwritten by the formatter\n\npub fn format_range(\n\n options: FormatOptions,\n\n root: &JsSyntaxNode,\n\n range: TextRange,\n\n) -> FormatResult<Formatted> {\n\n // Find the tokens corresponding to the start and end of the range\n\n let start_token = root.token_at_offset(range.start());\n\n let end_token = root.token_at_offset(range.end());\n\n\n\n // If these tokens were not found this means either:\n\n // 1. The input [SyntaxNode] was empty\n\n // 2. 
The input node was not the root [SyntaxNode] of the file\n\n // In the first case we can return an empty result immediately,\n\n // otherwise default to the first and last tokens in the root node\n\n let start_token = match start_token {\n\n // If the start of the range lies between two tokens,\n\n // start at the rightmost one\n\n TokenAtOffset::Between(_, token) => token,\n\n TokenAtOffset::Single(token) => token,\n\n TokenAtOffset::None => match root.first_token() {\n", "file_path": "crates/rome_js_formatter/src/lib.rs", "rank": 44, "score": 232764.14794069465 }, { "content": "/// Find and replace the cursor, range start and range end placeholders in a\n\n/// Prettier snapshot tests and return their indices in the resulting string\n\nfn strip_placeholders(input_code: &mut String) -> (Option<usize>, Option<usize>, Option<usize>) {\n\n const CURSOR_PLACEHOLDER: &str = \"<|>\";\n\n const RANGE_START_PLACEHOLDER: &str = \"<<<PRETTIER_RANGE_START>>>\";\n\n const RANGE_END_PLACEHOLDER: &str = \"<<<PRETTIER_RANGE_END>>>\";\n\n\n\n let mut cursor_index = None;\n\n let mut range_start_index = None;\n\n let mut range_end_index = None;\n\n\n\n if let Some(index) = input_code.find(CURSOR_PLACEHOLDER) {\n\n input_code.replace_range(index..index + CURSOR_PLACEHOLDER.len(), \"\");\n\n cursor_index = Some(index);\n\n }\n\n\n\n if let Some(index) = input_code.find(RANGE_START_PLACEHOLDER) {\n\n input_code.replace_range(index..index + RANGE_START_PLACEHOLDER.len(), \"\");\n\n range_start_index = Some(index);\n\n\n\n if let Some(cursor) = &mut cursor_index {\n\n if *cursor > index {\n", "file_path": "crates/rome_js_formatter/tests/prettier_tests.rs", "rank": 45, "score": 232369.03124341805 }, { "content": "struct ExpectedTokens(String);\n\n\n\nimpl ToDiagnostic for ExpectedTokens {\n\n fn to_diagnostic(self, p: &Parser) -> Diagnostic {\n\n match p.cur() {\n\n JsSyntaxKind::EOF => p\n\n .err_builder(&format!(\"expected {} but instead the file ends\", self.0))\n\n 
.primary(p.cur_range(), \"the file ends here\"),\n\n _ => p\n\n .err_builder(&format!(\n\n \"expected {} but instead found `{}`\",\n\n self.0,\n\n p.cur_src()\n\n ))\n\n .primary(p.cur_range(), \"unexpected\"),\n\n }\n\n }\n\n}\n", "file_path": "crates/rome_js_parser/src/parser/parse_error.rs", "rank": 46, "score": 230741.94645131886 }, { "content": "#[test]\n\npub fn jsroot_display_text_and_trimmed() {\n\n let code = \" let a = 1; \\n \";\n\n let root = parse_module(code, 0);\n\n let syntax = root.syntax();\n\n\n\n assert_eq!(format!(\"{}\", syntax), code);\n\n\n\n let syntax_text = syntax.text();\n\n assert_eq!(format!(\"{}\", syntax_text), code);\n\n\n\n let syntax_text = syntax.text_trimmed();\n\n assert_eq!(format!(\"{}\", syntax_text), code.trim());\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/tests.rs", "rank": 47, "score": 230156.88560839318 }, { "content": "/// Utility function to use to format ternary operators\n\n///\n\n/// # Panics\n\n///\n\n/// It panics if it's used with nodes that are different from:\n\n/// - [rome_js_syntax::TsConditionalType]\n\n/// - [rome_js_syntax::JsConditionalExpression]\n\npub fn format_conditional(\n\n conditional: Conditional,\n\n formatter: &Formatter,\n\n parent_is_conditional: bool,\n\n) -> FormatResult<FormatElement> {\n\n conditional.into_format_element(formatter, parent_is_conditional)\n\n}\n", "file_path": "crates/rome_js_formatter/src/utils/format_conditional.rs", "rank": 48, "score": 226960.37147592785 }, { "content": "pub fn invalid_digits_after_unicode_escape_sequence(\n\n file_id: FileId,\n\n start: usize,\n\n end: usize,\n\n) -> Diagnostic {\n\n Diagnostic::error(file_id, \"\", \"invalid digits after unicode escape sequence\")\n\n .primary(start..end, \"expected valid unicode escape sequence\")\n\n}\n", "file_path": "crates/rome_js_parser/src/lexer/errors.rs", "rank": 49, "score": 224580.2979960264 }, { "content": "/// Utility function that applies some heuristic to format chain member expressions and 
call expressions\n\n///\n\n/// We want to transform code that looks like this:\n\n///\n\n/// ```js\n\n/// something.execute().then().then().catch()\n\n/// ```\n\n///\n\n/// To something like this:\n\n///\n\n/// ```js\n\n/// something\n\n/// .execute()\n\n/// .then()\n\n/// .then()\n\n/// .catch()\n\n/// ```\n\n///\n\n/// In order to achieve that we use the same heuristic that [Prettier applies].\n\n///\n\n/// The process is the following:\n\n///\n\n/// ### Flattening the AST\n\n/// We flatten the AST. See, the code above is actually nested, where the first member expression (`something`)\n\n/// that we see is actually the last one. This is a oversimplified version of the AST:\n\n///\n\n/// ```block\n\n/// [\n\n/// .catch() [\n\n/// .then() [\n\n/// .then() [\n\n/// .execute() [\n\n/// something\n\n/// ]\n\n/// ]\n\n/// ]\n\n/// ]\n\n/// ]\n\n/// ```\n\n/// So we need to navigate the AST and make sure that `something` is actually\n\n/// the first one. In a sense, we have to revert the chain of children. We will do that using a recursion.\n\n///\n\n/// While we navigate the AST and we found particular nodes that we want to track, we also\n\n/// format them. The format of these nodes is different from the standard version.\n\n///\n\n/// Our formatter makes sure that we don't format twice the same nodes. Let's say for example that\n\n/// we find a `something().execute()`, its AST is like this:\n\n///\n\n/// ```block\n\n/// JsCallExpression {\n\n/// callee: JsStaticMember {\n\n/// object: JsCallExpression {\n\n/// callee: Reference {\n\n/// execute\n\n/// }\n\n/// }\n\n/// }\n\n/// }\n\n/// ```\n\n///\n\n/// When we track the first [rome_js_syntax::JsCallExpression], we hold basically all the children,\n\n/// that applies for the rest of the nodes. 
If we decided to format all the children of each node,\n\n/// we will risk to format the last node, the `Reference`, four times.\n\n///\n\n/// To avoid this, when we encounter particular nodes, we don't format all of its children, but defer\n\n/// the formatting to the child itself.\n\n///\n\n/// The end result of the flattening, will create an array of something like this:\n\n///\n\n/// ```block\n\n/// [ Identifier, JsCallExpression, JsStaticMember, JsCallExpression ]\n\n/// ```\n\n///\n\n/// ### Grouping\n\n///\n\n/// After the flattening, we start the grouping. We want to group nodes in a way that will help us\n\n/// to apply a deterministic formatting.\n\n/// - first group will be the identifier\n\n/// - the rest of the groups will be will start StaticMemberExpression followed by the rest of the nodes,\n\n/// right before the end of the next StaticMemberExpression\n\n///\n\n/// The first group is special, because it holds the reference; it has its own heuristic.\n\n/// Inside the first group we store the first element of the flattened array, then:\n\n///\n\n/// 1. as many as [rome_js_syntax::JsCallExpression] we can find, this cover cases like\n\n/// `something()()().then()`;\n\n/// 2. as many as [rome_js_syntax::JsComputedMemberExpression] we can find, this cover cases like\n\n/// `something()()[1][3].then()`;\n\n/// 3. 
as many as consecutive [rome_js_syntax::JsStaticMemberExpression] or [rome_js_syntax::JsComputedExpression], this cover cases like\n\n/// `this.items[0].then()`\n\n///\n\n/// The rest of the groups are essentially a sequence of `[StaticMemberExpression , CallExpression]`.\n\n/// In order to achieve that, we simply start looping through the rest of the flatten items that we haven't seen.\n\n///\n\n/// Eventually, we should have something like this:\n\n///\n\n/// ```block\n\n/// [\n\n/// [ReferenceIdentifier, CallExpression], // with possible computed expressions in the middle\n\n/// [StaticMemberExpression, StaticMemberExpression, CallExpression],\n\n/// [StaticMemberExpression, CallExpression],\n\n/// [StaticMemberExpression],\n\n/// ]\n\n/// ```\n\n///\n\n/// [Prettier applies]: https://github.com/prettier/prettier/blob/main/src/language-js/print/member-chain.js\n\npub fn format_call_expression(\n\n syntax_node: &JsSyntaxNode,\n\n formatter: &Formatter,\n\n) -> FormatResult<FormatElement> {\n\n let mut flattened_items = vec![];\n\n\n\n flatten_call_expression(&mut flattened_items, syntax_node.clone(), formatter)?;\n\n\n\n // Count the number of CallExpression in the chain,\n\n // will be used later to decide on how to format it\n\n let calls_count = flattened_items\n\n .iter()\n\n .filter(|item| item.is_loose_call_expression())\n\n .count();\n\n\n\n // as explained before, the first group is particular, so we calculate it\n\n let index_to_split_at = compute_first_group_index(&flattened_items);\n\n let mut flattened_items = flattened_items.into_iter();\n\n // we have the index where we want to take the first group\n\n let first_group = concat_elements(\n\n (&mut flattened_items)\n\n .take(index_to_split_at)\n\n .map(FlattenItem::into),\n\n );\n\n // `flattened_items` now contains only the nodes that should have a sequence of\n\n // `[ StaticMemberExpression -> AnyNode + JsCallExpression ]`\n\n let rest_of_groups = compute_groups(flattened_items)?;\n\n 
Ok(format_groups(calls_count, first_group, rest_of_groups))\n\n}\n\n\n", "file_path": "crates/rome_js_formatter/src/utils/call_expression.rs", "rank": 50, "score": 224204.87396183066 }, { "content": "pub fn pushd(path: impl Into<PathBuf>) -> Pushd {\n\n Env::with(|env| env.pushd(path.into()));\n\n Pushd { _p: () }\n\n}\n\n\n\nimpl Drop for Pushd {\n\n fn drop(&mut self) {\n\n Env::with(|env| env.popd())\n\n }\n\n}\n\n\n\npub struct Pushenv {\n\n _p: (),\n\n}\n\n\n", "file_path": "xtask/src/glue.rs", "rank": 51, "score": 221420.86852313188 }, { "content": "pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> {\n\n let path = path.as_ref();\n\n if !path.exists() {\n\n return Ok(());\n\n }\n\n if path.is_file() {\n\n fs2::remove_file(path)\n\n } else {\n\n fs2::remove_dir_all(path)\n\n }\n\n}\n\n\n", "file_path": "xtask/src/glue.rs", "rank": 52, "score": 218514.2612817494 }, { "content": "pub fn all() -> impl Iterator<Item = &'static Analyzer> {\n\n ALL_ANALYZERS.iter()\n\n}\n\n\n", "file_path": "crates/rome_analyze/src/analyzers.rs", "rank": 53, "score": 218514.2612817494 }, { "content": "#[doc(hidden)]\n\npub fn run_process(cmd: String, echo: bool, stdin: Option<&[u8]>) -> Result<String> {\n\n run_process_inner(&cmd, echo, stdin).with_context(|| format!(\"process `{}` failed\", cmd))\n\n}\n\n\n", "file_path": "xtask/src/glue.rs", "rank": 54, "score": 217764.15651744726 }, { "content": "pub fn all() -> impl Iterator<Item = &'static AssistProvider> {\n\n ALL_ASSIST_PROVIDERS.iter()\n\n}\n\n\n\npub struct AssistContext<'a> {\n\n file_id: FileId,\n\n cursor_range: TextRange,\n\n offset: TextSize,\n\n analysis_server: &'a AnalysisServer,\n\n assist_provider: &'a AssistProvider,\n\n}\n\n\n\nimpl<'a> AssistContext<'a> {\n\n pub(crate) fn new(\n\n analysis_server: &'a AnalysisServer,\n\n file_id: FileId,\n\n cursor_range: TextRange,\n\n assist_provider: &'a AssistProvider,\n\n ) -> Self {\n\n let offset = cursor_range.start();\n", "file_path": 
"crates/rome_analyze/src/assists.rs", "rank": 55, "score": 215706.74724262647 }, { "content": "pub fn syntax_tree(document: Document) -> Result<String> {\n\n info!(\"Showing syntax tree\");\n\n trace!(\"Showing syntax tree for: {:?}\", document);\n\n let text = &document.text;\n\n let file_id = document.file_id();\n\n let source_type = document.get_source_type();\n\n let parse_result = parse(text, file_id, source_type);\n\n let cst = format!(\"{:#?}\", parse_result.tree());\n\n\n\n Ok(cst)\n\n}\n", "file_path": "crates/rome_lsp/src/requests/syntax_tree.rs", "rank": 56, "score": 215691.70153175606 }, { "content": "fn element_id(elem: GreenElementRef<'_>) -> *const () {\n\n match elem {\n\n NodeOrToken::Node(it) => it as *const GreenNodeData as *const (),\n\n NodeOrToken::Token(it) => it as *const GreenTokenData as *const (),\n\n }\n\n}\n\n\n\nimpl NodeCache {\n\n /// Hash used for nodes that haven't been cached because it has too many slots or\n\n /// one of its children wasn't cached.\n\n const UNCACHED_NODE_HASH: u64 = 0;\n\n\n\n /// Tries to retrieve a node with the given `kind` and `children` from the cache.\n\n ///\n\n /// Returns an entry that allows the caller to:\n\n /// * Retrieve the cached node if it is present in the cache\n\n /// * Insert a node if it isn't present in the cache\n\n pub(crate) fn node(\n\n &mut self,\n\n kind: RawSyntaxKind,\n", "file_path": "crates/rome_rowan/src/green/node_cache.rs", "rank": 57, "score": 214761.36741780498 }, { "content": "fn check_file_encoding(path: &std::path::Path) -> Option<String> {\n\n let buffer = std::fs::read(path).unwrap();\n\n decode_maybe_utf16_string(&buffer)\n\n .ok()\n\n .map(|decoded| decoded.to_string())\n\n}\n", "file_path": "xtask/coverage/src/lib.rs", "rank": 58, "score": 213062.19071284472 }, { "content": "#[allow(unused)]\n\npub fn action_providers() -> impl Iterator<Item = &'static Analyzer> {\n\n ALL_ANALYZERS\n\n .iter()\n\n .filter(|a| !a.action_categories.is_empty())\n\n}\n", "file_path": 
"crates/rome_analyze/src/analyzers.rs", "rank": 59, "score": 212993.34385245704 }, { "content": "/// Main function to run Rome CLI\n\npub fn run_cli(mut session: CliSession) -> Result<(), Termination> {\n\n let has_metrics = session.args.contains(\"--show-metrics\");\n\n if has_metrics {\n\n crate::metrics::init_metrics();\n\n }\n\n\n\n let has_help = session.args.contains(\"--help\");\n\n let subcommand = session\n\n .args\n\n .subcommand()\n\n .map_err(|source| Termination::ParseError {\n\n argument: \"<command>\",\n\n source,\n\n })?;\n\n\n\n // True if the command line did not contain any arguments beside the subcommand\n\n let is_empty = session.args.clone().finish().is_empty();\n\n\n\n match subcommand.as_deref() {\n\n // Print the help for the subcommand if it was called with `--help`\n", "file_path": "crates/rome_cli/src/lib.rs", "rank": 60, "score": 210221.01940500698 }, { "content": "#[derive(Debug, Default, Clone, Eq, PartialEq)]\n\nstruct PrintElementArgs {\n\n indent: u16,\n\n hard_group: bool,\n\n}\n\n\n\nimpl PrintElementArgs {\n\n pub fn new(indent: u16) -> Self {\n\n Self {\n\n indent,\n\n hard_group: false,\n\n }\n\n }\n\n\n\n pub fn with_incremented_indent(self) -> Self {\n\n Self::new(self.indent + 1)\n\n }\n\n\n\n pub fn with_hard_group(self, hard_group: bool) -> Self {\n\n Self { hard_group, ..self }\n\n }\n", "file_path": "crates/rome_formatter/src/printer.rs", "rank": 61, "score": 210031.00751351687 }, { "content": "struct DebugSyntaxElement(SyntaxElement);\n\nimpl Debug for DebugSyntaxElement {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n match &self.0 {\n\n SyntaxElement::Node(node) => {\n\n map_syntax_node ! (node . 
clone () , node => std :: fmt :: Debug :: fmt (& node , f))\n\n }\n\n SyntaxElement::Token(token) => Debug::fmt(token, f),\n\n }\n\n }\n\n}\n", "file_path": "crates/rome_css_syntax/src/generated/nodes.rs", "rank": 62, "score": 209105.3518340417 }, { "content": "struct DebugSyntaxElement(SyntaxElement);\n\nimpl Debug for DebugSyntaxElement {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n match &self.0 {\n\n SyntaxElement::Node(node) => {\n\n map_syntax_node ! (node . clone () , node => std :: fmt :: Debug :: fmt (& node , f))\n\n }\n\n SyntaxElement::Token(token) => Debug::fmt(token, f),\n\n }\n\n }\n\n}\n", "file_path": "crates/rome_js_syntax/src/generated/nodes.rs", "rank": 63, "score": 209105.3518340417 }, { "content": "#[derive(Debug, Default)]\n\nstruct SnapshotContent {\n\n input: String,\n\n output: Vec<(String, FormatOptions)>,\n\n}\n\n\n\nimpl SnapshotContent {\n\n pub fn add_output(&mut self, formatted: Formatted, options: FormatOptions) {\n\n let mut output: String = formatted.as_code().into();\n\n if !formatted.verbatim().is_empty() {\n\n output.push_str(\"\\n\\n\");\n\n output.push_str(\"## Unimplemented nodes/tokens\");\n\n output.push_str(\"\\n\\n\");\n\n for (text, range) in formatted.verbatim() {\n\n let string = format!(\"{:?} => {:?}\\n\", text, range);\n\n output.push_str(string.as_str());\n\n }\n\n }\n\n self.output.push((output, options));\n\n }\n\n\n", "file_path": "crates/rome_js_formatter/tests/spec_test.rs", "rank": 64, "score": 207049.9840801882 }, { "content": "/// Error returned if printing an item as a flat string fails because it either contains\n\n/// explicit line breaks or would otherwise exceed the specified line width.\n\nstruct LineBreakRequiredError;\n\n\n\n/// Prints the format elements into a string\n\n#[derive(Debug, Clone, Default)]\n\npub struct Printer<'a> {\n\n options: PrinterOptions,\n\n state: PrinterState<'a>,\n\n}\n\n\n\nimpl<'a> Printer<'a> {\n\n pub fn new<T: Into<PrinterOptions>>(options: T) -> 
Self {\n\n Self {\n\n options: options.into(),\n\n state: PrinterState::default(),\n\n }\n\n }\n\n\n\n /// Prints the passed in element as well as all its content\n\n pub fn print(self, element: &'a FormatElement) -> Formatted {\n\n tracing::debug_span!(\"Printer::print\").in_scope(move || self.print_with_indent(element, 0))\n", "file_path": "crates/rome_formatter/src/printer.rs", "rank": 65, "score": 206997.59223891154 }, { "content": "type Content = Box<FormatElement>;\n\n\n", "file_path": "crates/rome_formatter/src/format_element.rs", "rank": 66, "score": 206582.4345518891 }, { "content": "/// Format a single `else? if(test) consequent` element, returning the next else clause\n\nfn format_if_element(\n\n formatter: &Formatter,\n\n else_token: Option<JsSyntaxToken>,\n\n stmt: &JsIfStatement,\n\n) -> FormatResult<(FormatElement, Option<JsElseClause>)> {\n\n let JsIfStatementFields {\n\n if_token,\n\n l_paren_token,\n\n test,\n\n r_paren_token,\n\n consequent,\n\n else_clause,\n\n } = stmt.as_fields();\n\n\n\n let head = format_elements![\n\n else_token.format_with_or_empty(formatter, |token| format_elements![\n\n space_token(),\n\n token,\n\n space_token(),\n\n ])?,\n", "file_path": "crates/rome_js_formatter/src/js/statements/if_statement.rs", "rank": 67, "score": 203366.5610630419 }, { "content": "/// Perform a second pass of formatting on a file, printing a diff if the\n\n/// output doesn't match the input\n\npub fn check_reformat(params: CheckReformatParams) {\n\n let CheckReformatParams {\n\n root,\n\n text,\n\n source_type,\n\n file_name,\n\n format_options,\n\n } = params;\n\n\n\n let re_parse = parse(text, 0, source_type);\n\n\n\n // Panic if the result from the formatter has syntax errors\n\n if re_parse.has_errors() {\n\n let mut files = SimpleFiles::new();\n\n files.add(file_name.into(), text.into());\n\n\n\n let mut buffer = termcolor::Buffer::ansi();\n\n let mut emitter = Emitter::new(&files);\n\n\n\n for error in re_parse.diagnostics() {\n", 
"file_path": "crates/rome_js_formatter/tests/check_reformat.rs", "rank": 68, "score": 202681.48737722955 }, { "content": "/// Perform a second pass of formatting on a file, printing a diff if the\n\n/// output doesn't match the input\n\npub fn check_reformat(params: CheckReformatParams) {\n\n let CheckReformatParams {\n\n root,\n\n text,\n\n source_type,\n\n file_name,\n\n format_options,\n\n } = params;\n\n\n\n let re_parse = parse(text, 0, source_type);\n\n\n\n // Panic if the result from the formatter has syntax errors\n\n if re_parse.has_errors() {\n\n let mut files = SimpleFiles::new();\n\n files.add(file_name.into(), text.into());\n\n\n\n let mut buffer = termcolor::Buffer::ansi();\n\n let mut emitter = Emitter::new(&files);\n\n\n\n for error in re_parse.diagnostics() {\n", "file_path": "crates/rome_js_formatter/src/check_reformat.rs", "rank": 69, "score": 202681.48737722955 }, { "content": "/// This trait should be implemented on each node/value that should have a formatted representation\n\npub trait ToFormatElement {\n\n fn to_format_element(&self, formatter: &Formatter) -> FormatResult<FormatElement>;\n\n}\n\n\n\n/// Public return type of the formatter\n\npub type FormatResult<F> = Result<F, FormatError>;\n\n\n\n#[derive(Debug, PartialEq)]\n\n/// Series of errors encountered during formatting\n\npub enum FormatError {\n\n /// Node is missing and it should be required for a correct formatting\n\n MissingRequiredChild,\n\n\n\n /// In case our formatter doesn't know how to format a certain language\n\n UnsupportedLanguage,\n\n\n\n /// When the ability to format the current file has been turned off on purpose\n\n CapabilityDisabled,\n\n}\n\n\n", "file_path": "crates/rome_js_formatter/src/lib.rs", "rank": 70, "score": 200386.72441901517 }, { "content": "pub fn run_format(root: &JsSyntaxNode) -> Formatted {\n\n format(FormatOptions::default(), root).unwrap()\n\n}\n\n\n\nimpl FormatterMeasurement {\n\n fn total(&self) -> Duration {\n\n self.formatting\n\n 
}\n\n\n\n pub(crate) fn summary(&self) -> String {\n\n format!(\"{}, Formatting: {:?}\", self.id, self.total(),)\n\n }\n\n}\n\n\n\nimpl Display for FormatterMeasurement {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n let _ = writeln!(f, \"\\tFormatting: {:>10?}\", self.formatting);\n\n let _ = writeln!(f, \"\\t ----------\");\n\n let _ = writeln!(f, \"\\tTotal: {:>10?}\", self.total());\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "xtask/bench/src/features/formatter.rs", "rank": 71, "score": 199808.22108656427 }, { "content": "fn parse_ts_enum_id(p: &mut Parser, enum_token_range: TextRange) {\n\n match parse_binding(p) {\n\n Present(id) => {\n\n let text = p.source(id.range(p));\n\n if is_reserved_enum_name(text) {\n\n let err = p\n\n .err_builder(&format!(\n\n \"`{}` cannot be used as a enum name because it is already reserved\",\n\n text\n\n ))\n\n .primary(id.range(p), \"\");\n\n\n\n p.error(err);\n\n }\n\n }\n\n // test_err ts enum_decl_no_id\n\n // enum {A,B,C}\n\n // enum 1 {A,B,C}\n\n Absent => {\n\n if p.nth_at(1, L_CURLY) {\n", "file_path": "crates/rome_js_parser/src/syntax/typescript/statement.rs", "rank": 72, "score": 199040.62923572122 }, { "content": "fn should_error(name: &str, run_options: &[String]) -> bool {\n\n let error_reference_file = Path::new(REFERENCE_PATH).join(\n\n Path::new(name)\n\n .with_extension(\"errors.txt\")\n\n .file_name()\n\n .unwrap(),\n\n );\n\n\n\n if error_reference_file.exists() {\n\n return true;\n\n }\n\n\n\n run_options.iter().any(|option| {\n\n let errors_file_name = Path::new(name)\n\n .file_stem()\n\n .and_then(|name| name.to_str())\n\n .map(|name| format!(\"{name}({option}).errors.txt\"))\n\n .unwrap();\n\n\n\n let path = Path::new(REFERENCE_PATH).join(&errors_file_name);\n\n\n\n path.exists()\n\n })\n\n}\n", "file_path": "xtask/coverage/src/ts/ts_microsoft.rs", "rank": 73, "score": 198546.19086168864 }, { "content": "pub fn generate_macros(ast: &AstSrc, language_kind: LanguageKind) -> 
Result<String> {\n\n let syntax_kind = language_kind.syntax_kind();\n\n let syntax_node = language_kind.syntax_node();\n\n\n\n let match_arms: Vec<_> = ast\n\n .nodes\n\n .iter()\n\n .map(|node| {\n\n let name = format_ident!(\"{}\", node.name);\n\n let node_kind = format_ident!(\"{}\", to_upper_snake_case(&node.name));\n\n (name, node_kind)\n\n })\n\n .chain(ast.unknowns.iter().map(|node_name| {\n\n let name = format_ident!(\"{}\", node_name);\n\n let node_kind = format_ident!(\"{}\", to_upper_snake_case(node_name));\n\n (name, node_kind)\n\n }))\n\n .chain(ast.lists().map(|(node_name, _)| {\n\n let name = format_ident!(\"{}\", node_name);\n\n let node_kind = format_ident!(\"{}\", to_upper_snake_case(node_name));\n", "file_path": "xtask/codegen/src/generate_macros.rs", "rank": 74, "score": 198483.01757551028 }, { "content": "pub fn generate_nodes(ast: &AstSrc, language_kind: LanguageKind) -> Result<String> {\n\n let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = ast\n\n .nodes\n\n .iter()\n\n .map(|node| {\n\n let name = format_ident!(\"{}\", node.name);\n\n let node_kind = format_ident!(\"{}\", to_upper_snake_case(node.name.as_str()));\n\n\n\n let methods = node\n\n .fields\n\n .iter()\n\n .enumerate()\n\n .map(|(slot_index, field)| match field {\n\n Field::Token { name, kind, .. 
} => {\n\n let many = matches!(kind, TokenKind::Many(_));\n\n\n\n let method_name = if many {\n\n format_ident!(\"{}\", name)\n\n } else {\n\n field.method_name(language_kind)\n", "file_path": "xtask/codegen/src/generate_nodes.rs", "rank": 75, "score": 198483.01757551028 }, { "content": "const content = `\n\nconst env = ${ JSON.stringify({\n\n\tassetsRootUrl: env.assetsRootUrl,\n\n\tenv: env.env,\n\n\trole: \"client\",\n\n\tadsfafa: \"sdfsdff\",\n\n\tasdfasff: \"wefwefw\",\n\n \tfefef: \"sf sdfs fdsfdsf s dfsfds\"\n\n}, null, \"\\t\") });\n", "file_path": "crates/rome_js_formatter/tests/specs/prettier/js/strings/template-literals.js", "rank": 76, "score": 198194.4416918723 }, { "content": "fn is_at_named_tuple_type_element(p: &mut Parser) -> bool {\n\n let offset = if p.at(T![...]) { 1 } else { 0 };\n\n\n\n // a:\n\n let is_colon = p.nth_at(offset + 1, T![:]);\n\n // a?:\n\n let is_question_colon = p.nth_at(offset + 1, T![?]) && p.nth_at(offset + 2, T![:]);\n\n\n\n is_nth_at_identifier_or_keyword(p, offset) && (is_colon || is_question_colon)\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/syntax/typescript/types.rs", "rank": 77, "score": 197588.27467101978 }, { "content": "fn load_tests(suite: &dyn TestSuite, context: &mut TestRunContext) -> TestSuiteInstance {\n\n let paths = WalkDir::new(suite.base_path())\n\n .into_iter()\n\n .filter_map(Result::ok)\n\n .filter(|file| {\n\n let path = file.path();\n\n if !path.is_file() {\n\n return false;\n\n }\n\n\n\n if !suite.is_test(path) {\n\n return false;\n\n }\n\n\n\n if let Some(filter) = &context.filter {\n\n let normalized_path = path.to_string_lossy().replace('\\\\', \"/\");\n\n let normalized_query = filter.replace('\\\\', \"/\");\n\n normalized_path.contains(&normalized_query)\n\n } else {\n\n true\n", "file_path": "xtask/coverage/src/runner.rs", "rank": 78, "score": 196178.0442012129 }, { "content": "struct SerDisplay<T>(T);\n\nimpl<T: fmt::Display> Serialize for SerDisplay<T> {\n\n fn serialize<S>(&self, 
serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.collect_str(&self.0)\n\n }\n\n}\n\n\n", "file_path": "crates/rome_rowan/src/serde_impls.rs", "rank": 79, "score": 196129.93442087923 }, { "content": "/// An individual enum member\n\nfn parse_ts_enum_member(p: &mut Parser) -> ParsedSyntax {\n\n let member = p.start();\n\n\n\n let name = match p.cur() {\n\n T!['['] => syntax::object::parse_computed_member_name(p),\n\n T![#] => {\n\n let err = p\n\n .err_builder(\"An `enum` member cannot be private\")\n\n .primary(p.cur_range(), \"\");\n\n p.error(err);\n\n syntax::class::parse_private_class_member_name(p).map(|mut x| {\n\n x.change_to_unknown(p);\n\n x\n\n })\n\n }\n\n _ => parse_literal_as_ts_enum_member(p),\n\n };\n\n\n\n if name.is_absent() {\n\n member.abandon(p);\n\n return Absent;\n\n }\n\n\n\n let _ = parse_initializer_clause(p, ExpressionContext::default());\n\n\n\n Present(member.complete(p, TS_ENUM_MEMBER))\n\n}\n", "file_path": "crates/rome_js_parser/src/syntax/typescript/statement.rs", "rank": 80, "score": 195205.66973296445 }, { "content": "// test jsx jsx_member_element_name\n\n// <a.b.c.d></a.b.c.d>;\n\n// <a-b.c></a-b.c>;\n\n// <Abcd></Abcd>;\n\n//\n\n// test_err jsx jsx_namespace_member_element_name\n\n// <namespace:a></namespace:a>;\n\n// <namespace:a.b></namespace:a.b>;\n\nfn parse_jsx_any_element_name(p: &mut Parser) -> ParsedSyntax {\n\n let name = parse_jsx_name_or_namespace(p);\n\n name.map(|mut name| {\n\n if name.kind() == JSX_NAME && (p.at(T![.]) || !is_intrinsic_element(name.text(p))) {\n\n name.change_kind(p, JSX_REFERENCE_IDENTIFIER)\n\n } else if name.kind() == JSX_NAMESPACE_NAME && p.at(T![.]) {\n\n let error = p\n\n .err_builder(\"JSX property access expressions cannot include JSX namespace names.\")\n\n .primary(name.range(p), \"\");\n\n p.error(error);\n\n name.change_to_unknown(p);\n\n }\n\n\n\n while p.at(T![.]) {\n\n let m = name.precede(p);\n\n p.bump(T![.]);\n\n 
parse_name(p).or_add_diagnostic(p, expected_identifier);\n\n name = m.complete(p, JSX_MEMBER_NAME)\n\n }\n\n\n\n name\n\n })\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/syntax/jsx/mod.rs", "rank": 81, "score": 195138.2611821112 }, { "content": "#[derive(Debug, Default)]\n\nstruct ElementCallQueue<'a>(Vec<PrintElementCall<'a>>);\n\n\n\nimpl<'a> ElementCallQueue<'a> {\n\n #[inline]\n\n pub fn new() -> Self {\n\n Self(Vec::new())\n\n }\n\n\n\n #[inline]\n\n fn extend<T>(&mut self, calls: T)\n\n where\n\n T: IntoIterator<Item = PrintElementCall<'a>>,\n\n T::IntoIter: DoubleEndedIterator,\n\n {\n\n // Reverse the calls because elements are removed from the back of the vec\n\n // in reversed insertion order\n\n self.0.extend(calls.into_iter().rev());\n\n }\n\n\n\n #[inline]\n", "file_path": "crates/rome_formatter/src/printer.rs", "rank": 82, "score": 195039.38804752656 }, { "content": "pub fn generate_syntax_kinds(grammar: KindsSrc, language_kind: LanguageKind) -> Result<String> {\n\n let syntax_kind = language_kind.syntax_kind();\n\n let punctuation_values = grammar.punct.iter().map(|(token, _name)| {\n\n // These tokens, when parsed to proc_macro2::TokenStream, generates a stream of bytes\n\n // that can't be recognized by [quote].\n\n // Hence, they need to be thread differently\n\n if \"{}[]()`\".contains(token) {\n\n let c = token.chars().next().unwrap();\n\n quote! { #c }\n\n } else if *token == \"$=\" {\n\n let token = Literal::string(*token);\n\n quote! { #token }\n\n } else {\n\n let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));\n\n quote! 
{ #(#cs)* }\n\n }\n\n });\n\n let punctuation_strings = punctuation_values.clone().map(|name| name.to_string());\n\n\n\n let punctuation = grammar\n", "file_path": "xtask/codegen/src/generate_syntax_kinds.rs", "rank": 83, "score": 193940.2611659142 }, { "content": "pub fn generate_syntax_factory(ast: &AstSrc, language_kind: LanguageKind) -> Result<String> {\n\n let (syntax_kind, factory_kind) = match language_kind {\n\n LanguageKind::Js => (quote! { JsSyntaxKind }, quote! { JsSyntaxFactory }),\n\n LanguageKind::Css => (quote! { CssSyntaxKind }, quote! {CssSyntaxFactory}),\n\n };\n\n let normal_node_arms = ast.nodes.iter().map(|node| {\n\n let kind = format_ident!(\"{}\", to_upper_snake_case(&node.name));\n\n let expected_len = node.fields.len();\n\n\n\n let fields = node.fields.iter().map(|field| {\n\n let field_predicate = match field {\n\n Field::Node { ty, .. } => {\n\n let ast_type_name = format_ident!(\"{}\", ty);\n\n\n\n quote! {\n\n #ast_type_name::can_cast(element.kind())\n\n }\n\n }\n\n Field::Token { kind, .. 
} => match kind {\n\n TokenKind::Single(expected) => {\n", "file_path": "xtask/codegen/src/generate_syntax_factory.rs", "rank": 84, "score": 193940.2611659142 }, { "content": "fn parse_literal_as_ts_enum_member(p: &mut Parser) -> ParsedSyntax {\n\n let m = p.start();\n\n match p.cur() {\n\n JS_STRING_LITERAL | T![ident] => {\n\n p.bump_any();\n\n }\n\n t if t.is_keyword() => {\n\n p.bump_remap(T![ident]);\n\n }\n\n JS_NUMBER_LITERAL => {\n\n let err = p\n\n .err_builder(\"An enum member cannot have a numeric name\")\n\n .primary(p.cur_range(), \"\");\n\n p.error(err);\n\n p.bump_any()\n\n }\n\n _ => {\n\n m.abandon(p);\n\n return Absent;\n\n }\n\n }\n\n Present(m.complete(p, JS_LITERAL_MEMBER_NAME))\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/syntax/typescript/statement.rs", "rank": 85, "score": 192819.42052550384 }, { "content": "#[quickcheck]\n\nfn losslessness(string: String) -> bool {\n\n // using an mpsc channel allows us to spawn a thread and spawn the lexer there, then if\n\n // it takes more than 2 seconds we panic because it is 100% infinite recursion\n\n let cloned = string.clone();\n\n let (sender, receiver) = channel();\n\n thread::spawn(move || {\n\n let mut lexer = Lexer::from_str(&cloned, 0);\n\n let mut tokens = vec![];\n\n\n\n while lexer.next_token(LexContext::default()) != EOF {\n\n tokens.push(lexer.current_range());\n\n }\n\n\n\n sender\n\n .send(tokens)\n\n .expect(\"Could not send tokens to receiver\");\n\n });\n\n let token_ranges = receiver\n\n .recv_timeout(Duration::from_secs(2))\n\n .unwrap_or_else(|_| {\n", "file_path": "crates/rome_js_parser/src/lexer/tests.rs", "rank": 86, "score": 186435.77136185515 }, { "content": "pub fn run(\n\n suites: Option<&str>,\n\n filter: Option<&str>,\n\n json: bool,\n\n detail_level: SummaryDetailLevel,\n\n) {\n\n let mut reporters = MulticastTestReporter::new(Box::new(DefaultReporter::default()));\n\n\n\n let output_target = if json {\n\n reporters.add(Box::new(JsonReporter::default()));\n\n 
OutputTarget::stderr()\n\n } else {\n\n OutputTarget::stdout()\n\n };\n\n\n\n reporters.add(Box::new(SummaryReporter::new(detail_level, output_target)));\n\n\n\n let mut context = TestRunContext {\n\n filter: filter.map(|s| s.to_string()),\n\n reporter: &mut reporters,\n", "file_path": "xtask/coverage/src/lib.rs", "rank": 87, "score": 185551.51316542242 }, { "content": "#[wasm_bindgen]\n\npub fn run(\n\n code: String,\n\n line_width: u16,\n\n indent_width: Option<u8>, // If None, we use tabs\n\n quote_style: String,\n\n is_typescript: bool,\n\n is_jsx: bool,\n\n source_type: String,\n\n) -> PlaygroundResult {\n\n let mut simple_files = SimpleFiles::new();\n\n let main_file_id = simple_files.add(\"main.js\".to_string(), code.clone());\n\n\n\n let source_type = if source_type == \"script\" {\n\n SourceType::js_script()\n\n } else {\n\n let source_type = if is_typescript {\n\n SourceType::ts()\n\n } else {\n\n SourceType::js_module()\n\n };\n", "file_path": "website/playground/src/lib.rs", "rank": 88, "score": 185551.51316542242 }, { "content": "// test_err spread\n\n// [...]\n\n/// A spread element consisting of three dots and an assignment expression such as `...foo`\n\nfn parse_spread_element(p: &mut Parser, context: ExpressionContext) -> ParsedSyntax {\n\n if !p.at(T![...]) {\n\n return Absent;\n\n }\n\n let m = p.start();\n\n p.bump(T![...]);\n\n parse_assignment_expression_or_higher(p, context)\n\n .or_add_diagnostic(p, js_parse_error::expected_expression_assignment);\n\n Present(m.complete(p, JS_SPREAD))\n\n}\n\n\n\n/// A left hand side expression, either a member expression or a call expression such as `foo()`.\n\npub(super) fn parse_lhs_expr(p: &mut Parser, context: ExpressionContext) -> ParsedSyntax {\n\n // super.foo and super[bar]\n\n // test super_property_access\n\n // super.foo\n\n // super[bar]\n\n // super[foo][bar]\n\n let lhs = if p.at(T![super]) {\n\n parse_super_expression(p)\n\n } else {\n\n parse_member_expression_or_higher(p, context)\n\n 
};\n\n\n\n lhs.map(|lhs_marker| parse_call_expression_rest(p, lhs_marker, context))\n\n}\n\n\n", "file_path": "crates/rome_js_parser/src/syntax/expr.rs", "rank": 89, "score": 184840.89792349547 }, { "content": "fn parse_suppression_comment(comment: &str) -> impl Iterator<Item = Suppression> {\n\n let (head, mut comment) = comment.split_at(2);\n\n let is_block_comment = match head {\n\n \"//\" => false,\n\n \"/*\" => {\n\n comment = comment\n\n .strip_suffix(\"*/\")\n\n .expect(\"block comment with no closing token\");\n\n true\n\n }\n\n token => panic!(\"comment with unknown opening token {token:?}\"),\n\n };\n\n\n\n comment.lines().filter_map(move |line| {\n\n // Eat start of line whitespace\n\n let mut line = line.trim_start();\n\n\n\n // If we're in a block comment eat stars, then whitespace again\n\n if is_block_comment {\n\n line = line.trim_start_matches('*').trim_start()\n", "file_path": "crates/rome_js_formatter/src/utils/mod.rs", "rank": 90, "score": 184501.156431487 }, { "content": "pub fn coverage_compare(\n\n base_result_path: Option<&str>,\n\n new_result_path: Option<&str>,\n\n markdown: bool,\n\n) {\n\n // resolve the path passed as argument, or retrieve the default one\n\n let base_result_dir = if let Some(base_result_path) = base_result_path {\n\n PathBuf::from(base_result_path)\n\n } else {\n\n project_root().join(BASE_RESULT_FILE)\n\n };\n\n\n\n // resolve the path passed as argument, or retrieve the default one\n\n let new_result_dir = if let Some(new_result_path) = new_result_path {\n\n PathBuf::from(new_result_path)\n\n } else {\n\n project_root().join(NEW_RESULT_FILE)\n\n };\n\n\n\n if !base_result_dir.exists() {\n", "file_path": "xtask/coverage/src/compare.rs", "rank": 91, "score": 183313.44972485426 }, { "content": "pub fn emit_compare(\n\n base_results: &TestResults,\n\n new_results: &TestResults,\n\n test_suite: &str,\n\n markdown: bool,\n\n) {\n\n let base_total = base_results.summary.tests_ran as isize;\n\n let new_total = 
new_results.summary.tests_ran as isize;\n\n let total_diff = new_total - base_total;\n\n\n\n let base_passed = base_results.summary.passed as isize;\n\n let new_passed = new_results.summary.passed as isize;\n\n let passed_diff = new_passed - base_passed;\n\n\n\n let base_failed = base_results.summary.failed as isize;\n\n let new_failed = new_results.summary.failed as isize;\n\n let failed_diff = new_failed - base_failed;\n\n\n\n let base_panics = base_results.summary.panics as isize;\n\n let new_panics = new_results.summary.panics as isize;\n", "file_path": "xtask/coverage/src/results.rs", "rank": 92, "score": 183313.44972485426 }, { "content": "/// Wraps the statement into a block if its not already a JsBlockStatement\n\nfn into_block(formatter: &Formatter, stmt: JsAnyStatement) -> FormatResult<FormatElement> {\n\n if matches!(stmt, JsAnyStatement::JsBlockStatement(_)) {\n\n return stmt.format(formatter);\n\n }\n\n\n\n // If the body is an empty statement, force a line break to ensure behavior\n\n // is coherent with `is_non_collapsable_empty_block`\n\n if matches!(stmt, JsAnyStatement::JsEmptyStatement(_)) {\n\n return Ok(format_elements![\n\n token(\"{\"),\n\n stmt.format(formatter)?,\n\n hard_line_break(),\n\n token(\"}\")\n\n ]);\n\n }\n\n\n\n Ok(group_elements(format_elements![\n\n token(\"{\"),\n\n block_indent(stmt.format(formatter)?),\n\n token(\"}\"),\n\n ]))\n\n}\n", "file_path": "crates/rome_js_formatter/src/js/statements/if_statement.rs", "rank": 93, "score": 182803.12162668144 }, { "content": "pub fn benchmark_format_lib(id: &str, root: &JsSyntaxNode) -> BenchmarkSummary {\n\n let formatter_timer = timing::start();\n\n run_format(root);\n\n let formatter_duration = formatter_timer.stop();\n\n\n\n BenchmarkSummary::Formatter(FormatterMeasurement {\n\n id: id.to_string(),\n\n formatting: formatter_duration,\n\n })\n\n}\n\n\n", "file_path": "xtask/bench/src/features/formatter.rs", "rank": 94, "score": 182077.63620545514 }, { "content": "fn 
panic_handler(info: &PanicInfo) {\n\n // Buffer the error message to a string before printing it to stderr at once\n\n // to prevent it from getting mixed with other errors if multiple threads\n\n // panic at the same time\n\n let mut error = String::new();\n\n\n\n writeln!(error, \"Rome encountered an unexpected error\").unwrap();\n\n writeln!(error).unwrap();\n\n\n\n writeln!(error, \"This is a bug in Rome, not an error in your code, and we would appreciate it if you could report it to https://github.com/rome/tools/issues/ along with the following information to help us fixing the issue:\").unwrap();\n\n writeln!(error).unwrap();\n\n\n\n if let Some(location) = info.location() {\n\n writeln!(error, \"Source Location: {location}\").unwrap();\n\n }\n\n\n\n if let Some(thread) = thread::current().name() {\n\n writeln!(error, \"Thread Name: {thread}\").unwrap();\n\n }\n\n\n\n let payload = info.payload();\n\n if let Some(msg) = payload.downcast_ref::<&'static str>() {\n\n writeln!(error, \"Message: {msg}\").unwrap();\n\n } else if let Some(msg) = payload.downcast_ref::<String>() {\n\n writeln!(error, \"Message: {msg}\").unwrap();\n\n }\n\n\n\n eprintln!(\"{error}\");\n\n}\n", "file_path": "crates/rome_cli/src/panic.rs", "rank": 95, "score": 181637.64905312608 }, { "content": "/// Returns true if the provided JsArrayElementList could\n\n/// be \"fill-printed\" instead of breaking each element on\n\n/// a different line.\n\n///\n\n/// The underlying logic only allows lists of literal expressions\n\n/// with 10 or less characters, potentially wrapped in a \"short\"\n\n/// unary expression (+, -, ~ or !)\n\nfn can_print_fill(list: &JsArrayElementList) -> bool {\n\n use rome_js_syntax::JsAnyArrayElement::*;\n\n use rome_js_syntax::JsAnyExpression::*;\n\n use rome_js_syntax::JsUnaryOperator::*;\n\n\n\n list.iter().all(|item| match item {\n\n Ok(JsAnyExpression(JsUnaryExpression(expr))) => {\n\n match expr.operator() {\n\n Ok(Plus | Minus | BitwiseNot | LogicalNot) => 
{}\n\n _ => return false,\n\n }\n\n\n\n if let Ok(expr) = expr.argument() {\n\n is_short_literal(&expr)\n\n } else {\n\n false\n\n }\n\n }\n\n Ok(JsAnyExpression(expr)) => is_short_literal(&expr),\n\n _ => false,\n\n })\n\n}\n\n\n", "file_path": "crates/rome_js_formatter/src/js/lists/array_element_list.rs", "rank": 96, "score": 181364.64250536443 }, { "content": "fn command_name() -> String {\n\n current_exe()\n\n .ok()\n\n .and_then(|path| Some(path.file_name()?.to_str()?.to_string()))\n\n .unwrap_or_else(|| String::from(\"rome\"))\n\n}\n\n\n\n// Termination implements Debug by redirecting to Display instead of deriving\n\n// a \"canonical\" debug implementation as it it is returned as a Result in the\n\n// main function and gets printed by the standard library, which uses Debug but\n\n// we want to show the actuall error message to the user in case of an error\n\nimpl Debug for Termination {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n\n write!(fmt, \"{}\", self)\n\n }\n\n}\n", "file_path": "crates/rome_cli/src/termination.rs", "rank": 97, "score": 181238.36304461642 }, { "content": "/// Flush and print the recorded metrics to the console\n\npub fn print_metrics() {\n\n let mut histograms: Vec<_> = METRICS\n\n .write()\n\n .drain()\n\n .flat_map(|(key, entry)| entry.into_inner().into_histograms(key.0.name()))\n\n .collect();\n\n\n\n histograms.sort_unstable_by(|(a, _), (b, _)| a.cmp(b));\n\n\n\n for (key, histogram) in histograms {\n\n // Print the header line for the histogram with its name, mean sample\n\n // duration and standard deviation\n\n println!(\n\n \"{}: mean = {:.1?}, stdev = {:.1?}\",\n\n key,\n\n Duration::from_nanos(histogram.mean().round() as u64),\n\n Duration::from_nanos(histogram.stdev().round() as u64),\n\n );\n\n\n\n // For each quantile bucket in the histogram print out the associated\n", "file_path": "crates/rome_cli/src/metrics.rs", "rank": 98, "score": 181157.2704841738 }, { "content": "/// Initializes metrics 
recording\n\npub fn init_metrics() {\n\n // Create and injects the metrics recording layer with the tracing library\n\n tracing_subscriber::registry().with(MetricsLayer).init();\n\n}\n\n\n", "file_path": "crates/rome_cli/src/metrics.rs", "rank": 99, "score": 181157.2704841738 } ]
Rust
lib/shiika_core/src/names.rs
shiika-lang/shiika
1992ad906c4e7354f8eb7000a134898b68651883
use crate::ty; use crate::ty::*; use serde::{Deserialize, Serialize}; #[derive(Debug, PartialEq)] pub struct ClassFirstname(pub String); impl std::fmt::Display for ClassFirstname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } impl ClassFirstname { pub fn add_namespace(&self, namespace: &str) -> ClassFullname { if namespace.is_empty() { class_fullname(self.0.clone()) } else { class_fullname(namespace.to_string() + "::" + &self.0) } } } pub fn class_firstname(s: impl Into<String>) -> ClassFirstname { ClassFirstname(s.into()) } #[derive(Debug, PartialEq, Clone, Eq, Hash, Serialize, Deserialize)] pub struct ClassFullname(pub String); impl std::fmt::Display for ClassFullname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } pub fn class_fullname(s: impl Into<String>) -> ClassFullname { let name = s.into(); debug_assert!(name != "Meta:"); debug_assert!(!name.starts_with("::")); debug_assert!(!name.starts_with("Meta:Meta:")); ClassFullname(name) } pub fn metaclass_fullname(base_: impl Into<String>) -> ClassFullname { let base = base_.into(); debug_assert!(!base.is_empty()); if base == "Metaclass" || base.starts_with("Meta:") { class_fullname("Metaclass") } else { class_fullname(&("Meta:".to_string() + &base)) } } impl ClassFullname { pub fn new(s: impl Into<String>, is_meta: bool) -> ClassFullname { if is_meta { metaclass_fullname(s) } else { class_fullname(s) } } pub fn instance_ty(&self) -> TermTy { if self.0 == "Metaclass" { ty::new("Metaclass", Default::default(), true) } else if self.0.starts_with("Meta:") { ty::meta(&self.0.clone().split_off(5)) } else { ty::raw(&self.0) } } pub fn class_ty(&self) -> TermTy { ty::meta(&self.0) } pub fn is_meta(&self) -> bool { self.0.starts_with("Meta:") } pub fn is_the_class(&self) -> bool { self.0 == "Class" } pub fn to_ty(&self) -> TermTy { if self.is_meta() { let mut name = self.0.clone(); name.replace_range(0..=4, ""); ty::meta(&name) } 
else { self.instance_ty() } } pub fn meta_name(&self) -> ClassFullname { metaclass_fullname(&self.0) } pub fn method_fullname(&self, method_firstname: &MethodFirstname) -> MethodFullname { method_fullname(self, &method_firstname.0) } pub fn to_const_fullname(&self) -> ConstFullname { toplevel_const(&self.0) } } #[derive(Debug, PartialEq, Clone, Eq, Hash, Serialize, Deserialize)] pub struct MethodFirstname(pub String); impl std::fmt::Display for MethodFirstname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } pub fn method_firstname(s: impl Into<String>) -> MethodFirstname { MethodFirstname(s.into()) } impl MethodFirstname { pub fn append(&self, suffix: &str) -> MethodFirstname { MethodFirstname(self.0.clone() + suffix) } } #[derive(Debug, PartialEq, Clone, Eq, Hash, Serialize, Deserialize)] pub struct MethodFullname { pub full_name: String, pub first_name: MethodFirstname, } pub fn method_fullname( class_name: &ClassFullname, first_name_: impl Into<String>, ) -> MethodFullname { let first_name = first_name_.into(); debug_assert!(!first_name.is_empty()); MethodFullname { full_name: class_name.0.clone() + "#" + &first_name, first_name: MethodFirstname(first_name), } } pub fn method_fullname_raw(cls: impl Into<String>, method: impl Into<String>) -> MethodFullname { method_fullname(&class_fullname(cls), method) } impl std::fmt::Display for MethodFullname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.full_name) } } impl MethodFullname { pub fn is_class_method(&self) -> bool { self.full_name.starts_with("Meta:") } } #[derive(Debug, PartialEq, Clone, Eq, Hash, Serialize, Deserialize)] pub struct ConstFullname(pub String); impl std::fmt::Display for ConstFullname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } pub fn const_fullname(s_: impl Into<String>) -> ConstFullname { let s = s_.into(); debug_assert!(!s.starts_with("::")); 
ConstFullname(format!("::{}", &s)) } pub fn toplevel_const(first_name: &str) -> ConstFullname { debug_assert!(!first_name.starts_with("::")); ConstFullname(format!("::{}", first_name)) } #[derive(Debug, PartialEq, Clone)] pub struct Namespace(pub Vec<String>); impl std::fmt::Display for Namespace { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "::{}", &self.string()) } } impl Namespace { pub fn new(names: Vec<String>) -> Namespace { debug_assert!(names.iter().all(|x| !x.contains("::"))); Namespace(names) } pub fn root() -> Namespace { Namespace::new(vec![]) } pub fn add(&self, name: &ClassFirstname) -> Namespace { let mut v = self.0.clone(); v.push(name.0.clone()); Namespace::new(v) } pub fn class_fullname(&self, name: &ClassFirstname) -> ClassFullname { let n = self.string(); if n.is_empty() { class_fullname(&name.0) } else { class_fullname(format!("{}::{}", n, &name.0)) } } pub fn const_fullname(&self, name: &str) -> ConstFullname { let n = self.string(); if n.is_empty() { const_fullname(name) } else { const_fullname(format!("{}::{}", &n, name)) } } pub fn head(&self, n: usize) -> &[String] { &self.0[0..n] } pub fn size(&self) -> usize { self.0.len() } pub fn string(&self) -> String { self.0.join("::") } } #[derive(Debug, PartialEq, Clone)] pub struct ConstName { pub names: Vec<String>, pub args: Vec<ConstName>, } impl ConstName { pub fn resolved(&self) -> ResolvedConstName { debug_assert!(self.args.is_empty()); ResolvedConstName { names: self.names.clone(), args: vec![], } } pub fn has_type_args(&self) -> bool { !self.args.is_empty() } pub fn to_class_fullname(&self) -> ClassFullname { class_fullname(&self.string()) } pub fn fullname(&self) -> String { "::".to_string() + &self.string() } fn string(&self) -> String { let mut s = self.names.join("::"); if !self.args.is_empty() { s += "<"; let v = self.args.iter().map(|x| x.string()).collect::<Vec<_>>(); s += &v.join(","); s += ">"; } s } } pub fn const_name(names: Vec<String>) -> 
ConstName { ConstName { names, args: vec![], } } #[derive(Debug, PartialEq, Clone)] pub struct UnresolvedConstName(pub Vec<String>); #[derive(Debug, PartialEq)] pub struct ResolvedConstName { pub names: Vec<String>, pub args: Vec<ResolvedConstName>, } impl std::fmt::Display for ResolvedConstName { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", &self.string()) } } impl ResolvedConstName { pub fn new(names: Vec<String>, args: Vec<ResolvedConstName>) -> ResolvedConstName { ResolvedConstName { names, args } } pub fn unsafe_create(s: String) -> ResolvedConstName { ResolvedConstName { names: vec![s], args: vec![], } } pub fn has_type_args(&self) -> bool { !self.args.is_empty() } pub fn base(&self) -> ResolvedConstName { ResolvedConstName { names: self.names.clone(), args: Default::default(), } } pub fn to_const_fullname(&self) -> ConstFullname { toplevel_const(&self.string()) } pub fn to_class_fullname(&self) -> ClassFullname { class_fullname(self.string()) } pub fn string(&self) -> String { let mut s = self.names.join("::"); if !self.args.is_empty() { s += "<"; let v = self.args.iter().map(|arg| arg.string()).collect::<Vec<_>>(); s += &v.join(","); s += ">"; } s } pub fn with_type_args(&self, args: Vec<ResolvedConstName>) -> ResolvedConstName { debug_assert!(self.args.is_empty()); ResolvedConstName { names: self.names.clone(), args, } } pub fn to_ty(&self, class_typarams: &[String], method_typarams: &[String]) -> TermTy { if self.args.is_empty() { let s = self.names.join("::"); if let Some(i) = class_typarams.iter().position(|name| *name == s) { ty::typaram(s, ty::TyParamKind::Class, i) } else if let Some(i) = method_typarams.iter().position(|name| *name == s) { ty::typaram(s, ty::TyParamKind::Method, i) } else { ty::raw(&self.names.join("::")) } } else { let type_args = self .args .iter() .map(|n| n.to_ty(class_typarams, method_typarams)) .collect(); ty::spe(&self.names.join("::"), type_args) } } } pub fn resolved_const_name(namespace: 
Namespace, names: Vec<String>) -> ResolvedConstName { let new_names = namespace .0 .into_iter() .chain(names.into_iter()) .collect::<Vec<String>>(); ResolvedConstName { names: new_names, args: vec![], } } pub fn typaram_as_resolved_const_name(name: impl Into<String>) -> ResolvedConstName { resolved_const_name(Namespace::root(), vec![name.into()]) }
use crate::ty; use crate::ty::*; use serde::{Deserialize, Serialize}; #[derive(Debug, PartialEq)] pub struct ClassFirstname(pub String); impl std::fmt::Display for ClassFirstname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } impl ClassFirstname { pub fn add_namespace(&self, namespace: &str) -> ClassFullname { if namespace.is_empty() { class_fullname(self.0.clone()) } else { class_fullname(namespace.to_string() + "::" + &self.0) } } } pub fn class_firstname(s: impl Into<String>) -> ClassFirstname { ClassFirstname(s.into()) } #[derive(Debug, PartialEq, Clone, Eq, Hash, Serialize, Deserialize)] pub struct ClassFullname(pub String); impl std::fmt::Display for ClassFullname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } pub fn class_fullname(s: impl Into<String>) -> ClassFullname { let name = s.into(); debug_assert!(name != "Meta:"); debug_assert!(!name.starts_with("::")); debug_assert!(!name.starts_with("Meta:Meta:")); ClassFullname(name) } pub fn metaclass_fullname(base_: impl Into<String>) -> ClassFullname { let base = base_.into(); debug_assert!(!base.is_empty()); if base == "Metaclass" || base.starts_with("Meta:") { class_fullname("Metaclass") } else { class_fullname(&("Meta:".to_string() + &base)) } } impl ClassFullname { pub fn new(s: impl Into<String>, is_meta: bool) -> ClassFullname { if is_meta { metaclass_fullname(s) } else { class_fullname(s) } } pub fn instance_ty(&self) -> TermTy { if self.0 == "Metaclass" { ty::new("Metaclass", Default::default(), true) } else if self.0.starts_with("Meta:") { ty::meta(&self.0.clone().split_off(5)) } else { ty::raw(&self.0) } } pub fn class_ty(&self) -> TermTy { ty::meta(&self.0) } pub fn is_meta(&self) -> bool { self.0.starts_with("Meta:") } pub fn is_the_class(&self) -> bool { self.0 == "Class" } pub fn to_ty(&self) -> TermTy { if self.is_meta() { l
pub fn meta_name(&self) -> ClassFullname { metaclass_fullname(&self.0) } pub fn method_fullname(&self, method_firstname: &MethodFirstname) -> MethodFullname { method_fullname(self, &method_firstname.0) } pub fn to_const_fullname(&self) -> ConstFullname { toplevel_const(&self.0) } } #[derive(Debug, PartialEq, Clone, Eq, Hash, Serialize, Deserialize)] pub struct MethodFirstname(pub String); impl std::fmt::Display for MethodFirstname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } pub fn method_firstname(s: impl Into<String>) -> MethodFirstname { MethodFirstname(s.into()) } impl MethodFirstname { pub fn append(&self, suffix: &str) -> MethodFirstname { MethodFirstname(self.0.clone() + suffix) } } #[derive(Debug, PartialEq, Clone, Eq, Hash, Serialize, Deserialize)] pub struct MethodFullname { pub full_name: String, pub first_name: MethodFirstname, } pub fn method_fullname( class_name: &ClassFullname, first_name_: impl Into<String>, ) -> MethodFullname { let first_name = first_name_.into(); debug_assert!(!first_name.is_empty()); MethodFullname { full_name: class_name.0.clone() + "#" + &first_name, first_name: MethodFirstname(first_name), } } pub fn method_fullname_raw(cls: impl Into<String>, method: impl Into<String>) -> MethodFullname { method_fullname(&class_fullname(cls), method) } impl std::fmt::Display for MethodFullname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.full_name) } } impl MethodFullname { pub fn is_class_method(&self) -> bool { self.full_name.starts_with("Meta:") } } #[derive(Debug, PartialEq, Clone, Eq, Hash, Serialize, Deserialize)] pub struct ConstFullname(pub String); impl std::fmt::Display for ConstFullname { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", self.0) } } pub fn const_fullname(s_: impl Into<String>) -> ConstFullname { let s = s_.into(); debug_assert!(!s.starts_with("::")); ConstFullname(format!("::{}", &s)) } pub 
fn toplevel_const(first_name: &str) -> ConstFullname { debug_assert!(!first_name.starts_with("::")); ConstFullname(format!("::{}", first_name)) } #[derive(Debug, PartialEq, Clone)] pub struct Namespace(pub Vec<String>); impl std::fmt::Display for Namespace { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "::{}", &self.string()) } } impl Namespace { pub fn new(names: Vec<String>) -> Namespace { debug_assert!(names.iter().all(|x| !x.contains("::"))); Namespace(names) } pub fn root() -> Namespace { Namespace::new(vec![]) } pub fn add(&self, name: &ClassFirstname) -> Namespace { let mut v = self.0.clone(); v.push(name.0.clone()); Namespace::new(v) } pub fn class_fullname(&self, name: &ClassFirstname) -> ClassFullname { let n = self.string(); if n.is_empty() { class_fullname(&name.0) } else { class_fullname(format!("{}::{}", n, &name.0)) } } pub fn const_fullname(&self, name: &str) -> ConstFullname { let n = self.string(); if n.is_empty() { const_fullname(name) } else { const_fullname(format!("{}::{}", &n, name)) } } pub fn head(&self, n: usize) -> &[String] { &self.0[0..n] } pub fn size(&self) -> usize { self.0.len() } pub fn string(&self) -> String { self.0.join("::") } } #[derive(Debug, PartialEq, Clone)] pub struct ConstName { pub names: Vec<String>, pub args: Vec<ConstName>, } impl ConstName { pub fn resolved(&self) -> ResolvedConstName { debug_assert!(self.args.is_empty()); ResolvedConstName { names: self.names.clone(), args: vec![], } } pub fn has_type_args(&self) -> bool { !self.args.is_empty() } pub fn to_class_fullname(&self) -> ClassFullname { class_fullname(&self.string()) } pub fn fullname(&self) -> String { "::".to_string() + &self.string() } fn string(&self) -> String { let mut s = self.names.join("::"); if !self.args.is_empty() { s += "<"; let v = self.args.iter().map(|x| x.string()).collect::<Vec<_>>(); s += &v.join(","); s += ">"; } s } } pub fn const_name(names: Vec<String>) -> ConstName { ConstName { names, args: vec![], } 
} #[derive(Debug, PartialEq, Clone)] pub struct UnresolvedConstName(pub Vec<String>); #[derive(Debug, PartialEq)] pub struct ResolvedConstName { pub names: Vec<String>, pub args: Vec<ResolvedConstName>, } impl std::fmt::Display for ResolvedConstName { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{}", &self.string()) } } impl ResolvedConstName { pub fn new(names: Vec<String>, args: Vec<ResolvedConstName>) -> ResolvedConstName { ResolvedConstName { names, args } } pub fn unsafe_create(s: String) -> ResolvedConstName { ResolvedConstName { names: vec![s], args: vec![], } } pub fn has_type_args(&self) -> bool { !self.args.is_empty() } pub fn base(&self) -> ResolvedConstName { ResolvedConstName { names: self.names.clone(), args: Default::default(), } } pub fn to_const_fullname(&self) -> ConstFullname { toplevel_const(&self.string()) } pub fn to_class_fullname(&self) -> ClassFullname { class_fullname(self.string()) } pub fn string(&self) -> String { let mut s = self.names.join("::"); if !self.args.is_empty() { s += "<"; let v = self.args.iter().map(|arg| arg.string()).collect::<Vec<_>>(); s += &v.join(","); s += ">"; } s } pub fn with_type_args(&self, args: Vec<ResolvedConstName>) -> ResolvedConstName { debug_assert!(self.args.is_empty()); ResolvedConstName { names: self.names.clone(), args, } } pub fn to_ty(&self, class_typarams: &[String], method_typarams: &[String]) -> TermTy { if self.args.is_empty() { let s = self.names.join("::"); if let Some(i) = class_typarams.iter().position(|name| *name == s) { ty::typaram(s, ty::TyParamKind::Class, i) } else if let Some(i) = method_typarams.iter().position(|name| *name == s) { ty::typaram(s, ty::TyParamKind::Method, i) } else { ty::raw(&self.names.join("::")) } } else { let type_args = self .args .iter() .map(|n| n.to_ty(class_typarams, method_typarams)) .collect(); ty::spe(&self.names.join("::"), type_args) } } } pub fn resolved_const_name(namespace: Namespace, names: Vec<String>) -> 
ResolvedConstName { let new_names = namespace .0 .into_iter() .chain(names.into_iter()) .collect::<Vec<String>>(); ResolvedConstName { names: new_names, args: vec![], } } pub fn typaram_as_resolved_const_name(name: impl Into<String>) -> ResolvedConstName { resolved_const_name(Namespace::root(), vec![name.into()]) }
et mut name = self.0.clone(); name.replace_range(0..=4, ""); ty::meta(&name) } else { self.instance_ty() } }
function_block-function_prefixed
[ { "content": "pub fn new(base_name_: impl Into<String>, type_args: Vec<TermTy>, is_meta: bool) -> TermTy {\n\n let base_name = base_name_.into();\n\n debug_assert!(!base_name.is_empty());\n\n debug_assert!(!base_name.starts_with(\"Meta:\"));\n\n debug_assert!(!base_name.contains('<'));\n\n let fullname = ClassFullname::new(\n\n format!(\"{}{}\", &base_name, &tyargs_str(&type_args)),\n\n is_meta,\n\n );\n\n TermTy {\n\n fullname,\n\n body: term_ty::TyBody::TyRaw(LitTy::new(base_name, type_args, is_meta)),\n\n }\n\n}\n\n\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 1, "score": 353636.3981573661 }, { "content": "/// Returns the type of the class object\n\npub fn meta(base_fullname_: impl Into<String>) -> TermTy {\n\n new(base_fullname_, Default::default(), true)\n\n}\n\n\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 2, "score": 319034.41244059027 }, { "content": "pub fn llvm_func_name(name: impl Into<String>) -> LlvmFuncName {\n\n LlvmFuncName(name.into())\n\n}\n\n\n\nimpl<'hir, 'run, 'ictx> CodeGen<'hir, 'run, 'ictx> {\n\n /// Build IR to return Shiika object\n\n pub fn build_return(&self, obj: &SkObj<'run>) {\n\n self.builder.build_return(Some(&obj.0));\n\n }\n\n\n\n /// Build IR to return ::Void\n\n pub fn build_return_void(&self) {\n\n let v = self.gen_const_ref(&toplevel_const(\"Void\"));\n\n self.build_return(&v);\n\n }\n\n\n\n /// Load value of an instance variable\n\n pub fn build_ivar_load(&self, object: SkObj<'run>, idx: usize, name: &str) -> SkObj<'run> {\n\n SkObj(self.build_llvm_struct_ref(object, OBJ_HEADER_SIZE + idx, name))\n\n }\n", "file_path": "lib/skc_codegen/src/utils.rs", "rank": 6, "score": 292658.859760909 }, { "content": "pub fn spe_meta(base_name_: impl Into<String>, type_args: Vec<TermTy>) -> TermTy {\n\n new(base_name_, type_args, true)\n\n}\n\n\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 7, "score": 291721.39300133905 }, { "content": "/// Destructively extract list of local variables\n\npub fn 
extract_lvars(lvars: &mut HashMap<String, CtxLVar>) -> HirLVars {\n\n std::mem::take(lvars)\n\n .into_iter()\n\n .map(|(name, ctx_lvar)| (name, ctx_lvar.ty))\n\n .collect::<Vec<_>>()\n\n}\n", "file_path": "lib/skc_ast2hir/src/hir_maker.rs", "rank": 8, "score": 275399.83072156494 }, { "content": "pub fn mangle_method(method_name: &str) -> String {\n\n method_name\n\n // Replace '_' to use '_' as delimiter\n\n .replace(\"_\", \"__\")\n\n // Replace symbols to make the function callable from Rust(skc_rustlib)\n\n .replace(\"::\", \"_\")\n\n .replace(\"Meta:\", \"Meta_\")\n\n .replace(\"#\", \"_\")\n\n .replace(\"+@\", \"uplus_\")\n\n .replace(\"-@\", \"uminus_\")\n\n .replace(\"+\", \"add_\")\n\n .replace(\"-\", \"sub_\")\n\n .replace(\"*\", \"mul_\")\n\n .replace(\"/\", \"div_\")\n\n .replace(\"%\", \"mod_\")\n\n .replace(\"==\", \"eq_\")\n\n .replace(\"<\", \"lt_\")\n\n .replace(\">\", \"gt_\")\n\n .replace(\"<=\", \"le_\")\n\n .replace(\">=\", \"ge_\")\n\n .replace(\"[]\", \"aref_\")\n\n .replace(\"[]=\", \"aset_\")\n\n}\n", "file_path": "lib/shiika_ffi/src/lib.rs", "rank": 10, "score": 270493.5863389905 }, { "content": "pub fn spe(base_name_: impl Into<String>, type_args: Vec<TermTy>) -> TermTy {\n\n new(base_name_, type_args, false)\n\n}\n\n\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 11, "score": 263902.459801168 }, { "content": "pub fn typaram(name: impl Into<String>, kind: TyParamKind, idx: usize) -> TermTy {\n\n let s = name.into();\n\n TermTy {\n\n // TODO: s is not a class name. 
`fullname` should be just a String\n\n fullname: class_fullname(s.clone()),\n\n body: term_ty::TyBody::TyParamRef {\n\n kind,\n\n name: s,\n\n idx,\n\n upper_bound: Box::new(ty::raw(\"Object\")),\n\n lower_bound: Box::new(ty::raw(\"Never\")),\n\n },\n\n }\n\n}\n\n\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 14, "score": 250047.62718434076 }, { "content": "pub fn bare_name(name: &str) -> AstExpression {\n\n primary_expression(AstExpressionBody::BareName(name.to_string()))\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 16, "score": 246586.71699983138 }, { "content": "pub fn ivars() -> HashMap<String, SkIVar> {\n\n let mut ivars = HashMap::new();\n\n ivars.insert(\n\n \"@base_name\".to_string(),\n\n SkIVar {\n\n name: \"@base_name\".to_string(),\n\n idx: 0,\n\n ty: ty::raw(\"String\"),\n\n readonly: true,\n\n },\n\n );\n\n ivars\n\n}\n", "file_path": "lib/skc_corelib/src/metaclass.rs", "rank": 17, "score": 244097.54232070228 }, { "content": "pub fn ivars() -> HashMap<String, SkIVar> {\n\n let mut ivars = HashMap::new();\n\n ivars.insert(\n\n \"@name\".to_string(),\n\n SkIVar {\n\n name: \"@name\".to_string(),\n\n idx: 0,\n\n ty: ty::raw(\"String\"),\n\n readonly: true,\n\n },\n\n );\n\n ivars\n\n}\n", "file_path": "lib/skc_corelib/src/class.rs", "rank": 18, "score": 243430.19719340128 }, { "content": "pub fn ivar_ref(name: String) -> AstExpression {\n\n primary_expression(AstExpressionBody::IVarRef(name))\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 19, "score": 234939.4733413761 }, { "content": "pub fn const_ref(name: Vec<String>) -> AstExpression {\n\n primary_expression(AstExpressionBody::ConstRef(UnresolvedConstName(name)))\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 20, "score": 227554.31299387215 }, { "content": "/// Returns the type of instances of the class\n\npub fn raw(fullname_: impl Into<String>) -> TermTy {\n\n let fullname = fullname_.into();\n\n // Usually this is `false`; the only exception 
is the class `Metaclass`\n\n let meta = fullname == \"Metaclass\";\n\n new(fullname, Default::default(), meta)\n\n}\n\n\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 21, "score": 226622.74277821562 }, { "content": "pub fn ivar_assign(name: String, rhs: AstExpression) -> AstExpression {\n\n non_primary_expression(AstExpressionBody::IVarAssign {\n\n name,\n\n rhs: Box::new(rhs),\n\n is_var: false,\n\n })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 23, "score": 217981.84703984167 }, { "content": "pub fn lvar_decl(name: String, rhs: AstExpression) -> AstExpression {\n\n non_primary_expression(AstExpressionBody::LVarAssign {\n\n name,\n\n rhs: Box::new(rhs),\n\n is_var: true,\n\n })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 24, "score": 217981.84703984167 }, { "content": "pub fn ivar_decl(name: String, rhs: AstExpression) -> AstExpression {\n\n non_primary_expression(AstExpressionBody::IVarAssign {\n\n name,\n\n rhs: Box::new(rhs),\n\n is_var: true,\n\n })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 25, "score": 217981.84703984167 }, { "content": "pub fn name_error(msg: &str) -> anyhow::Error {\n\n Error::NameError {\n\n msg: msg.to_string(),\n\n backtrace: Backtrace::capture(),\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 27, "score": 215297.42036330182 }, { "content": "pub fn specialize_expr(base_name: Vec<String>, args: Vec<AstExpression>) -> AstExpression {\n\n primary_expression(AstExpressionBody::SpecializeExpression {\n\n base_name: UnresolvedConstName(base_name),\n\n args,\n\n })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 28, "score": 212643.8083845081 }, { "content": "pub fn name_error(msg: &str) -> anyhow::Error {\n\n Error::NameError {\n\n msg: msg.to_string(),\n\n backtrace: Backtrace::capture(),\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/error.rs", "rank": 29, "score": 205655.51581604872 }, { "content": "/// Return a param of 
the given name and its index\n\npub fn find_param<'a>(params: &'a [MethodParam], name: &str) -> Option<(usize, &'a MethodParam)> {\n\n params\n\n .iter()\n\n .enumerate()\n\n .find(|(_, param)| param.name == name)\n\n}\n\n\n", "file_path": "lib/skc_hir/src/signature.rs", "rank": 30, "score": 197943.5730774746 }, { "content": "pub fn invalid_reassign_error(orig_ty: &TermTy, new_ty: &TermTy, name: &str) -> anyhow::Error {\n\n type_error!(\n\n \"variable {} is {:?} but tried to assign a {:?}\",\n\n name,\n\n orig_ty,\n\n new_ty\n\n )\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/type_checking.rs", "rank": 31, "score": 192299.45868129586 }, { "content": "pub fn nonmeta(names: &[String], args: Vec<TermTy>) -> TermTy {\n\n ty::new(&names.join(\"::\"), args, false)\n\n}\n\n\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 32, "score": 190724.79105062116 }, { "content": "// Question: is there a better way to do this?\n\nfn inkwell_set_name(val: BasicValueEnum, name: &str) {\n\n match val {\n\n BasicValueEnum::ArrayValue(v) => v.set_name(name),\n\n BasicValueEnum::IntValue(v) => v.set_name(name),\n\n BasicValueEnum::FloatValue(v) => v.set_name(name),\n\n BasicValueEnum::PointerValue(v) => v.set_name(name),\n\n BasicValueEnum::StructValue(v) => v.set_name(name),\n\n BasicValueEnum::VectorValue(v) => v.set_name(name),\n\n }\n\n}\n", "file_path": "lib/skc_codegen/src/lib.rs", "rank": 33, "score": 183956.06929557226 }, { "content": "pub fn string_literal(content: String) -> AstExpression {\n\n primary_expression(AstExpressionBody::StringLiteral { content })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 34, "score": 182808.18770750286 }, { "content": "fn add_args_from_env(cmd: &mut Command, key: &str) {\n\n for arg in env::var(key)\n\n .unwrap_or_else(|_| \"\".to_string())\n\n .split_ascii_whitespace()\n\n {\n\n cmd.arg(arg);\n\n }\n\n}\n", "file_path": "src/runner.rs", "rank": 35, "score": 179233.57979392802 }, { "content": "pub fn runner_error(msg: &str) 
-> anyhow::Error {\n\n Error::RunnerError {\n\n msg: msg.to_string(),\n\n backtrace: Backtrace::capture(),\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 36, "score": 177200.8616142068 }, { "content": "pub fn type_error(msg: &str) -> anyhow::Error {\n\n Error::TypeError {\n\n msg: msg.to_string(),\n\n backtrace: Backtrace::capture(),\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 37, "score": 177200.8616142068 }, { "content": "pub fn program_error(msg: &str) -> anyhow::Error {\n\n Error::ProgramError {\n\n msg: msg.to_string(),\n\n backtrace: Backtrace::capture(),\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 38, "score": 177200.8616142068 }, { "content": "fn run_<P: AsRef<Path>>(sk_path: P, capture_out: bool) -> Result<(String, String)> {\n\n let triple = targets::default_triple();\n\n let s = sk_path.as_ref().to_str().expect(\"failed to unwrap sk_path\");\n\n //let ll_path = s.to_string() + \".ll\";\n\n //let opt_ll_path = s.to_string() + \".opt.ll\";\n\n let bc_path = s.to_string() + \".bc\";\n\n //let asm_path = s.to_string() + \".s\";\n\n let out_path = s.to_string() + \".out\";\n\n\n\n // let mut cmd = Command::new(\"opt\");\n\n // cmd.arg(\"-O3\");\n\n // cmd.arg(ll_path);\n\n // cmd.arg(\"-o\");\n\n // cmd.arg(bc_path.clone());\n\n // let output = cmd.output()?;\n\n // if !output.stderr.is_empty() {\n\n // println!(\"{}\", String::from_utf8(output.stderr)?);\n\n // }\n\n //\n\n // let mut cmd = Command::new(\"llvm-dis\");\n", "file_path": "src/runner.rs", "rank": 39, "score": 175523.37573148494 }, { "content": "/// Execute compiled .ll and return the outputs (for tests)\n\npub fn run_and_capture<P: AsRef<Path>>(sk_path: P) -> Result<(String, String)> {\n\n run_(sk_path, true)\n\n}\n\n\n", "file_path": "src/runner.rs", "rank": 40, "score": 173722.6839711616 }, { "content": "// Make TermTy from ConstName\n\nfn convert_typ(typ: &ConstName, class_typarams: &[&String]) -> TermTy {\n\n if 
typ.args.is_empty() {\n\n let s = typ.names.join(\"::\");\n\n if let Some(i) = class_typarams.iter().position(|name| **name == s) {\n\n ty::typaram(s, ty::TyParamKind::Class, i)\n\n } else {\n\n ty::raw(&typ.names.join(\"::\"))\n\n }\n\n } else {\n\n let type_args = typ\n\n .args\n\n .iter()\n\n .map(|n| convert_typ(n, class_typarams))\n\n .collect();\n\n ty::spe(&typ.names.join(\"::\"), type_args)\n\n }\n\n}\n", "file_path": "lib/skc_ast2hir/src/rustlib_methods.rs", "rank": 41, "score": 171510.03083434596 }, { "content": "pub fn syntax_error(msg: &str) -> anyhow::Error {\n\n Error::SyntaxError {\n\n msg: msg.to_string(),\n\n backtrace: Backtrace::capture(),\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/error.rs", "rank": 42, "score": 169985.60131380835 }, { "content": "pub fn type_error(msg: &str) -> anyhow::Error {\n\n Error::TypeError {\n\n msg: msg.to_string(),\n\n backtrace: Backtrace::capture(),\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/error.rs", "rank": 43, "score": 169985.60131380835 }, { "content": "pub fn program_error(msg: &str) -> anyhow::Error {\n\n Error::ProgramError {\n\n msg: msg.to_string(),\n\n backtrace: Backtrace::capture(),\n\n }\n\n .into()\n\n}\n", "file_path": "lib/skc_ast2hir/src/error.rs", "rank": 44, "score": 169985.60131380835 }, { "content": "/// Collect class names in the program\n\npub fn create(\n\n toplevel_defs: &[&shiika_ast::Definition],\n\n initial_sk_classes: &SkClasses,\n\n imported_classes: &SkClasses,\n\n) -> ClassIndex {\n\n let mut cindex = HashMap::new();\n\n index_sk_classes(&mut cindex, initial_sk_classes);\n\n index_sk_classes(&mut cindex, imported_classes);\n\n index_toplevel_defs(&mut cindex, toplevel_defs);\n\n cindex\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/class_dict/class_index.rs", "rank": 45, "score": 167882.18533046517 }, { "content": "#[allow(clippy::reversed_empty_ranges)]\n\npub fn fn_items() -> Vec<ClassItem> {\n\n vec![\n\n fn_item!(0),\n\n 
fn_item!(1),\n\n fn_item!(2),\n\n fn_item!(3),\n\n fn_item!(4),\n\n fn_item!(5),\n\n fn_item!(6),\n\n fn_item!(7),\n\n fn_item!(8),\n\n fn_item!(9),\n\n ]\n\n}\n", "file_path": "lib/skc_corelib/src/fn_x.rs", "rank": 46, "score": 165403.9087676439 }, { "content": "fn ivars() -> HashMap<String, SkIVar> {\n\n let mut ivars = HashMap::new();\n\n ivars.insert(\n\n \"@func\".to_string(),\n\n SkIVar {\n\n name: \"@func\".to_string(),\n\n idx: 0,\n\n ty: ty::raw(\"Shiika::Internal::Ptr\"),\n\n readonly: true,\n\n },\n\n );\n\n ivars.insert(\n\n \"@the_self\".to_string(),\n\n SkIVar {\n\n name: \"@the_self\".to_string(),\n\n idx: 1,\n\n ty: ty::raw(\"Object\"),\n\n readonly: true,\n\n },\n\n );\n", "file_path": "lib/skc_corelib/src/fn_x.rs", "rank": 47, "score": 164477.67334240943 }, { "content": "pub fn unary_expr(expr: AstExpression, op: &str) -> AstExpression {\n\n primary_expression(AstExpressionBody::MethodCall {\n\n receiver_expr: Some(Box::new(expr)),\n\n method_name: method_firstname(op),\n\n arg_exprs: vec![],\n\n type_args: vec![],\n\n may_have_paren_wo_args: false,\n\n })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 49, "score": 160731.59236731936 }, { "content": "pub fn check_condition_ty(ty: &TermTy, on: &str) -> Result<()> {\n\n if *ty == ty::raw(\"Bool\") {\n\n Ok(())\n\n } else {\n\n Err(type_error!(\n\n \"{} condition must be bool but got {:?}\",\n\n on,\n\n ty.fullname\n\n ))\n\n }\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/type_checking.rs", "rank": 50, "score": 160731.59236731936 }, { "content": "fn index_sk_classes(cindex: &mut ClassIndex, sk_classes: &SkClasses) {\n\n for (name, class) in sk_classes {\n\n cindex.insert(name.clone(), class.typarams.clone());\n\n }\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/class_dict/class_index.rs", "rank": 51, "score": 158954.89301118447 }, { "content": "pub fn check_logical_operator_ty(ty: &TermTy, on: &str) -> Result<()> {\n\n if *ty == ty::raw(\"Bool\") {\n\n Ok(())\n\n } else {\n\n 
Err(type_error!(\"{} must be bool but got {:?}\", on, ty.fullname))\n\n }\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/type_checking.rs", "rank": 52, "score": 158780.27687734374 }, { "content": "// Parse signature into AstMethodSignature\n\nfn parse_signature(item: &(String, String)) -> (ClassFullname, AstMethodSignature) {\n\n let (classname, sig_str) = item;\n\n let ast_sig = Parser::parse_signature(sig_str).unwrap();\n\n (class_fullname(classname), ast_sig)\n\n}\n", "file_path": "lib/skc_corelib/src/rustlib_methods.rs", "rank": 53, "score": 157599.57267535626 }, { "content": "pub fn lambda_expr(params: Vec<Param>, exprs: Vec<AstExpression>, is_fn: bool) -> AstExpression {\n\n primary_expression(AstExpressionBody::LambdaExpr {\n\n params,\n\n exprs,\n\n is_fn,\n\n })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 54, "score": 155443.00149398728 }, { "content": "pub fn create<'hir_maker>(\n\n ast: &shiika_ast::Program,\n\n // Corelib classes (REFACTOR: corelib should provide methods only)\n\n initial_sk_classes: SkClasses,\n\n imported_classes: &'hir_maker SkClasses,\n\n) -> Result<ClassDict<'hir_maker>> {\n\n let defs = ast\n\n .toplevel_items\n\n .iter()\n\n .filter_map(|item| match item {\n\n shiika_ast::TopLevelItem::Def(x) => Some(x),\n\n shiika_ast::TopLevelItem::Expr(_) => None,\n\n })\n\n .collect::<Vec<_>>();\n\n let mut dict = ClassDict {\n\n class_index: class_index::create(&defs, &initial_sk_classes, imported_classes),\n\n sk_classes: initial_sk_classes,\n\n imported_classes,\n\n };\n\n dict.index_program(&defs)?;\n", "file_path": "lib/skc_ast2hir/src/class_dict.rs", "rank": 55, "score": 154980.56148003964 }, { "content": "/// Returns llvm function name of the given method\n\npub fn method_func_name(method_name: &MethodFullname) -> LlvmFuncName {\n\n LlvmFuncName(mangle_method(&method_name.full_name))\n\n}\n", "file_path": "lib/skc_codegen/src/utils.rs", "rank": 56, "score": 154433.69077929592 }, { "content": "/// Returns \"\" if the 
argument is empty.\n\n/// Returns a string like \"<A,B,C>\" otherwise.\n\nfn tyargs_str(type_args: &[TermTy]) -> String {\n\n if type_args.is_empty() {\n\n \"\".to_string()\n\n } else {\n\n let s = type_args\n\n .iter()\n\n .map(|x| x.fullname.0.to_string())\n\n .collect::<Vec<_>>()\n\n .join(\",\");\n\n format!(\"<{}>\", &s)\n\n }\n\n}\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 57, "score": 151775.27021483827 }, { "content": "pub fn bin_op_expr(left: AstExpression, op: &str, right: AstExpression) -> AstExpression {\n\n non_primary_expression(AstExpressionBody::MethodCall {\n\n receiver_expr: Some(Box::new(left)),\n\n method_name: method_firstname(op),\n\n arg_exprs: vec![right],\n\n type_args: vec![],\n\n may_have_paren_wo_args: false,\n\n })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 58, "score": 148278.4609717945 }, { "content": "fn index_toplevel_defs(cindex: &mut ClassIndex, toplevel_defs: &[&shiika_ast::Definition]) {\n\n let namespace = Namespace::root();\n\n for def in toplevel_defs {\n\n match def {\n\n shiika_ast::Definition::ClassDefinition {\n\n name,\n\n typarams,\n\n defs,\n\n ..\n\n } => index_class(cindex, &namespace, name, parse_typarams(typarams), defs),\n\n shiika_ast::Definition::EnumDefinition {\n\n name,\n\n typarams,\n\n cases,\n\n ..\n\n } => index_enum(cindex, &namespace, name, parse_typarams(typarams), cases),\n\n _ => (),\n\n }\n\n }\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/class_dict/class_index.rs", "rank": 59, "score": 147968.4840836993 }, { "content": "pub fn create_class_methods() -> Vec<SkMethod> {\n\n vec![\n\n create_method(\n\n \"Meta:Shiika::Internal::Memory\",\n\n \"gc_malloc(n_bytes: Int) -> Shiika::Internal::Ptr\",\n\n |code_gen, function| {\n\n let n_bytes = code_gen.unbox_int(code_gen.get_nth_param(function, 1));\n\n let n_bytes_64 =\n\n code_gen\n\n .builder\n\n .build_int_z_extend(n_bytes, code_gen.i64_type, \"n_bytes_64\");\n\n let mem = code_gen.call_llvm_func(\"shiika_malloc\", 
&[n_bytes_64.into()], \"mem\");\n\n let skptr = code_gen.box_i8ptr(mem);\n\n code_gen.build_return(&skptr);\n\n Ok(())\n\n },\n\n ),\n\n create_method(\n\n \"Meta:Shiika::Internal::Memory\",\n\n \"gc_realloc(ptr: Shiika::Internal::Ptr, n_bytes: Int) -> Shiika::Internal::Ptr\",\n", "file_path": "lib/skc_corelib/src/shiika_internal_memory.rs", "rank": 60, "score": 143648.4102248734 }, { "content": " class B : Array<Bool>; end\n\n \";\n\n test_class_dict(src, |class_dict| {\n\n let a = ty::raw(\"A\");\n\n let b = ty::raw(\"B\");\n\n assert!(!class_dict.conforms(&a, &b));\n\n })\n\n }\n\n\n\n #[test]\n\n fn test_conforms_void_func() -> Result<()> {\n\n let src = \"\";\n\n test_class_dict(src, |class_dict| {\n\n let a = ty::spe(\"Fn0\", vec![ty::raw(\"Int\")]);\n\n let b = ty::spe(\"Fn0\", vec![ty::raw(\"Void\")]);\n\n assert!(class_dict.conforms(&a, &b));\n\n })\n\n }\n\n\n\n #[test]\n", "file_path": "lib/skc_ast2hir/src/class_dict/query.rs", "rank": 61, "score": 140693.00907994076 }, { "content": "/// Returns signatures of corelib methods implemented in Rust\n\npub fn provided_methods() -> Vec<(ClassFullname, AstMethodSignature)> {\n\n load_methods_json()\n\n .unwrap()\n\n .iter()\n\n .map(parse_signature)\n\n .collect()\n\n}\n\n\n", "file_path": "lib/skc_corelib/src/rustlib_methods.rs", "rank": 62, "score": 139508.87370261303 }, { "content": "/// Returns complete list of corelib classes/methods i.e. 
both those\n\n/// implemented in Shiika and in Rust.\n\npub fn mix_with_corelib(corelib: Corelib) -> (SkClasses, SkMethods) {\n\n let rustlib_methods = make_rustlib_methods(&corelib);\n\n let mut sk_classes = corelib.sk_classes;\n\n let mut sk_methods = corelib.sk_methods;\n\n for (classname, m) in rustlib_methods.into_iter() {\n\n // Add to sk_classes\n\n let c = sk_classes\n\n .get_mut(&classname)\n\n .unwrap_or_else(|| panic!(\"not in sk_classes: {}\", &classname));\n\n let first_name = &m.signature.fullname.first_name;\n\n debug_assert!(!c.method_sigs.contains_key(first_name));\n\n c.method_sigs\n\n .insert(first_name.clone(), m.signature.clone());\n\n // Add to sk_methods\n\n let v = sk_methods\n\n .get_mut(&classname)\n\n .unwrap_or_else(|| panic!(\"not in sk_methods: {}\", &classname));\n\n v.push(m);\n\n }\n\n (sk_classes, sk_methods)\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/rustlib_methods.rs", "rank": 63, "score": 137595.73547875133 }, { "content": "// Make hir param from ast param\n\nfn convert_param(param: &shiika_ast::Param, class_typarams: &[&String]) -> MethodParam {\n\n MethodParam {\n\n name: param.name.to_string(),\n\n ty: convert_typ(&param.typ, class_typarams),\n\n }\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/rustlib_methods.rs", "rank": 64, "score": 134891.2946316415 }, { "content": "/// Create the type of return value of `.new` method of the class\n\npub fn return_type_of_new(classname: &ClassFullname, typarams: &[TyParam]) -> TermTy {\n\n if typarams.is_empty() {\n\n ty::raw(&classname.0)\n\n } else {\n\n let args = typarams\n\n .iter()\n\n .enumerate()\n\n .map(|(i, t)| typaram(&t.name, TyParamKind::Class, i))\n\n .collect::<Vec<_>>();\n\n ty::spe(&classname.0, args)\n\n }\n\n}\n\n\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 65, "score": 132044.26882170662 }, { "content": "// Make hir params from ast params\n\nfn convert_params(params: &[shiika_ast::Param], class_typarams: &[&String]) -> Vec<MethodParam> {\n\n params\n\n 
.iter()\n\n .map(|x| convert_param(x, class_typarams))\n\n .collect()\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/rustlib_methods.rs", "rank": 66, "score": 131386.97484429163 }, { "content": "// Read provided_methods.json\n\nfn load_methods_json() -> Result<Vec<(String, String)>> {\n\n let mut f = fs::File::open(\"lib/skc_rustlib/provided_methods.json5\")\n\n .context(\"./lib/skc_rustlib/provided_methods.json5 not found\")?;\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents)\n\n .context(\"failed to read provided_methods.json5\")?;\n\n json5::from_str(&contents).context(\"provided_methods.json5 is broken\")\n\n}\n\n\n", "file_path": "lib/skc_corelib/src/rustlib_methods.rs", "rank": 67, "score": 130731.00965845614 }, { "content": "/// Load ./builtin/*.sk into a String\n\nfn load_builtin() -> Result<String> {\n\n let mut s = String::new();\n\n let dir = fs::read_dir(\"builtin\").context(\"./builtin not found\")?;\n\n let mut files = vec![];\n\n for entry in dir {\n\n let pathbuf = entry?.path();\n\n let path = pathbuf\n\n .to_str()\n\n .ok_or_else(|| anyhow!(\"Filename not utf8\"))?;\n\n files.push(path.to_string());\n\n }\n\n files.sort();\n\n for path in files {\n\n if path.ends_with(\".sk\") {\n\n let src = fs::read_to_string(&path).context(format!(\"failed to load {}\", path))?;\n\n s += &src;\n\n }\n\n }\n\n Ok(s)\n\n}\n\n\n", "file_path": "src/runner.rs", "rank": 68, "score": 128751.34855833188 }, { "content": "fn class_props(mk: &HirMaker, cls: &TermTy) -> Result<Vec<(String, TermTy)>> {\n\n let (sig, _) =\n\n mk.class_dict\n\n .lookup_method(cls, &method_firstname(\"initialize\"), Default::default())?;\n\n Ok(sig\n\n .params\n\n .iter()\n\n .map(|x| (x.name.to_string(), x.ty.clone()))\n\n .collect())\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/pattern_match.rs", "rank": 69, "score": 128090.97820659372 }, { "content": "pub fn if_expr(\n\n cond_expr: AstExpression,\n\n then_exprs: Vec<AstExpression>,\n\n else_exprs: 
Option<Vec<AstExpression>>,\n\n) -> AstExpression {\n\n non_primary_expression(AstExpressionBody::If {\n\n cond_expr: Box::new(cond_expr),\n\n then_exprs,\n\n else_exprs,\n\n })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 70, "score": 125967.08040475319 }, { "content": "/// Compile hir and dump it to `outpath`\n\npub fn run(\n\n mir: &Mir,\n\n bc_path: &str,\n\n opt_ll_path: Option<&str>,\n\n generate_main: bool,\n\n opt_target_triple: Option<&inkwell::targets::TargetTriple>,\n\n) -> Result<()> {\n\n let context = inkwell::context::Context::create();\n\n let module = context.create_module(\"main\");\n\n if let Some(triple) = opt_target_triple {\n\n module.set_triple(triple);\n\n }\n\n let builder = context.create_builder();\n\n let mut code_gen = CodeGen::new(mir, &context, &module, &builder, &generate_main);\n\n code_gen.gen_program(&mir.hir, &mir.imports)?;\n\n code_gen.module.write_bitcode_to_path(Path::new(bc_path));\n\n if let Some(ll_path) = opt_ll_path {\n\n code_gen\n\n .module\n\n .print_to_file(ll_path)\n", "file_path": "lib/skc_codegen/src/lib.rs", "rank": 71, "score": 125967.08040475319 }, { "content": "pub fn method_call(\n\n receiver_expr: Option<AstExpression>,\n\n method_name: &str,\n\n arg_exprs: Vec<AstExpression>,\n\n type_args: Vec<AstExpression>,\n\n primary: bool,\n\n may_have_paren_wo_args: bool,\n\n) -> AstExpression {\n\n AstExpression {\n\n primary,\n\n body: AstExpressionBody::MethodCall {\n\n receiver_expr: receiver_expr.map(Box::new),\n\n method_name: method_firstname(method_name),\n\n arg_exprs,\n\n type_args,\n\n may_have_paren_wo_args,\n\n },\n\n }\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 72, "score": 124144.37364136879 }, { "content": "pub fn make_hir(\n\n ast: shiika_ast::Program,\n\n corelib: Option<Corelib>,\n\n imports: &LibraryExports,\n\n) -> Result<Hir> {\n\n let (core_classes, core_methods) = if let Some(c) = corelib {\n\n rustlib_methods::mix_with_corelib(c)\n\n } else {\n\n 
(Default::default(), Default::default())\n\n };\n\n let class_dict = class_dict::create(&ast, core_classes, &imports.sk_classes)?;\n\n\n\n let mut hir_maker = HirMaker::new(class_dict, &imports.constants);\n\n hir_maker.define_class_constants();\n\n let (main_exprs, main_lvars) = hir_maker.convert_toplevel_items(&ast.toplevel_items)?;\n\n let mut hir = hir_maker.extract_hir(main_exprs, main_lvars);\n\n\n\n // While corelib classes are included in `class_dict`,\n\n // corelib methods are not. Here we need to add them manually\n\n hir.add_methods(core_methods);\n\n\n\n Ok(hir)\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/lib.rs", "rank": 73, "score": 124144.37364136879 }, { "content": "/// Create a signature of a `initialize` method\n\npub fn signature_of_initialize(\n\n class_fullname: &ClassFullname,\n\n params: Vec<MethodParam>,\n\n) -> MethodSignature {\n\n MethodSignature {\n\n fullname: method_fullname(class_fullname, \"initialize\"),\n\n ret_ty: ty::raw(\"Void\"),\n\n params,\n\n typarams: vec![],\n\n }\n\n}\n", "file_path": "lib/skc_hir/src/signature.rs", "rank": 74, "score": 124144.37364136879 }, { "content": "/// Create a signature of a `new` method\n\npub fn signature_of_new(\n\n metaclass_fullname: &ClassFullname,\n\n initialize_params: Vec<MethodParam>,\n\n instance_ty: &TermTy,\n\n) -> MethodSignature {\n\n MethodSignature {\n\n fullname: method_fullname(metaclass_fullname, \"new\"),\n\n ret_ty: instance_ty.clone(),\n\n params: initialize_params,\n\n typarams: vec![],\n\n }\n\n}\n\n\n", "file_path": "lib/skc_hir/src/signature.rs", "rank": 75, "score": 124144.37364136879 }, { "content": "#[repr(C)]\n\n#[derive(Debug)]\n\nstruct ShiikaBool {\n\n vtable: *const u8,\n\n class_obj: *const u8,\n\n value: bool,\n\n}\n\n\n\nimpl From<SkBool> for bool {\n\n fn from(sk_bool: SkBool) -> Self {\n\n unsafe { (*sk_bool.0).value }\n\n }\n\n}\n\n\n\nimpl From<bool> for SkBool {\n\n fn from(b: bool) -> Self {\n\n unsafe { box_bool(b) }\n\n }\n\n}\n", "file_path": 
"lib/skc_rustlib/src/builtin/bool.rs", "rank": 76, "score": 122757.94248625469 }, { "content": "#[repr(C)]\n\n#[derive(Debug)]\n\nstruct ShiikaString {\n\n vtable: *const u8,\n\n class_obj: *const u8,\n\n ptr: SkPtr,\n\n bytesize: SkInt,\n\n}\n\n\n\nimpl From<String> for SkStr {\n\n /// Make a Shiika `String` from Rust `String`. `s` must not contain a null byte in it.\n\n fn from(s: String) -> Self {\n\n let bytesize = s.as_bytes().len() as i64;\n\n let cstring = CString::new(s).unwrap();\n\n let leaked = Box::leak(Box::new(cstring));\n\n unsafe { gen_literal_string(leaked.as_ptr() as *const u8, bytesize) }\n\n }\n\n}\n\n\n\nimpl SkStr {\n\n /// Returns byte slice\n\n // TODO: more Rust-y name?\n", "file_path": "lib/skc_rustlib/src/builtin/string.rs", "rank": 77, "score": 122382.91843235398 }, { "content": "/// Execute tests/sk/x.sk\n\n/// Fail if it prints something\n\nfn run_sk_test(path: &str) -> Result<()> {\n\n dbg!(&path);\n\n runner::compile(path)?;\n\n let (stdout, stderr) = runner::run_and_capture(path)?;\n\n assert_eq!(stderr, \"\");\n\n assert_eq!(stdout, \"ok\\n\");\n\n runner::cleanup(path)?;\n\n Ok(())\n\n}\n", "file_path": "tests/integration_test.rs", "rank": 78, "score": 121793.58076321601 }, { "content": "/// Create a `Corelib`\n\npub fn create() -> Corelib {\n\n let (sk_classes, sk_methods) = make_classes(rust_body_items());\n\n\n\n Corelib {\n\n sk_classes,\n\n sk_methods,\n\n }\n\n}\n\n\n", "file_path": "lib/skc_corelib/src/lib.rs", "rank": 79, "score": 121513.52151883885 }, { "content": "/// Create builtin.bc and exports.json from builtin/*.sk and skc_corelib\n\npub fn build_corelib() -> Result<(), Error> {\n\n let builtin = load_builtin()?;\n\n let ast = Parser::parse(&builtin)?;\n\n log::debug!(\"created ast\");\n\n let corelib = skc_corelib::create();\n\n log::debug!(\"loaded corelib\");\n\n let imports = Default::default();\n\n let hir = skc_ast2hir::make_hir(ast, Some(corelib), &imports)?;\n\n log::debug!(\"created hir\");\n\n let mir = 
skc_mir::build(hir, imports);\n\n log::debug!(\"created mir\");\n\n let exports = LibraryExports::new(&mir);\n\n let triple = targets::default_triple();\n\n skc_codegen::run(\n\n &mir,\n\n \"builtin/builtin.bc\",\n\n Some(\"builtin/builtin.ll\"),\n\n false,\n\n Some(&triple),\n\n )?;\n\n log::debug!(\"created .bc\");\n\n\n\n let json = serde_json::to_string_pretty(&exports).unwrap();\n\n let mut f = fs::File::create(\"builtin/exports.json\").unwrap();\n\n f.write_all(json.as_bytes()).unwrap();\n\n log::debug!(\"created .json\");\n\n Ok(())\n\n}\n\n\n", "file_path": "src/runner.rs", "rank": 80, "score": 120981.99232530483 }, { "content": "pub fn check_return_value(\n\n class_dict: &ClassDict,\n\n sig: &MethodSignature,\n\n ty: &TermTy,\n\n) -> Result<()> {\n\n if sig.ret_ty.is_void_type() {\n\n return Ok(());\n\n }\n\n let want = match &sig.ret_ty.body {\n\n TyBody::TyParamRef { lower_bound, .. } => {\n\n // To avoid errors like this. (I'm not sure this is the right way;\n\n // looks ad-hoc)\n\n // > TypeError: Maybe#expect should return TermTy(TyParamRef(V 0C)) but returns TermTy(TyParamRef(V 0C))\n\n if ty.equals_to(&sig.ret_ty) {\n\n return Ok(());\n\n }\n\n lower_bound\n\n }\n\n _ => &sig.ret_ty,\n\n };\n", "file_path": "lib/skc_ast2hir/src/type_checking.rs", "rank": 81, "score": 120741.46049158773 }, { "content": "pub fn convert_match_expr(\n\n mk: &mut HirMaker,\n\n cond: &AstExpression,\n\n ast_clauses: &[AstMatchClause],\n\n) -> Result<(HirExpression, HirLVars)> {\n\n let cond_expr = mk.convert_expr(cond)?;\n\n let tmp_name = mk.generate_lvar_name(\"expr\");\n\n let tmp_ref = Hir::lvar_ref(cond_expr.ty.clone(), tmp_name.clone());\n\n let mut clauses = ast_clauses\n\n .iter()\n\n .map(|clause| convert_match_clause(mk, &tmp_ref, clause))\n\n .collect::<Result<Vec<MatchClause>>>()?;\n\n let result_ty = calc_result_ty(mk, &mut clauses)?;\n\n let mut lvars = collect_lvars(&clauses);\n\n lvars.push((tmp_name.clone(), cond_expr.ty.clone()));\n\n\n\n let panic_msg = 
Hir::string_literal(mk.register_string_literal(\"no matching clause found\"));\n\n clauses.push(MatchClause {\n\n components: vec![],\n\n body_hir: Hir::expressions(vec![Hir::method_call(\n", "file_path": "lib/skc_ast2hir/src/pattern_match.rs", "rank": 82, "score": 120741.46049158773 }, { "content": "/// Check argument types of a method call\n\npub fn check_method_args(\n\n class_dict: &ClassDict,\n\n sig: &MethodSignature,\n\n arg_tys: &[&TermTy],\n\n receiver_hir: &HirExpression,\n\n arg_hirs: &[HirExpression],\n\n) -> Result<()> {\n\n check_method_arity(sig, arg_tys, receiver_hir, arg_hirs)?;\n\n check_arg_types(class_dict, sig, arg_tys, receiver_hir, arg_hirs)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/type_checking.rs", "rank": 83, "score": 120741.46049158773 }, { "content": "/// Check the type of the argument of `return`\n\npub fn check_return_arg_type(\n\n class_dict: &ClassDict,\n\n return_arg_ty: &TermTy,\n\n method_sig: &MethodSignature,\n\n) -> Result<()> {\n\n if class_dict.conforms(return_arg_ty, &method_sig.ret_ty) {\n\n Ok(())\n\n } else {\n\n Err(type_error!(\n\n \"method {} should return {} but returns {}\",\n\n &method_sig.fullname,\n\n &method_sig.ret_ty,\n\n &return_arg_ty\n\n ))\n\n }\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/type_checking.rs", "rank": 84, "score": 119150.73298603034 }, { "content": "pub fn break_expr() -> AstExpression {\n\n non_primary_expression(AstExpressionBody::Break {})\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 85, "score": 118110.6083690578 }, { "content": "/// Returns default `TargetTriple`\n\npub fn default_triple() -> inkwell::targets::TargetTriple {\n\n if let Ok(info) = mac_sys_info::get_mac_sys_info() {\n\n // #281: get_default_triple returns `darwin` but clang shows warning for it\n\n let arch = info.cpu_info().architecture();\n\n let ver = info.os_info().os_version();\n\n let s = format!(\"{}-apple-macosx{}\", arch, ver);\n\n 
inkwell::targets::TargetTriple::create(&s)\n\n } else {\n\n inkwell::targets::TargetMachine::get_default_triple()\n\n }\n\n}\n", "file_path": "src/targets.rs", "rank": 86, "score": 115300.17801940258 }, { "content": "// Convert ast_sig into hir_sig\n\nfn make_hir_sig(class: &SkClass, ast_sig: &AstMethodSignature) -> MethodSignature {\n\n let class_typarams = class.typarams.iter().map(|x| &x.name).collect::<Vec<_>>();\n\n let fullname = method_fullname(&class.fullname, &ast_sig.name.0);\n\n let ret_ty = if let Some(typ) = &ast_sig.ret_typ {\n\n convert_typ(typ, &class_typarams)\n\n } else {\n\n ty::raw(\"Void\")\n\n };\n\n let params = convert_params(&ast_sig.params, &class_typarams);\n\n MethodSignature {\n\n fullname,\n\n ret_ty,\n\n params,\n\n // TODO: Fix this when a rustlib method has method typaram\n\n typarams: Default::default(),\n\n }\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/rustlib_methods.rs", "rank": 87, "score": 114431.50075904341 }, { "content": "/// Create `expr.class == cls`\n\nfn test_class(value: &HirExpression, base_ty: &TermTy) -> HirExpression {\n\n let cls_ref = Hir::const_ref(base_ty.meta_ty(), base_ty.fullname.to_const_fullname());\n\n Hir::method_call(\n\n ty::raw(\"Bool\"),\n\n Hir::method_call(\n\n ty::raw(\"Class\"),\n\n value.clone(),\n\n method_fullname_raw(\"Object\", \"class\"),\n\n vec![],\n\n ),\n\n method_fullname_raw(\"Class\", \"==\"),\n\n vec![cls_ref],\n\n )\n\n}\n", "file_path": "lib/skc_ast2hir/src/pattern_match.rs", "rank": 88, "score": 112222.46857451269 }, { "content": "/// Format `type_args` with .dbg_str\n\nfn _dbg_type_args(type_args: &[TermTy]) -> String {\n\n if type_args.is_empty() {\n\n \"\".to_string()\n\n } else {\n\n let s = type_args\n\n .iter()\n\n .map(|x| x.dbg_str())\n\n .collect::<Vec<_>>()\n\n .join(\", \");\n\n format!(\"<{}>\", &s)\n\n }\n\n}\n\n\n\nimpl TermTy {\n\n /// Return string to inspect `self`\n\n fn dbg_str(&self) -> String {\n\n match &self.body {\n\n TyRaw(LitTy {\n\n base_name,\n\n 
type_args,\n", "file_path": "lib/shiika_core/src/ty/term_ty.rs", "rank": 89, "score": 111705.5303245302 }, { "content": "pub fn create_methods() -> Vec<SkMethod> {\n\n vec![\n\n create_method(\n\n \"Shiika::Internal::Ptr\",\n\n \"+(n_bytes: Int) -> Shiika::Internal::Ptr\",\n\n |code_gen, function| {\n\n let ptr = code_gen.unbox_i8ptr(code_gen.get_nth_param(function, 0));\n\n let n_bytes = code_gen.unbox_int(code_gen.get_nth_param(function, 1));\n\n let newptr = unsafe { code_gen.builder.build_gep(ptr.0, &[n_bytes], \"newptr\") };\n\n let skptr = code_gen.box_i8ptr(newptr.into());\n\n code_gen.build_return(&skptr);\n\n Ok(())\n\n },\n\n ),\n\n create_method(\n\n \"Shiika::Internal::Ptr\",\n\n \"store(value: Object)\",\n\n |code_gen, function| {\n\n let i8ptr = code_gen.unbox_i8ptr(code_gen.get_nth_param(function, 0));\n\n let obj_ptr_type = code_gen.llvm_type(&ty::raw(\"Object\")).into_pointer_type();\n", "file_path": "lib/skc_corelib/src/shiika_internal_ptr.rs", "rank": 90, "score": 111182.62639860573 }, { "content": "#[derive(Debug)]\n\nstruct LVarInfo {\n\n ty: TermTy,\n\n detail: LVarDetail,\n\n}\n", "file_path": "lib/skc_ast2hir/src/convert_exprs.rs", "rank": 91, "score": 110891.76968529807 }, { "content": "pub fn pseudo_variable(token: Token) -> AstExpression {\n\n primary_expression(AstExpressionBody::PseudoVariable(token))\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 92, "score": 110523.40004326505 }, { "content": "pub fn decimal_literal(value: i64) -> AstExpression {\n\n primary_expression(AstExpressionBody::DecimalLiteral { value })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 93, "score": 110523.40004326505 }, { "content": "pub fn logical_not(expr: AstExpression) -> AstExpression {\n\n non_primary_expression(AstExpressionBody::LogicalNot {\n\n expr: Box::new(expr),\n\n })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 94, "score": 110523.40004326505 }, { "content": "pub fn float_literal(value: f64) -> 
AstExpression {\n\n primary_expression(AstExpressionBody::FloatLiteral { value })\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 95, "score": 110523.40004326505 }, { "content": "fn index_class(\n\n cindex: &mut ClassIndex,\n\n namespace: &Namespace,\n\n firstname: &ClassFirstname,\n\n typarams: Vec<ty::TyParam>,\n\n defs: &[shiika_ast::Definition],\n\n) {\n\n let fullname = namespace.class_fullname(firstname);\n\n cindex.insert(fullname, typarams);\n\n let inner_namespace = namespace.add(firstname);\n\n for def in defs {\n\n match def {\n\n shiika_ast::Definition::ClassDefinition {\n\n name,\n\n typarams,\n\n defs,\n\n ..\n\n } => {\n\n index_class(\n\n cindex,\n", "file_path": "lib/skc_ast2hir/src/class_dict/class_index.rs", "rank": 96, "score": 109790.15155986212 }, { "content": "/// Shortcut for Array<T>\n\npub fn ary(type_arg: TermTy) -> TermTy {\n\n spe(\"Array\", vec![type_arg])\n\n}\n\n\n", "file_path": "lib/shiika_core/src/ty.rs", "rank": 97, "score": 109063.55911199129 }, { "content": "pub fn primary_expression(body: AstExpressionBody) -> AstExpression {\n\n AstExpression {\n\n primary: true,\n\n body,\n\n }\n\n}\n\n\n", "file_path": "lib/shiika_ast/src/lib.rs", "rank": 98, "score": 107663.18479890854 }, { "content": "/// Destructively bitcast body_hir\n\nfn bitcast_match_clause_body(c: &mut MatchClause, ty: TermTy) {\n\n let mut tmp = Hir::expressions(Default::default());\n\n std::mem::swap(&mut tmp, &mut c.body_hir);\n\n tmp = tmp.bitcast_to(ty);\n\n std::mem::swap(&mut tmp, &mut c.body_hir);\n\n}\n\n\n", "file_path": "lib/skc_ast2hir/src/pattern_match.rs", "rank": 99, "score": 107577.46154641957 } ]
Rust
client/src/command_receiver.rs
amethyst/naia
791d5c90ce3435c254da1a6048064c2204ff538c
use std::collections::{HashMap, VecDeque}; use naia_shared::{ wrapping_diff, EntityType, ProtocolType, Ref, Replicate, SequenceBuffer, SequenceIterator, WorldMutType, }; use super::{entity_manager::EntityManager, event::OwnedEntity}; const COMMAND_HISTORY_SIZE: u16 = 64; #[derive(Debug)] pub struct CommandReceiver<P: ProtocolType, K: EntityType> { queued_incoming_commands: VecDeque<(u16, OwnedEntity<K>, Ref<dyn Replicate<P>>)>, command_history: HashMap<K, SequenceBuffer<Ref<dyn Replicate<P>>>>, queued_command_replays: VecDeque<(u16, OwnedEntity<K>, Ref<dyn Replicate<P>>)>, replay_trigger: HashMap<K, u16>, } impl<P: ProtocolType, K: EntityType> CommandReceiver<P, K> { pub fn new() -> Self { CommandReceiver { queued_incoming_commands: VecDeque::new(), command_history: HashMap::new(), queued_command_replays: VecDeque::new(), replay_trigger: HashMap::new(), } } pub fn pop_command(&mut self) -> Option<(u16, OwnedEntity<K>, Ref<dyn Replicate<P>>)> { self.queued_incoming_commands.pop_front() } pub fn pop_command_replay(&mut self) -> Option<(u16, OwnedEntity<K>, Ref<dyn Replicate<P>>)> { self.queued_command_replays.pop_front() } pub fn process_command_replay<W: WorldMutType<P, K>>( &mut self, world: &mut W, entity_manager: &mut EntityManager<P, K>, ) { for (world_entity, history_tick) in self.replay_trigger.iter() { if let Some(predicted_entity) = entity_manager.get_predicted_entity(world_entity) { entity_manager.prediction_reset_entity(world, world_entity); if let Some(command_buffer) = self.command_history.get_mut(&world_entity) { self.queued_incoming_commands.clear(); self.queued_command_replays.clear(); let current_tick = command_buffer.sequence_num(); for tick in *history_tick..=current_tick { if let Some(command) = command_buffer.get_mut(tick) { self.queued_command_replays.push_back(( tick, OwnedEntity::new(world_entity, &predicted_entity), command.clone(), )); } } } } } self.replay_trigger.clear(); } pub fn queue_command( &mut self, host_tick: u16, owned_entity: 
OwnedEntity<K>, command: Ref<dyn Replicate<P>>, ) { let world_entity = owned_entity.confirmed; self.queued_incoming_commands .push_back((host_tick, owned_entity, command.clone())); if let Some(command_buffer) = self.command_history.get_mut(&world_entity) { command_buffer.insert(host_tick, command); } } pub fn command_history_count(&self, owned_entity: &K) -> u8 { if let Some(command_buffer) = self.command_history.get(owned_entity) { return command_buffer.get_entries_count(); } return 0; } pub fn command_history_iter( &self, owned_entity: &K, reverse: bool, ) -> Option<SequenceIterator<Ref<dyn Replicate<P>>>> { if let Some(command_buffer) = self.command_history.get(owned_entity) { return Some(command_buffer.iter(reverse)); } return None; } pub fn replay_commands(&mut self, history_tick: u16, owned_entity: &K) { if let Some(tick) = self.replay_trigger.get_mut(owned_entity) { if wrapping_diff(*tick, history_tick) > 0 { *tick = history_tick; } } else { self.replay_trigger.insert(*owned_entity, history_tick); } } pub fn remove_history_until(&mut self, history_tick: u16, owned_entity: &K) { if let Some(command_buffer) = self.command_history.get_mut(owned_entity) { command_buffer.remove_until(history_tick); } } pub fn prediction_init(&mut self, owned_entity: &K) { self.command_history.insert( *owned_entity, SequenceBuffer::with_capacity(COMMAND_HISTORY_SIZE), ); } pub fn prediction_cleanup(&mut self, owned_entity: &K) { self.command_history.remove(owned_entity); } }
use std::collections::{HashMap, VecDeque}; use naia_shared::{ wrapping_diff, EntityType, ProtocolType, Ref, Replicate, SequenceBuffer, SequenceIterator, WorldMutType, }; use super::{entity_manager::EntityManager, event::OwnedEntity}; const COMMAND_HISTORY_SIZE: u16 = 64; #[derive(Debug)] pub struct CommandReceiver<P: ProtocolType, K: EntityType> { queued_incoming_commands: VecDeque<(u16, OwnedEntity<K>, Ref<dyn Replicate<P>>)>, command_history: HashMap<K, SequenceBuffer<Ref<dyn Replicate<P>>>>, queued_command_replays: VecDeque<(u16, OwnedEntity<K>, Ref<dyn Replicate<P>>)>, replay_trigger: HashMap<K, u16>, } impl<P: ProtocolType, K: EntityType> CommandReceiver<P, K> { pub fn new() -> Self { CommandReceiver { queued_incoming_commands: VecDeque::new(), command_history: HashMap::new(), queued_command_replays: VecDeque::new(), replay_trigger: HashMap::new(), } } pub fn pop_command(&mut self) -> Option<(u16, OwnedEntity<K>, Ref<dyn Replicate<P>>)> { self.queued_incoming_commands.pop_front() } pub fn pop_command_replay(&mut self) -> Option<(u16, OwnedEntity<K>, Ref<dyn Replicate<P>>)> { self.queued_command_replays.pop_front() } pub fn process_command_replay<W: WorldMutType<P, K>>( &mut self, world: &mut W, entity_manager: &mut EntityManager<P, K>, ) { for (world_entity, history_tick) in self.replay_trigger.iter() { if let Some(predicted_entity) = entity_manager.get_predicted_entity(world_entity) { entity_manager.prediction_reset_entity(world, world_entity); if let Some(command_buffer) = self.command_history.get_mut(&world_entity) { self.queued_incoming_commands.clear(); self.queued_command_replays.clear(); let current_tick = command_buffer.sequence_num(); for tick in *history_tick..=current_tick { if let Some(command) = command_buffer.get_mut(tick) { self.queued_command_replays.push_back(( tick, OwnedEntity::new(world_entity, &predicted_entity), command.clone(), )); } } } } } self.replay_trigger.clear(); } pub fn queue_command( &mut self, host_tick: u16, owned_entity: 
OwnedEntity<K>, command: Ref<dyn Replicate<P>>, ) { let world_entity = owned_entity.confirmed; self.queued_incoming_commands .push_back((host_tick, owned_entity, command.clone())); if let Some(command_buffer) = self.command_history.get_mut(&world_entity) { command_buffer.insert(host_tick, command); } } pub fn command_history_count(&self, owned_entity: &K) -> u8 { if let Some(command_buffer) = self.command_history.get(owned_entity) { return command_buffer.get_entries_count(); } return 0; } pub fn command_history_iter( &self, owned_entity: &K, reverse: bool, ) -> Option<SequenceIterator<Ref<dyn Replicate<P>>>> { if let Some(command_buffer) = self.command_history.get(owned_entity) { return Some(command_buffer.iter(reverse)); } return None; } pub fn replay_commands(&mut self, history_tick: u16, owned_entity: &K) { if let Some(tick) = self.replay_trigger.get_mut(owned_entity) { if wrapping_diff(*tick, history_tick) > 0 { *tick = history_tick; } } else { self.replay_trigger.insert(*owned_entity, history_tick); } } pub fn remove_history_until(&mut self, history_tick: u16, owned_entity: &K) {
owned_entity: &K) { self.command_history.insert( *owned_entity, SequenceBuffer::with_capacity(COMMAND_HISTORY_SIZE), ); } pub fn prediction_cleanup(&mut self, owned_entity: &K) { self.command_history.remove(owned_entity); } }
if let Some(command_buffer) = self.command_history.get_mut(owned_entity) { command_buffer.remove_until(history_tick); } } pub fn prediction_init(&mut self,
random
[ { "content": "pub fn before_receive_events(world: &mut World) {\n\n world.resource_scope(|world, mut client: Mut<Client<Entity>>| {\n\n\n\n // Host Component Updates\n\n let mut host_component_event_reader = world\n\n .get_resource_mut::<Events<HostSyncEvent>>()\n\n .unwrap();\n\n let host_component_events: Vec<HostSyncEvent> = host_component_event_reader.drain().collect();\n\n for event in host_component_events {\n\n match event {\n\n HostSyncEvent::Insert(entity, component_kind) => {\n\n let mut world_proxy = world.proxy_mut();\n\n let Some(mut component_mut) = world_proxy.component_mut_of_kind(&entity, &component_kind) else {\n\n continue;\n\n };\n\n client.insert_component_worldless(&entity, DerefMut::deref_mut(&mut component_mut));\n\n }\n\n HostSyncEvent::Remove(entity, component_kind) => {\n\n client.remove_component_worldless(&entity, &component_kind);\n\n }\n", "file_path": "adapters/bevy/client/src/systems.rs", "rank": 0, "score": 273544.92779516155 }, { "content": "pub fn before_receive_events(world: &mut World) {\n\n world.resource_scope(|world, mut server: Mut<Server<Entity>>| {\n\n if !server.is_listening() {\n\n return;\n\n }\n\n\n\n // Host Component Updates\n\n let mut host_component_event_reader = world\n\n .get_resource_mut::<Events<HostSyncEvent>>()\n\n .unwrap();\n\n let host_component_events: Vec<HostSyncEvent> = host_component_event_reader.drain().collect();\n\n for event in host_component_events {\n\n match event {\n\n HostSyncEvent::Insert(entity, component_kind) => {\n\n if server.entity_authority_status(&entity) == Some(EntityAuthStatus::Denied) {\n\n // if auth status is denied, that means the client is performing this operation and it's already being handled\n\n continue;\n\n }\n\n let mut world_proxy = world.proxy_mut();\n\n let Some(mut component_mut) = world_proxy.component_mut_of_kind(&entity, &component_kind) else {\n", "file_path": "adapters/bevy/server/src/systems.rs", "rank": 1, "score": 273544.9277951615 }, { "content": "pub 
trait SerdeIntegerConversion<const SIGNED: bool, const VARIABLE: bool, const BITS: u8> {\n\n fn from(value: &SerdeInteger<SIGNED, VARIABLE, BITS>) -> Self;\n\n}\n\n\n\n// Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{\n\n bit_reader::BitReader,\n\n bit_writer::BitWriter,\n\n integer::{SignedInteger, SignedVariableInteger, UnsignedInteger, UnsignedVariableInteger},\n\n serde::Serde,\n\n };\n\n\n\n #[test]\n\n fn in_and_out() {\n\n let in_u16: u16 = 123;\n\n let middle = UnsignedInteger::<9>::new(in_u16);\n\n let out_u16: u16 = middle.get() as u16;\n", "file_path": "shared/serde/src/integer.rs", "rank": 2, "score": 267080.04165191425 }, { "content": "/// Returns whether or not a wrapping number is greater than another\n\n/// sequence_less_than(1,2) will return true\n\n/// sequence_less_than(2,1) will return false\n\n/// sequence_less_than(1,1) will return false\n\npub fn sequence_less_than(s1: u16, s2: u16) -> bool {\n\n sequence_greater_than(s2, s1)\n\n}\n\n\n", "file_path": "shared/src/wrapping_number.rs", "rank": 3, "score": 261329.6050305408 }, { "content": "/// Returns whether or not a wrapping number is greater than another\n\n/// sequence_greater_than(2,1) will return true\n\n/// sequence_greater_than(1,2) will return false\n\n/// sequence_greater_than(1,1) will return false\n\npub fn sequence_greater_than(s1: u16, s2: u16) -> bool {\n\n ((s1 > s2) && (s1 - s2 <= 32768)) || ((s1 < s2) && (s2 - s1 > 32768))\n\n}\n\n\n", "file_path": "shared/src/wrapping_number.rs", "rank": 4, "score": 261329.6050305408 }, { "content": "pub fn init(mut commands: Commands, mut server: Server) {\n\n info!(\"Naia Bevy Server Demo is running\");\n\n\n\n // Naia Server initialization\n\n let server_addresses = webrtc::ServerAddrs::new(\n\n \"127.0.0.1:14191\"\n\n .parse()\n\n .expect(\"could not parse Signaling address/port\"),\n\n // IP Address to listen on for UDP WebRTC data channels\n\n \"127.0.0.1:14192\"\n\n .parse()\n\n .expect(\"could not parse WebRTC data 
address/port\"),\n\n // The public WebRTC IP address to advertise\n\n \"http://127.0.0.1:14192\",\n\n );\n\n let socket = webrtc::Socket::new(&server_addresses, server.socket_config());\n\n server.listen(socket);\n\n\n\n // Create a new, singular room, which will contain Users and Entities that they\n\n // can receive updates from\n", "file_path": "demos/bevy/server/src/systems/init.rs", "rank": 5, "score": 240343.03646205692 }, { "content": "fn world_data_unchecked_mut(world: &mut World) -> Mut<WorldData> {\n\n unsafe {\n\n return world\n\n .as_unsafe_world_cell()\n\n .get_resource_mut::<WorldData>()\n\n .expect(\"Need to instantiate by adding WorldData<Protocol> resource at startup!\");\n\n }\n\n}\n", "file_path": "adapters/bevy/shared/src/world_proxy.rs", "rank": 6, "score": 237029.3978343738 }, { "content": "fn has_component<R: Replicate>(world: &World, entity: &Entity) -> bool {\n\n if let Some(component_map) = world.entities.get(entity) {\n\n return component_map.contains_key(&ComponentKind::of::<R>());\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "demos/demo_utils/demo_world/src/world.rs", "rank": 7, "score": 235661.29981147003 }, { "content": "fn has_component<R: Replicate>(world: &World, entity: &Entity) -> bool {\n\n let result = world.get::<&R>(*entity);\n\n result.is_ok()\n\n}\n\n\n", "file_path": "adapters/hecs/shared/src/world_proxy.rs", "rank": 8, "score": 234901.33570639917 }, { "content": "fn has_component<R: Replicate>(world: &World, entity: &Entity) -> bool {\n\n return world.get::<R>(*entity).is_some();\n\n}\n\n\n", "file_path": "adapters/bevy/shared/src/world_proxy.rs", "rank": 9, "score": 234901.33570639917 }, { "content": "fn has_component<R: Replicate>(world: &World, entity: &Entity) -> bool {\n\n let result = world.get::<&R>(*entity);\n\n result.is_ok()\n\n}\n\n\n", "file_path": "adapters/hecs/shared/src/world_wrapper.rs", "rank": 10, "score": 234901.33570639917 }, { "content": "/// Retrieves the wrapping difference between 2 u16 
values\n\n/// wrapping_diff(1,2) will return 1\n\n/// wrapping_diff(2,1) will return -1\n\n/// wrapping_diff(65535,0) will return 1\n\n/// wrapping_diff(0,65535) will return -1\n\npub fn wrapping_diff(a: u16, b: u16) -> i16 {\n\n const MAX: i32 = std::i16::MAX as i32;\n\n const MIN: i32 = std::i16::MIN as i32;\n\n const ADJUST: i32 = (std::u16::MAX as i32) + 1;\n\n\n\n let a: i32 = i32::from(a);\n\n let b: i32 = i32::from(b);\n\n\n\n let mut result = b - a;\n\n if (MIN..=MAX).contains(&result) {\n\n result as i16\n\n } else if b > a {\n\n result = b - (a + ADJUST);\n\n if (MIN..=MAX).contains(&result) {\n\n result as i16\n\n } else {\n\n panic!(\"integer overflow, this shouldn't happen\")\n\n }\n\n } else {\n\n result = (b + ADJUST) - a;\n", "file_path": "shared/src/wrapping_number.rs", "rank": 11, "score": 228456.56893345155 }, { "content": "pub fn march_and_mark(app: &mut App) {\n\n if !app.has_user {\n\n return;\n\n }\n\n // march entities across the screen\n\n let mut entities_to_add: Vec<Entity> = Vec::new();\n\n let mut entities_to_remove: Vec<Entity> = Vec::new();\n\n let mut entities_to_delete: Vec<Entity> = Vec::new();\n\n let mut entities_to_respawn: Vec<Entity> = Vec::new();\n\n\n\n for (entity, position) in app.world.query_mut::<&mut Position>() {\n\n *position.x += 1;\n\n\n\n if *position.x == 100 {\n\n entities_to_add.push(entity);\n\n }\n\n if *position.x == 150 {\n\n entities_to_remove.push(entity);\n\n }\n\n if *position.x > 250 {\n", "file_path": "demos/hecs/server/src/systems/tick.rs", "rank": 12, "score": 223777.1700795941 }, { "content": "pub fn check_scopes(app: &mut App) {\n\n // Update scopes of entities\n\n let server = &mut app.server;\n\n let world = &app.world;\n\n for (_, user_key, entity) in server.scope_checks() {\n\n if let Ok(entity_ref) = world.entity(entity) {\n\n if let Some(position) = entity_ref.get::<&Position>() {\n\n let x = *position.x;\n\n\n\n if (50..=200).contains(&x) {\n\n 
server.user_scope(&user_key).include(&entity);\n\n } else {\n\n server.user_scope(&user_key).exclude(&entity);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "demos/hecs/server/src/systems/tick.rs", "rank": 13, "score": 223777.1700795941 }, { "content": "pub fn send_updates(app: &mut App) {\n\n // VERY IMPORTANT! Calling this actually sends all update data\n\n // packets to all Clients that require it. If you don't call this\n\n // method, the Server will never communicate with it's connected Clients\n\n app.server.send_all_updates(&app.world);\n\n}\n", "file_path": "demos/hecs/server/src/systems/tick.rs", "rank": 14, "score": 223777.1700795941 }, { "content": "pub fn process_command(key_command: &KeyCommand, position: &mut Position) {\n\n if key_command.w {\n\n *position.y = position.y.wrapping_sub(SQUARE_SPEED);\n\n }\n\n if key_command.s {\n\n *position.y = position.y.wrapping_add(SQUARE_SPEED);\n\n }\n\n if key_command.a {\n\n *position.x = position.x.wrapping_sub(SQUARE_SPEED);\n\n }\n\n if key_command.d {\n\n *position.x = position.x.wrapping_add(SQUARE_SPEED);\n\n }\n\n}\n", "file_path": "demos/macroquad/shared/src/behavior/process_command.rs", "rank": 15, "score": 223033.352405932 }, { "content": "pub fn process_command(key_command: &KeyCommand, position: &mut Position) {\n\n if key_command.w {\n\n *position.y = position.y.wrapping_add(SQUARE_SPEED);\n\n }\n\n if key_command.s {\n\n *position.y = position.y.wrapping_sub(SQUARE_SPEED);\n\n }\n\n if key_command.a {\n\n *position.x = position.x.wrapping_sub(SQUARE_SPEED);\n\n }\n\n if key_command.d {\n\n *position.x = position.x.wrapping_add(SQUARE_SPEED);\n\n }\n\n}\n", "file_path": "demos/bevy/shared/src/behavior/process_command.rs", "rank": 16, "score": 223033.352405932 }, { "content": "pub fn get_new_complete_method(\n\n replica_name: &Ident,\n\n enum_name: &Ident,\n\n properties: &[Property],\n\n struct_type: &StructType,\n\n) -> TokenStream {\n\n let mut args = quote! 
{};\n\n for property in properties.iter() {\n\n match property {\n\n Property::Normal(property) => {\n\n let field_name = &property.variable_name;\n\n let field_type = &property.inner_type;\n\n\n\n let new_output_right = quote! {\n\n #field_name: #field_type,\n\n };\n\n\n\n let new_output_result = quote! {\n\n #args #new_output_right\n\n };\n", "file_path": "shared/derive/src/replicate.rs", "rank": 17, "score": 221386.94211180243 }, { "content": "fn has_entity(world: &World, entity: &Entity) -> bool {\n\n world.entities.contains_key(entity)\n\n}\n\n\n", "file_path": "demos/demo_utils/demo_world/src/world.rs", "rank": 18, "score": 211961.95659143396 }, { "content": "// private static methods\n\nfn has_entity(world: &World, entity: &Entity) -> bool {\n\n world.contains(*entity)\n\n}\n\n\n", "file_path": "adapters/hecs/shared/src/world_proxy.rs", "rank": 19, "score": 210741.82003282406 }, { "content": "fn has_entity(world: &World, entity: &Entity) -> bool {\n\n world.contains(*entity)\n\n}\n\n\n", "file_path": "adapters/hecs/shared/src/world_wrapper.rs", "rank": 20, "score": 210741.82003282406 }, { "content": "fn has_entity(world: &World, entity: &Entity) -> bool {\n\n return world.get_entity(*entity).is_some();\n\n}\n\n\n", "file_path": "adapters/bevy/shared/src/world_proxy.rs", "rank": 21, "score": 210741.82003282406 }, { "content": "pub fn get_dyn_ref_method() -> TokenStream {\n\n quote! {\n\n fn dyn_ref(&self) -> ReplicaDynRef<'_> {\n\n return ReplicaDynRef::new(self);\n\n }\n\n }\n\n}\n\n\n", "file_path": "shared/derive/src/replicate.rs", "rank": 22, "score": 208729.44370113226 }, { "content": "pub fn get_dyn_mut_method() -> TokenStream {\n\n quote! 
{\n\n fn dyn_mut(&mut self) -> ReplicaDynMut<'_> {\n\n return ReplicaDynMut::new(self);\n\n }\n\n }\n\n}\n\n\n", "file_path": "shared/derive/src/replicate.rs", "rank": 23, "score": 208653.5354872524 }, { "content": "pub trait ReplicaMutTrait<R: Replicate>: ReplicaRefTrait<R> {\n\n fn to_mut(&mut self) -> &mut R;\n\n}\n\n\n\n// ReplicaMutWrapper\n\n\n\npub struct ReplicaMutWrapper<'a, R: Replicate> {\n\n inner: Box<dyn ReplicaMutTrait<R> + 'a>,\n\n}\n\n\n\nimpl<'a, R: Replicate> ReplicaMutWrapper<'a, R> {\n\n pub fn new<I: ReplicaMutTrait<R> + 'a>(inner: I) -> Self {\n\n Self {\n\n inner: Box::new(inner),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, R: Replicate> Deref for ReplicaMutWrapper<'a, R> {\n\n type Target = R;\n", "file_path": "shared/src/world/component/replica_ref.rs", "rank": 24, "score": 208594.18889523306 }, { "content": "pub fn replicate_impl(\n\n input: proc_macro::TokenStream,\n\n shared_crate_name: TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n // Helper Properties\n\n let properties = get_properties(&input);\n\n let struct_type = get_struct_type(&input);\n\n\n\n // Names\n\n let replica_name = input.ident.clone();\n\n let replica_name_str = LitStr::new(&replica_name.to_string(), replica_name.span());\n\n let lowercase_replica_name = Ident::new(\n\n replica_name.to_string().to_lowercase().as_str(),\n\n Span::call_site(),\n\n );\n\n let module_name = format_ident!(\"define_{}\", lowercase_replica_name);\n\n let enum_name = format_ident!(\"{}Property\", replica_name);\n\n let builder_name = format_ident!(\"{}Builder\", replica_name);\n", "file_path": "shared/derive/src/replicate.rs", "rank": 25, "score": 198287.58376568698 }, { "content": "fn has_component_of_type(world: &World, entity: &Entity, component_kind: &ComponentKind) -> bool {\n\n if let Some(component_map) = world.entities.get(entity) {\n\n return component_map.contains_key(component_kind);\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": 
"demos/demo_utils/demo_world/src/world.rs", "rank": 26, "score": 193572.84336388175 }, { "content": "fn has_component_of_kind(world: &World, entity: &Entity, component_kind: &ComponentKind) -> bool {\n\n return world\n\n .entity(*entity)\n\n .contains_type_id(<ComponentKind as Into<TypeId>>::into(*component_kind));\n\n}\n\n\n", "file_path": "adapters/bevy/shared/src/world_proxy.rs", "rank": 27, "score": 191483.28781931574 }, { "content": "/// Structures that implement the WorldMutType trait will be able to be loaded\n\n/// into the Server at which point the Server will use this interface to keep\n\n/// the WorldMutType in-sync with it's own Entities/Components\n\npub trait WorldMutType<E>: WorldRefType<E> {\n\n // Entities\n\n /// spawn an entity\n\n fn spawn_entity(&mut self) -> E;\n\n /// duplicate an entity\n\n fn local_duplicate_entity(&mut self, entity: &E) -> E;\n\n /// make it so one entity has all the same components as another\n\n fn local_duplicate_components(&mut self, mutable_entity: &E, immutable_entity: &E);\n\n /// despawn an entity\n\n fn despawn_entity(&mut self, entity: &E);\n\n\n\n // Components\n\n /// gets all of an Entity's Components\n\n fn component_kinds(&mut self, entity: &E) -> Vec<ComponentKind>;\n\n /// gets an entity's component\n\n fn component_mut<'a, R: Replicate>(\n\n &'a mut self,\n\n entity: &E,\n\n ) -> Option<ReplicaMutWrapper<'a, R>>;\n\n /// gets an entity's component, dynamically\n", "file_path": "shared/src/world/world_type.rs", "rank": 28, "score": 190918.65709682286 }, { "content": "pub trait ReplicaRefTrait<R: Replicate> {\n\n fn to_ref(&self) -> &R;\n\n}\n\n\n\n// ReplicaRefWrapper\n\n\n\npub struct ReplicaRefWrapper<'a, R: Replicate> {\n\n inner: Box<dyn ReplicaRefTrait<R> + 'a>,\n\n}\n\n\n\nimpl<'a, R: Replicate> ReplicaRefWrapper<'a, R> {\n\n pub fn new<I: ReplicaRefTrait<R> + 'a>(inner: I) -> Self {\n\n Self {\n\n inner: Box::new(inner),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, R: Replicate> Deref for ReplicaRefWrapper<'a, 
R> {\n\n type Target = R;\n\n\n\n fn deref(&self) -> &R {\n\n self.inner.to_ref()\n\n }\n\n}\n\n\n\n// ReplicaMutTrait\n\n\n", "file_path": "shared/src/world/component/replica_ref.rs", "rank": 29, "score": 189740.87338567947 }, { "content": "pub trait WorldProxyMut<'w> {\n\n fn proxy_mut(self) -> WorldMut<'w>;\n\n}\n\n\n\nimpl<'w> WorldProxyMut<'w> for &'w mut World {\n\n fn proxy_mut(self) -> WorldMut<'w> {\n\n WorldMut::new(self)\n\n }\n\n}\n\n\n\n// WorldRef //\n\n\n\npub struct WorldRef<'w> {\n\n world: &'w World,\n\n}\n\n\n\nimpl<'w> WorldRef<'w> {\n\n pub fn new(world: &'w World) -> Self {\n\n WorldRef { world }\n\n }\n", "file_path": "adapters/bevy/shared/src/world_proxy.rs", "rank": 30, "score": 188123.05011588405 }, { "content": "pub fn process_events(app: &mut App) {\n\n if app.client.is_disconnected() {\n\n return;\n\n }\n\n\n\n let mut events = app.client.receive(&mut app.world);\n\n\n\n // Connect Events\n\n for server_address in events.read::<ConnectEvent>() {\n\n info!(\"Client connected to: {}\", server_address);\n\n }\n\n\n\n // Disconnect Events\n\n for server_address in events.read::<DisconnectEvent>() {\n\n info!(\"Client disconnected from: {}\", server_address);\n\n }\n\n\n\n // Spawn Entity Events\n\n for entity in events.read::<SpawnEntityEvent>() {\n\n let new_id = app.next_id;\n", "file_path": "demos/hecs/client/src/systems/events.rs", "rank": 31, "score": 185120.4781153109 }, { "content": "pub fn process_events(app: &mut App) {\n\n let mut events = app.server.receive(&mut app.world);\n\n if events.is_empty() {\n\n // If we don't sleep here, app will loop at 100% CPU until a new message comes in\n\n sleep(Duration::from_millis(3));\n\n return;\n\n } else {\n\n for (user_key, auth) in events.read::<AuthEvent<Auth>>() {\n\n if auth.username == \"charlie\" && auth.password == \"12345\" {\n\n // Accept incoming connection\n\n app.server.accept_connection(&user_key);\n\n } else {\n\n // Reject incoming connection\n\n 
app.server.reject_connection(&user_key);\n\n }\n\n }\n\n for user_key in events.read::<ConnectEvent>() {\n\n let address = app\n\n .server\n\n .user_mut(&user_key)\n", "file_path": "demos/hecs/server/src/systems/events.rs", "rank": 32, "score": 185120.4781153109 }, { "content": "pub trait WorldProxyMut<'w, 'd> {\n\n fn proxy_mut(self, data: &'d mut WorldData) -> WorldMut<'w, 'd>;\n\n}\n\n\n\nimpl<'w, 'd> WorldProxyMut<'w, 'd> for &'w mut World {\n\n fn proxy_mut(self, data: &'d mut WorldData) -> WorldMut<'w, 'd> {\n\n WorldMut::new(self, data)\n\n }\n\n}\n\n\n\n// WorldRef\n\n\n\npub struct WorldRef<'w, 'd> {\n\n world: &'w World,\n\n world_data: &'d WorldData,\n\n}\n\n\n\nimpl<'w, 'd> WorldRef<'w, 'd> {\n\n pub fn new(world: &'w World, data: &'d WorldData) -> Self {\n\n WorldRef {\n", "file_path": "adapters/hecs/shared/src/world_proxy.rs", "rank": 33, "score": 184111.90550226718 }, { "content": "pub fn get_read_method(\n\n replica_name: &Ident,\n\n properties: &[Property],\n\n struct_type: &StructType,\n\n) -> TokenStream {\n\n let mut prop_names = quote! {};\n\n for property in properties.iter() {\n\n let field_name = property.variable_name();\n\n let new_output_right = quote! {\n\n #field_name\n\n };\n\n let new_output_result = quote! {\n\n #prop_names\n\n #new_output_right,\n\n };\n\n prop_names = new_output_result;\n\n }\n\n\n\n let mut prop_reads = quote! 
{};\n\n for property in properties.iter() {\n", "file_path": "shared/derive/src/replicate.rs", "rank": 34, "score": 182899.43233403488 }, { "content": "pub trait ReplicaDynMutTrait: ReplicaDynRefTrait {\n\n fn to_dyn_mut(&mut self) -> &mut dyn Replicate;\n\n}\n\n\n\npub struct ReplicaDynMutWrapper<'a> {\n\n inner: Box<dyn ReplicaDynMutTrait + 'a>,\n\n}\n\n\n\nimpl<'a> ReplicaDynMutWrapper<'a> {\n\n pub fn new<I: ReplicaDynMutTrait + 'a>(inner: I) -> Self {\n\n Self {\n\n inner: Box::new(inner),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Deref for ReplicaDynMutWrapper<'a> {\n\n type Target = dyn Replicate;\n\n\n\n fn deref(&self) -> &dyn Replicate {\n\n self.inner.to_dyn_ref()\n\n }\n\n}\n\n\n\nimpl<'a> DerefMut for ReplicaDynMutWrapper<'a> {\n\n fn deref_mut(&mut self) -> &mut dyn Replicate {\n\n self.inner.to_dyn_mut()\n\n }\n\n}\n", "file_path": "shared/src/world/component/replica_ref.rs", "rank": 35, "score": 181506.4973136703 }, { "content": "pub fn tick_events(\n\n mut server: Server,\n\n mut position_query: Query<&mut Position>,\n\n mut tick_reader: EventReader<TickEvent>,\n\n) {\n\n let mut has_ticked = false;\n\n\n\n for TickEvent(server_tick) in tick_reader.read() {\n\n has_ticked = true;\n\n\n\n // All game logic should happen here, on a tick event\n\n\n\n let mut messages = server.receive_tick_buffer_messages(server_tick);\n\n for (_user_key, key_command) in messages.read::<PlayerCommandChannel, KeyCommand>() {\n\n let Some(entity) = &key_command.entity.get(&server) else {\n\n continue;\n\n };\n\n let Ok(mut position) = position_query.get_mut(*entity) else {\n\n continue;\n\n };\n", "file_path": "demos/bevy/server/src/systems/events.rs", "rank": 36, "score": 179645.12679274718 }, { "content": "pub fn tick_events(\n\n mut client: Client,\n\n mut global: ResMut<Global>,\n\n mut tick_reader: EventReader<ClientTickEvent>,\n\n mut position_query: Query<&mut Position>,\n\n) {\n\n let Some(predicted_entity) = global\n\n .owned_entity\n\n .as_ref()\n\n .map(|owned_entity| 
owned_entity.predicted) else {\n\n // No owned Entity\n\n return;\n\n };\n\n\n\n let Some(command) = global.queued_command.take() else {\n\n return;\n\n };\n\n\n\n for ClientTickEvent(client_tick) in tick_reader.read() {\n\n if !global.command_history.can_insert(client_tick) {\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 37, "score": 179645.12679274718 }, { "content": "#[allow(clippy::format_push_string)]\n\npub fn derive_serde_struct(\n\n struct_: &DataStruct,\n\n struct_name: &Ident,\n\n serde_crate_name: TokenStream,\n\n) -> TokenStream {\n\n let mut ser_body = quote! {};\n\n let mut de_body = quote! {};\n\n let mut bit_length_body = quote! {};\n\n\n\n for field in &struct_.fields {\n\n let field_name = field.ident.as_ref().expect(\"expected field to have a name\");\n\n ser_body = quote! {\n\n #ser_body\n\n self.#field_name.ser(writer);\n\n };\n\n de_body = quote! {\n\n #de_body\n\n #field_name: Serde::de(reader)?,\n\n };\n\n bit_length_body = quote! {\n", "file_path": "shared/serde/derive/src/impls/structure.rs", "rank": 38, "score": 176453.26179343654 }, { "content": "pub fn auth_events(mut server: Server, mut event_reader: EventReader<AuthEvents>) {\n\n for events in event_reader.read() {\n\n for (user_key, auth) in events.read::<Auth>() {\n\n if auth.username == \"charlie\" && auth.password == \"12345\" {\n\n // Accept incoming connection\n\n server.accept_connection(&user_key);\n\n } else {\n\n // Reject incoming connection\n\n server.reject_connection(&user_key);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/server/src/systems/events.rs", "rank": 39, "score": 176084.5464489265 }, { "content": "pub fn sync_cursor_sprite(mut query: Query<(&Position, &mut Transform), With<LocalCursor>>) {\n\n for (position, mut transform) in query.iter_mut() {\n\n transform.translation.x = *position.x as f32;\n\n transform.translation.y = *position.y as f32;\n\n }\n\n}\n", "file_path": "demos/bevy/client/src/systems/sync.rs", "rank": 40, 
"score": 175322.33858533556 }, { "content": "#[allow(clippy::format_push_string)]\n\npub fn derive_serde_tuple_struct(\n\n struct_: &DataStruct,\n\n struct_name: &Ident,\n\n serde_crate_name: TokenStream,\n\n) -> TokenStream {\n\n let mut ser_body = quote! {};\n\n let mut de_body = quote! {};\n\n let mut bit_length_body = quote! {};\n\n\n\n for (i, _) in struct_.fields.iter().enumerate() {\n\n let field_index = i;\n\n ser_body = quote! {\n\n #ser_body\n\n self.#field_index.ser(writer);\n\n };\n\n de_body = quote! {\n\n #de_body\n\n #field_index: Serde::de(reader)?,\n\n };\n\n bit_length_body = quote! {\n", "file_path": "shared/serde/derive/src/impls/tuple_structure.rs", "rank": 41, "score": 170505.10917597776 }, { "content": "pub fn reject_events(mut event_reader: EventReader<RejectEvent>) {\n\n for _ in event_reader.read() {\n\n info!(\"Client rejected from connecting to Server\");\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 42, "score": 170453.36200807025 }, { "content": "pub fn error_events(mut event_reader: EventReader<ErrorEvent>) {\n\n for ErrorEvent(error) in event_reader.read() {\n\n info!(\"Naia Server Error: {:?}\", error);\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/server/src/systems/events.rs", "rank": 43, "score": 170453.36200807025 }, { "content": "pub fn disconnect_events(mut event_reader: EventReader<DisconnectEvent>) {\n\n for _ in event_reader.read() {\n\n info!(\"Client disconnected from Server\");\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 44, "score": 170453.36200807025 }, { "content": "pub fn unpublish_entity_events(mut event_reader: EventReader<UnpublishEntityEvent>) {\n\n for UnpublishEntityEvent(_entity) in event_reader.read() {\n\n info!(\"client demo: unpublish entity event\");\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 45, "score": 165962.35033594325 }, { "content": "pub fn update_component_events(mut event_reader: 
EventReader<UpdateComponentEvents>) {\n\n for events in event_reader.read() {\n\n for (_user_key, _client_entity) in events.read::<Position>() {\n\n // info!(\"update component in client entity\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/server/src/systems/events.rs", "rank": 46, "score": 165962.35033594325 }, { "content": "pub fn remove_component_events(mut event_reader: EventReader<RemoveComponentEvents>) {\n\n for events in event_reader.read() {\n\n for (_entity, _component) in events.read::<Position>() {\n\n info!(\"removed Position component from entity\");\n\n }\n\n for (_entity, _component) in events.read::<Color>() {\n\n info!(\"removed Color component from entity\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 47, "score": 165962.35033594325 }, { "content": "pub fn spawn_entity_events(mut event_reader: EventReader<SpawnEntityEvent>) {\n\n for SpawnEntityEvent(_entity) in event_reader.read() {\n\n info!(\"spawned entity\");\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 48, "score": 165962.35033594325 }, { "content": "pub fn publish_entity_events(mut event_reader: EventReader<PublishEntityEvent>) {\n\n for PublishEntityEvent(_entity) in event_reader.read() {\n\n info!(\"client demo: publish entity event\");\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 49, "score": 165962.35033594325 }, { "content": "pub fn despawn_entity_events(mut event_reader: EventReader<DespawnEntityEvent>) {\n\n for DespawnEntityEvent(_, _) in event_reader.read() {\n\n info!(\"despawned client entity\");\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/server/src/systems/events.rs", "rank": 50, "score": 165962.35033594325 }, { "content": "pub fn remove_component_events(mut event_reader: EventReader<RemoveComponentEvents>) {\n\n for events in event_reader.read() {\n\n for (_user_key, _entity, _component) in events.read::<Position>() {\n\n info!(\"removed Position 
component from client entity\");\n\n }\n\n for (_user_key, _entity, _component) in events.read::<Color>() {\n\n info!(\"removed Color component from client entity\");\n\n }\n\n for (_user_key, _entity, _component) in events.read::<Shape>() {\n\n info!(\"removed Shape component from client entity\");\n\n }\n\n }\n\n}\n", "file_path": "demos/bevy/server/src/systems/events.rs", "rank": 51, "score": 165962.35033594325 }, { "content": "pub fn unpublish_entity_events(mut event_reader: EventReader<UnpublishEntityEvent>) {\n\n for UnpublishEntityEvent(_user_key, _client_entity) in event_reader.read() {\n\n info!(\"client entity has been unpublished\");\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/server/src/systems/events.rs", "rank": 52, "score": 165962.35033594325 }, { "content": "pub fn despawn_entity_events(mut event_reader: EventReader<DespawnEntityEvent>) {\n\n for DespawnEntityEvent(_entity) in event_reader.read() {\n\n info!(\"despawned entity\");\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 53, "score": 165962.35033594325 }, { "content": "pub fn insert_component_events(mut event_reader: EventReader<InsertComponentEvents>) {\n\n for events in event_reader.read() {\n\n for (_user_key, _client_entity) in events.read::<Position>() {\n\n info!(\"insert Position component into client entity\");\n\n }\n\n for (_user_key, _client_entity) in events.read::<Color>() {\n\n info!(\"insert Color component into client entity\");\n\n }\n\n for (_user_key, _client_entity) in events.read::<Shape>() {\n\n info!(\"insert Shape component into client entity\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "demos/bevy/server/src/systems/events.rs", "rank": 54, "score": 165962.35033594325 }, { "content": "pub fn on_component_removed<R: Replicate>(\n\n mut events: EventWriter<HostSyncEvent>,\n\n query: Query<Entity, With<HostOwned>>,\n\n mut removals: RemovedComponents<R>,\n\n) {\n\n for entity in removals.read() {\n\n if let Ok(_) = query.get(entity) {\n\n 
events.send(HostSyncEvent::Remove(entity, ComponentKind::of::<R>()));\n\n }\n\n }\n\n}\n", "file_path": "adapters/bevy/shared/src/change_detection.rs", "rank": 55, "score": 165382.55215103837 }, { "content": "pub fn on_component_added<R: Replicate>(\n\n mut events: EventWriter<HostSyncEvent>,\n\n query: Query<Entity, (Added<R>, With<HostOwned>)>,\n\n) {\n\n for entity in query.iter() {\n\n events.send(HostSyncEvent::Insert(entity, ComponentKind::of::<R>()));\n\n }\n\n}\n\n\n", "file_path": "adapters/bevy/shared/src/change_detection.rs", "rank": 56, "score": 165382.55215103837 }, { "content": "/// Structures that implement the WorldMutType trait will be able to be loaded\n\n/// into the Server at which point the Server will use this interface to keep\n\n/// the WorldMutType in-sync with it's own Entities/Components\n\npub trait WorldRefType<E> {\n\n // Entities\n\n /// check whether entity exists\n\n fn has_entity(&self, entity: &E) -> bool;\n\n /// get a list of all entities in the World\n\n fn entities(&self) -> Vec<E>;\n\n\n\n // Components\n\n /// check whether entity contains component\n\n fn has_component<R: Replicate>(&self, entity: &E) -> bool;\n\n /// check whether entity contains component, dynamically\n\n fn has_component_of_kind(&self, entity: &E, component_kind: &ComponentKind) -> bool;\n\n /// gets an entity's component\n\n fn component<'a, R: Replicate>(&'a self, entity: &E) -> Option<ReplicaRefWrapper<'a, R>>;\n\n /// gets an entity's component, dynamically\n\n fn component_of_kind<'a>(\n\n &'a self,\n\n entity: &E,\n\n component_kind: &ComponentKind,\n\n ) -> Option<ReplicaDynRefWrapper<'a>>;\n\n}\n\n\n", "file_path": "shared/src/world/world_type.rs", "rank": 57, "score": 163998.73100070603 }, { "content": "#[derive(SystemSet, Debug, Hash, PartialEq, Eq, Clone)]\n\nstruct Tick;\n\n\n", "file_path": "demos/bevy/client/src/app.rs", "rank": 58, "score": 161285.25765853538 }, { "content": "/// A struct that implements Replicate is a Component, or 
otherwise,\n\n/// a container of Properties that can be scoped, tracked, and synced, with a\n\n/// remote host\n\npub trait Replicate: ReplicateInner + Named + Any {\n\n /// Gets the ComponentKind of this type\n\n fn kind(&self) -> ComponentKind;\n\n fn to_any(&self) -> &dyn Any;\n\n fn to_any_mut(&mut self) -> &mut dyn Any;\n\n fn to_boxed_any(self: Box<Self>) -> Box<dyn Any>;\n\n fn copy_to_box(&self) -> Box<dyn Replicate>;\n\n fn create_builder() -> Box<dyn ReplicateBuilder>\n\n where\n\n Self: Sized;\n\n /// Gets the number of bytes of the Component's DiffMask\n\n fn diff_mask_size(&self) -> u8;\n\n /// Get an immutable reference to the inner Component as a Replicate trait object\n\n fn dyn_ref(&self) -> ReplicaDynRef<'_>;\n\n /// Get an mutable reference to the inner Component as a Replicate trait object\n\n fn dyn_mut(&mut self) -> ReplicaDynMut<'_>;\n\n /// Sets the current Component to the state of another Component of the\n\n /// same type\n\n fn mirror(&mut self, other: &dyn Replicate);\n\n /// Set the Component's PropertyMutator, which keeps track\n", "file_path": "shared/src/world/component/replicate.rs", "rank": 59, "score": 160966.36184474756 }, { "content": "fn world_data(world: &World) -> &WorldData {\n\n return world\n\n .get_resource::<WorldData>()\n\n .expect(\"Need to instantiate by adding WorldData<Protocol> resource at startup!\");\n\n}\n\n\n", "file_path": "adapters/bevy/shared/src/world_proxy.rs", "rank": 60, "score": 159906.2877561486 }, { "content": "fn component_mut_of_kind<'a>(\n\n world: &'a mut World,\n\n entity: &Entity,\n\n component_kind: &ComponentKind,\n\n) -> Option<ReplicaDynMutWrapper<'a>> {\n\n if let Some(component_map) = world.entities.get_mut(entity) {\n\n if let Some(raw_ref) = component_map.get_mut(component_kind) {\n\n let wrapped_ref = ReplicaDynMutWrapper::new(raw_ref.dyn_mut());\n\n return Some(wrapped_ref);\n\n }\n\n }\n\n\n\n None\n\n}\n", "file_path": "demos/demo_utils/demo_world/src/world.rs", "rank": 61, "score": 
158901.7248952021 }, { "content": "pub trait ReplicaDynRefTrait {\n\n fn to_dyn_ref(&self) -> &dyn Replicate;\n\n}\n\n\n\npub struct ReplicaDynRefWrapper<'a> {\n\n inner: Box<dyn ReplicaDynRefTrait + 'a>,\n\n}\n\n\n\nimpl<'a> ReplicaDynRefWrapper<'a> {\n\n pub fn new<I: ReplicaDynRefTrait + 'a>(inner: I) -> Self {\n\n Self {\n\n inner: Box::new(inner),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Deref for ReplicaDynRefWrapper<'a> {\n\n type Target = dyn Replicate;\n\n\n\n fn deref(&self) -> &dyn Replicate {\n\n self.inner.to_dyn_ref()\n\n }\n\n}\n\n\n\n// ReplicaDynMutWrapper\n\n\n", "file_path": "shared/src/world/component/replica_ref.rs", "rank": 62, "score": 157987.09220292824 }, { "content": "fn component<'a, R: Replicate>(\n\n world: &'a World,\n\n entity: &Entity,\n\n) -> Option<ReplicaRefWrapper<'a, R>> {\n\n if let Some(component_map) = world.entities.get(entity) {\n\n if let Some(boxed_component) = component_map.get(&ComponentKind::of::<R>()) {\n\n if let Some(raw_ref) = boxed_component.to_any().downcast_ref::<R>() {\n\n let wrapper = ComponentRef::<R>::new(raw_ref);\n\n let wrapped_ref = ReplicaRefWrapper::new(wrapper);\n\n return Some(wrapped_ref);\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "demos/demo_utils/demo_world/src/world.rs", "rank": 63, "score": 154924.94042463848 }, { "content": "fn entities(world: &World) -> Vec<Entity> {\n\n let mut output = Vec::new();\n\n\n\n for (key, _) in world.entities.iter() {\n\n output.push(key);\n\n }\n\n\n\n output\n\n}\n\n\n", "file_path": "demos/demo_utils/demo_world/src/world.rs", "rank": 64, "score": 154256.36523850725 }, { "content": "pub fn get_create_builder_method(builder_name: &Ident) -> TokenStream {\n\n quote! 
{\n\n fn create_builder() -> Box<dyn ReplicateBuilder> where Self:Sized {\n\n Box::new(#builder_name)\n\n }\n\n }\n\n}\n\n\n", "file_path": "shared/derive/src/replicate.rs", "rank": 65, "score": 153948.07529796095 }, { "content": "fn entities(world: &World) -> Vec<Entity> {\n\n let mut output = Vec::new();\n\n\n\n for entity in world.iter() {\n\n output.push(entity.entity());\n\n }\n\n\n\n output\n\n}\n\n\n", "file_path": "adapters/hecs/shared/src/world_wrapper.rs", "rank": 66, "score": 152269.57388710833 }, { "content": "fn entities(world: &World) -> Vec<Entity> {\n\n let world_data = world_data(world);\n\n world_data.entities()\n\n}\n\n\n", "file_path": "adapters/bevy/shared/src/world_proxy.rs", "rank": 67, "score": 152269.57388710833 }, { "content": "fn entities(world: &World) -> Vec<Entity> {\n\n let mut output = Vec::new();\n\n\n\n for entity in world.iter() {\n\n output.push(entity.entity());\n\n }\n\n\n\n output\n\n}\n\n\n", "file_path": "adapters/hecs/shared/src/world_proxy.rs", "rank": 68, "score": 152269.57388710833 }, { "content": "pub trait ReplicateBuilder: Send + Sync + Named {\n\n /// Create new Component from incoming bit stream\n\n fn read(\n\n &self,\n\n reader: &mut BitReader,\n\n converter: &dyn LocalEntityAndGlobalEntityConverter,\n\n ) -> Result<Box<dyn Replicate>, SerdeErr>;\n\n /// Create new Component Update from incoming bit stream\n\n fn read_create_update(&self, reader: &mut BitReader) -> Result<ComponentUpdate, SerdeErr>;\n\n /// Split a Component update into Waiting and Ready updates\n\n fn split_update(\n\n &self,\n\n converter: &dyn LocalEntityAndGlobalEntityConverter,\n\n update: ComponentUpdate,\n\n ) -> Result<\n\n (\n\n Option<Vec<(RemoteEntity, ComponentFieldUpdate)>>,\n\n Option<ComponentUpdate>,\n\n ),\n\n SerdeErr,\n\n >;\n\n}\n\n\n", "file_path": "shared/src/world/component/replicate.rs", "rank": 69, "score": 152073.0568143479 }, { "content": " pub trait ReplicateInner: Sync + Send + 'static {}\n\n\n\n impl<T> ReplicateInner 
for T\n\n where T: Sync + Send + 'static {\n\n }\n\n }\n\n}\n", "file_path": "shared/src/world/component/replicate.rs", "rank": 70, "score": 152073.0568143479 }, { "content": "pub trait MutChannelType: Send + Sync {\n\n fn new_receiver(&mut self, address: &Option<SocketAddr>) -> Option<MutReceiver>;\n\n fn send(&self, diff: u8);\n\n}\n\n\n\n// MutChannel\n\n#[derive(Clone)]\n\npub struct MutChannel {\n\n data: Arc<RwLock<dyn MutChannelType>>,\n\n}\n\n\n\nimpl MutChannel {\n\n pub fn new_channel<E: Copy + Eq + Hash>(\n\n global_world_manager: &dyn GlobalWorldManagerType<E>,\n\n diff_mask_length: u8,\n\n ) -> (MutSender, MutReceiverBuilder) {\n\n let channel = Self {\n\n data: global_world_manager.new_mut_channel(diff_mask_length),\n\n };\n\n\n", "file_path": "shared/src/world/host/mut_channel.rs", "rank": 71, "score": 151102.72115960426 }, { "content": "pub trait WorldProxy<'w> {\n\n fn proxy(self) -> WorldRef<'w>;\n\n}\n\n\n\nimpl<'w> WorldProxy<'w> for &'w World {\n\n fn proxy(self) -> WorldRef<'w> {\n\n WorldRef::new(self)\n\n }\n\n}\n\n\n\n// WorldProxyMut\n\n\n", "file_path": "adapters/bevy/shared/src/world_proxy.rs", "rank": 72, "score": 149411.09659984589 }, { "content": "// Bevy Commands Extension\n\npub trait CommandsExt<'w, 's, 'a> {\n\n fn enable_replication(&'a mut self, server: &mut Server) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn disable_replication(&'a mut self, server: &mut Server)\n\n -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn configure_replication(\n\n &'a mut self,\n\n config: ReplicationConfig,\n\n ) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn replication_config(&'a self, server: &Server) -> Option<ReplicationConfig>;\n\n fn give_authority(\n\n &'a mut self,\n\n server: &mut Server,\n\n user_key: &UserKey,\n\n ) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn take_authority(&'a mut self, server: &mut Server) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn authority(&'a self, server: &Server) -> Option<EntityAuthStatus>;\n\n fn 
pause_replication(&'a mut self, server: &mut Server) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn resume_replication(&'a mut self, server: &mut Server) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n}\n\n\n", "file_path": "adapters/bevy/server/src/commands.rs", "rank": 73, "score": 146188.95540547572 }, { "content": "// Bevy Commands Extension\n\npub trait CommandsExt<'w, 's, 'a> {\n\n fn local_duplicate(&'a mut self) -> EntityCommands<'w, 's, 'a>;\n\n fn enable_replication(&'a mut self, client: &mut Client) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn disable_replication(&'a mut self, client: &mut Client)\n\n -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn configure_replication(\n\n &'a mut self,\n\n config: ReplicationConfig,\n\n ) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn replication_config(&'a self, client: &Client) -> Option<ReplicationConfig>;\n\n fn request_authority(&'a mut self, client: &mut Client) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn release_authority(&'a mut self, client: &mut Client) -> &'a mut EntityCommands<'w, 's, 'a>;\n\n fn authority(&'a self, client: &Client) -> Option<EntityAuthStatus>;\n\n}\n\n\n\nimpl<'w, 's, 'a> CommandsExt<'w, 's, 'a> for EntityCommands<'w, 's, 'a> {\n\n fn local_duplicate(&'a mut self) -> EntityCommands<'w, 's, 'a> {\n\n let old_entity = self.id();\n\n let commands = self.commands();\n\n let new_entity = commands.spawn_empty().id();\n", "file_path": "adapters/bevy/client/src/commands.rs", "rank": 74, "score": 146188.95540547572 }, { "content": "fn component<'a, R: Replicate>(\n\n world: &'a World,\n\n entity: &Entity,\n\n) -> Option<ReplicaRefWrapper<'a, R>> {\n\n if let Some(bevy_ref) = world.get::<R>(*entity) {\n\n let wrapper = ComponentRef(bevy_ref);\n\n let component_ref = ReplicaRefWrapper::new(wrapper);\n\n return Some(component_ref);\n\n }\n\n None\n\n}\n\n\n", "file_path": "adapters/bevy/shared/src/world_proxy.rs", "rank": 75, "score": 146137.07568249278 }, { "content": "fn component<'a, R: Replicate>(\n\n 
world: &'a World,\n\n entity: &Entity,\n\n) -> Option<ReplicaRefWrapper<'a, R>> {\n\n if let Ok(hecs_ref) = world.get::<&R>(*entity) {\n\n let wrapper = ComponentRef(hecs_ref);\n\n let component_ref = ReplicaRefWrapper::new(wrapper);\n\n return Some(component_ref);\n\n }\n\n None\n\n}\n\n\n", "file_path": "adapters/hecs/shared/src/world_proxy.rs", "rank": 76, "score": 146137.07568249278 }, { "content": "fn component<'a, R: Replicate>(\n\n world: &'a World,\n\n entity: &Entity,\n\n) -> Option<ReplicaRefWrapper<'a, R>> {\n\n if let Ok(hecs_ref) = world.get::<&R>(*entity) {\n\n let wrapper = ComponentRef(hecs_ref);\n\n let component_ref = ReplicaRefWrapper::new(wrapper);\n\n return Some(component_ref);\n\n }\n\n None\n\n}\n\n\n", "file_path": "adapters/hecs/shared/src/world_wrapper.rs", "rank": 77, "score": 146137.07568249278 }, { "content": "pub trait WorldProxy<'w, 'd> {\n\n fn proxy(self, data: &'d WorldData) -> WorldRef<'w, 'd>;\n\n}\n\n\n\nimpl<'w, 'd> WorldProxy<'w, 'd> for &'w World {\n\n fn proxy(self, data: &'d WorldData) -> WorldRef<'w, 'd> {\n\n WorldRef::new(self, data)\n\n }\n\n}\n\n\n\n// WorldProxyMut\n\n\n", "file_path": "adapters/hecs/shared/src/world_proxy.rs", "rank": 78, "score": 145399.951986229 }, { "content": "/// Get the field name as a TokenStream\n\nfn get_field_name(property: &Property, struct_type: &StructType) -> Member {\n\n match *struct_type {\n\n StructType::Struct => Member::from(property.variable_name().clone()),\n\n StructType::TupleStruct => {\n\n let index = Index {\n\n index: property.index() as u32,\n\n span: property.variable_name().span(),\n\n };\n\n Member::from(index)\n\n }\n\n _ => {\n\n panic!(\"The struct should not have any fields\")\n\n }\n\n }\n\n}\n\n\n\nimpl Property {\n\n pub fn normal(index: usize, variable_name: Ident, inner_type: Type) -> Self {\n\n Self::Normal(NormalProperty {\n\n index,\n", "file_path": "shared/derive/src/replicate.rs", "rank": 79, "score": 143203.12832786163 }, { "content": "pub fn 
message_impl(\n\n input: proc_macro::TokenStream,\n\n shared_crate_name: TokenStream,\n\n is_fragment: bool,\n\n) -> proc_macro::TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n // Helper Properties\n\n let struct_type = get_struct_type(&input);\n\n let fields = get_fields(&input);\n\n\n\n // Names\n\n let struct_name = input.ident;\n\n let struct_name_str = LitStr::new(&struct_name.to_string(), struct_name.span());\n\n let lowercase_struct_name = Ident::new(\n\n struct_name.to_string().to_lowercase().as_str(),\n\n Span::call_site(),\n\n );\n\n let module_name = format_ident!(\"define_{}\", lowercase_struct_name);\n\n let builder_name = format_ident!(\"{}Builder\", struct_name);\n", "file_path": "shared/derive/src/message.rs", "rank": 80, "score": 142548.3532936107 }, { "content": "pub fn run() {\n\n App::default()\n\n // Bevy Plugins\n\n .add_plugins(DefaultPlugins)\n\n // Add Naia Client Plugin\n\n .add_plugins(ClientPlugin::new(ClientConfig::default(), protocol()))\n\n // Background Color\n\n .insert_resource(ClearColor(Color::BLACK))\n\n // Startup System\n\n .add_systems(Startup, init)\n\n // Receive Client Events\n\n .add_systems(\n\n Update,\n\n (\n\n events::connect_events,\n\n events::disconnect_events,\n\n events::reject_events,\n\n events::spawn_entity_events,\n\n events::despawn_entity_events,\n\n events::publish_entity_events,\n", "file_path": "demos/bevy/client/src/app.rs", "rank": 81, "score": 142548.3532936107 }, { "content": "#[proc_macro_derive(Replicate)]\n\npub fn replicate_derive_shared(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let shared_crate_name = quote! 
{ naia_shared };\n\n replicate_impl(input, shared_crate_name)\n\n}\n\n\n\n/// Derives the Replicate trait for a given struct, for the Bevy adapter\n", "file_path": "shared/derive/src/lib.rs", "rank": 82, "score": 141566.08158616416 }, { "content": "#[proc_macro_derive(ReplicateBevy)]\n\npub fn replicate_derive_bevy(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let shared_crate_name = quote! { naia_bevy_shared };\n\n replicate_impl(input, shared_crate_name)\n\n}\n\n\n\n/// Derives the Replicate trait for a given struct, for the Bevy adapter\n", "file_path": "shared/derive/src/lib.rs", "rank": 83, "score": 141566.0191151451 }, { "content": "#[proc_macro_derive(ReplicateHecs)]\n\npub fn replicate_derive_hecs(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let shared_crate_name = quote! { naia_hecs_shared };\n\n replicate_impl(input, shared_crate_name)\n\n}\n\n\n\n// Channel\n\n\n\n/// Derives the Channel trait for a given struct\n", "file_path": "shared/derive/src/lib.rs", "rank": 84, "score": 141566.0191151451 }, { "content": "pub fn get_read_create_update_method(replica_name: &Ident, properties: &[Property]) -> TokenStream {\n\n let mut prop_read_writes = quote! {};\n\n for property in properties.iter() {\n\n let new_output_right = match property {\n\n Property::Normal(inner_property) => {\n\n let field_type = &inner_property.inner_type;\n\n quote! {\n\n {\n\n let should_read = bool::de(reader)?;\n\n should_read.ser(&mut update_writer);\n\n if should_read {\n\n Property::<#field_type>::read_write(reader, &mut update_writer)?;\n\n }\n\n }\n\n }\n\n }\n\n Property::Entity(_) => {\n\n quote! {\n\n {\n\n let should_read = bool::de(reader)?;\n", "file_path": "shared/derive/src/replicate.rs", "rank": 85, "score": 141560.6239010493 }, { "content": "fn get_unpublish_method(properties: &[Property], struct_type: &StructType) -> TokenStream {\n\n let mut output = quote! 
{};\n\n\n\n for property in properties.iter().filter(|p| p.is_replicated()) {\n\n let field_name = get_field_name(property, struct_type);\n\n let new_output_right = quote! {\n\n self.#field_name.remote_unpublish();\n\n };\n\n let new_output_result = quote! {\n\n #output\n\n #new_output_right\n\n };\n\n output = new_output_result;\n\n }\n\n\n\n quote! {\n\n fn unpublish(&mut self) {\n\n #output\n\n }\n\n }\n\n}\n\n\n", "file_path": "shared/derive/src/replicate.rs", "rank": 86, "score": 140758.05091570003 }, { "content": "fn get_write_method(properties: &[Property], struct_type: &StructType) -> TokenStream {\n\n let mut property_writes = quote! {};\n\n\n\n for property in properties.iter() {\n\n let field_name = get_field_name(property, struct_type);\n\n let new_output_right = match property {\n\n Property::Normal(_) => {\n\n quote! {\n\n Property::write(&self.#field_name, writer);\n\n }\n\n }\n\n Property::Entity(_) => {\n\n quote! {\n\n EntityProperty::write(&self.#field_name, writer, converter);\n\n }\n\n }\n\n Property::NonReplicated(_) => {\n\n continue;\n\n }\n\n };\n", "file_path": "shared/derive/src/replicate.rs", "rank": 87, "score": 140758.05091570003 }, { "content": "fn get_localize_method(properties: &[Property], struct_type: &StructType) -> TokenStream {\n\n let mut output = quote! {};\n\n\n\n for property in properties.iter().filter(|p| p.is_replicated()) {\n\n let field_name = get_field_name(property, struct_type);\n\n let new_output_right = quote! {\n\n self.#field_name.localize();\n\n };\n\n let new_output_result = quote! {\n\n #output\n\n #new_output_right\n\n };\n\n output = new_output_result;\n\n }\n\n\n\n quote! 
{\n\n fn localize(&mut self) {\n\n #output\n\n }\n\n }\n\n}\n\n\n", "file_path": "shared/derive/src/replicate.rs", "rank": 88, "score": 140758.05091570003 }, { "content": "pub fn start_session_server(\n\n server_addrs: ServerAddrs,\n\n config: SocketConfig,\n\n session_endpoint: SessionEndpoint,\n\n) {\n\n RTC_URL_PATH\n\n .set(format!(\"POST /{}\", config.rtc_endpoint_path))\n\n .expect(\"unable to set the URL Path\");\n\n executor::spawn(async move {\n\n listen(server_addrs, config, session_endpoint.clone()).await;\n\n })\n\n .detach();\n\n}\n\n\n\n/// Listens for incoming connections and serves them.\n\nasync fn listen(\n\n server_addrs: ServerAddrs,\n\n config: SocketConfig,\n\n session_endpoint: SessionEndpoint,\n\n) {\n", "file_path": "socket/server/src/session.rs", "rank": 89, "score": 140172.11405021572 }, { "content": "pub fn get_read_method(\n\n struct_name: &Ident,\n\n fields: &[Field],\n\n struct_type: &StructType,\n\n) -> TokenStream {\n\n let mut field_names = quote! {};\n\n for field in fields.iter() {\n\n let field_name = field.variable_name();\n\n let new_output_right = quote! {\n\n #field_name\n\n };\n\n let new_output_result = quote! {\n\n #field_names\n\n #new_output_right,\n\n };\n\n field_names = new_output_result;\n\n }\n\n\n\n let mut field_reads = quote! 
{};\n\n for field in fields.iter() {\n", "file_path": "shared/derive/src/message.rs", "rank": 90, "score": 140172.11405021572 }, { "content": "pub fn on_despawn(\n\n mut events: EventWriter<HostSyncEvent>,\n\n query: Query<Entity>,\n\n mut removals: RemovedComponents<HostOwned>,\n\n) {\n\n for entity in removals.read() {\n\n if let Ok(_) = query.get(entity) {\n\n // Entity is still alive, expected if Auth is reset on Delegated Entity\n\n } else {\n\n // info!(\"despawn on HostOwned entity: {:?}\", entity);\n\n events.send(HostSyncEvent::Despawn(entity));\n\n }\n\n }\n\n}\n\n\n", "file_path": "adapters/bevy/shared/src/change_detection.rs", "rank": 91, "score": 140172.11405021572 }, { "content": "pub fn init(\n\n mut commands: Commands,\n\n mut client: Client,\n\n mut meshes: ResMut<Assets<Mesh>>,\n\n mut materials: ResMut<Assets<ColorMaterial>>,\n\n) {\n\n info!(\"Naia Bevy Client Demo started\");\n\n\n\n client.auth(Auth::new(\"charlie\", \"12345\"));\n\n let socket = webrtc::Socket::new(\"http://127.0.0.1:14191\", client.socket_config());\n\n client.connect(socket);\n\n\n\n // Setup Camera\n\n commands.spawn(Camera2dBundle::default());\n\n\n\n // Setup Global Resource\n\n let mut global = Global::default();\n\n\n\n // Load colors\n\n global.red = materials.add(ColorMaterial::from(Color::RED));\n", "file_path": "demos/bevy/client/src/systems/init.rs", "rank": 92, "score": 140172.11405021572 }, { "content": "fn get_disable_delegation_method(properties: &[Property], struct_type: &StructType) -> TokenStream {\n\n let mut output = quote! {};\n\n\n\n for property in properties.iter().filter(|p| p.is_replicated()) {\n\n let field_name = get_field_name(property, struct_type);\n\n let new_output_right = quote! {\n\n self.#field_name.disable_delegation();\n\n };\n\n let new_output_result = quote! {\n\n #output\n\n #new_output_right\n\n };\n\n output = new_output_result;\n\n }\n\n\n\n quote! 
{\n\n fn disable_delegation(&mut self) {\n\n #output\n\n }\n\n }\n\n}\n\n\n", "file_path": "shared/derive/src/replicate.rs", "rank": 93, "score": 138411.23289370086 }, { "content": "fn get_set_mutator_method(properties: &[Property], struct_type: &StructType) -> TokenStream {\n\n let mut output = quote! {};\n\n\n\n for property in properties.iter().filter(|p| p.is_replicated()) {\n\n let field_name = get_field_name(property, struct_type);\n\n let new_output_right = quote! {\n\n self.#field_name.set_mutator(mutator);\n\n };\n\n let new_output_result = quote! {\n\n #output\n\n #new_output_right\n\n };\n\n output = new_output_result;\n\n }\n\n\n\n quote! {\n\n fn set_mutator(&mut self, mutator: &PropertyMutator) {\n\n #output\n\n }\n\n }\n\n}\n\n\n", "file_path": "shared/derive/src/replicate.rs", "rank": 94, "score": 138411.23289370086 }, { "content": "fn get_relations_waiting_method(fields: &[Property], struct_type: &StructType) -> TokenStream {\n\n let mut body = quote! {};\n\n\n\n for field in fields.iter() {\n\n if let Property::Entity(_) = field {\n\n let field_name = get_field_name(field, struct_type);\n\n let body_add_right = quote! {\n\n if let Some(local_entity) = self.#field_name.waiting_local_entity() {\n\n output.insert(local_entity);\n\n }\n\n };\n\n let new_body = quote! {\n\n #body\n\n #body_add_right\n\n };\n\n body = new_body;\n\n }\n\n }\n\n\n\n quote! {\n", "file_path": "shared/derive/src/replicate.rs", "rank": 95, "score": 138411.23289370086 }, { "content": "fn get_relations_complete_method(fields: &[Property], struct_type: &StructType) -> TokenStream {\n\n let mut body = quote! {};\n\n\n\n for field in fields.iter() {\n\n if let Property::Entity(_) = field {\n\n let field_name = get_field_name(field, struct_type);\n\n let body_add_right = quote! {\n\n self.#field_name.waiting_complete(converter);\n\n };\n\n let new_body = quote! {\n\n #body\n\n #body_add_right\n\n };\n\n body = new_body;\n\n }\n\n }\n\n\n\n quote! 
{\n\n fn relations_complete(&mut self, converter: &dyn LocalEntityAndGlobalEntityConverter) {\n\n #body\n\n }\n\n }\n\n}\n", "file_path": "shared/derive/src/replicate.rs", "rank": 96, "score": 138411.23289370086 }, { "content": " pub trait ReplicateInner: Component<Storage = TableStorage> + Sync + Send + 'static {}\n\n\n\n impl<T> ReplicateInner for T\n\n where T: Component<Storage = TableStorage> + Sync + Send + 'static {\n\n }\n\n }\n\n else\n\n {\n", "file_path": "shared/src/world/component/replicate.rs", "rank": 97, "score": 138005.0929607208 }, { "content": "pub fn message_events(\n\n mut commands: Commands,\n\n client: Client,\n\n mut global: ResMut<Global>,\n\n mut event_reader: EventReader<MessageEvents>,\n\n position_query: Query<&Position>,\n\n color_query: Query<&Color>,\n\n) {\n\n for events in event_reader.read() {\n\n for message in events.read::<EntityAssignmentChannel, EntityAssignment>() {\n\n let assign = message.assign;\n\n\n\n let entity = message.entity.get(&client).unwrap();\n\n if assign {\n\n info!(\"gave ownership of entity\");\n\n\n\n // Here we create a local copy of the Player entity, to use for client-side prediction\n\n if let Ok(position) = position_query.get(entity) {\n\n let prediction_entity = commands\n\n .entity(entity)\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 98, "score": 137921.55615028314 }, { "content": "pub fn connect_events(\n\n mut commands: Commands,\n\n mut client: Client,\n\n mut global: ResMut<Global>,\n\n mut event_reader: EventReader<ConnectEvent>,\n\n) {\n\n for _ in event_reader.read() {\n\n let Ok(server_address) = client.server_address() else {\n\n panic!(\"Shouldn't happen\");\n\n };\n\n info!(\"Client connected to: {}\", server_address);\n\n\n\n // Create entity for Client-authoritative Cursor\n\n\n\n // Spawn Cursor Entity\n\n let cursor_entity = commands\n\n // Spawn new Square Entity\n\n .spawn_empty()\n\n // MUST call this to begin replication\n\n .enable_replication(&mut 
client)\n", "file_path": "demos/bevy/client/src/systems/events.rs", "rank": 99, "score": 137921.55615028314 } ]
Rust
packages/moneymarket/src/testing.rs
1Zaitsev/money-market-mocks
d2c5d089e1a34735e9483f9e54afb647bb00c13d
use crate::mock_querier::mock_dependencies; use crate::oracle::PriceResponse; use crate::querier::{compute_tax, deduct_tax, query_price, query_tax_rate, TimeConstraints}; use crate::tokens::{Tokens, TokensHuman, TokensMath, TokensToRaw}; use cosmwasm_bignumber::{Decimal256, Uint256}; use cosmwasm_std::{Addr, Api, CanonicalAddr, Coin, Decimal, StdError, Uint128}; #[test] fn tax_rate_querier() { let mut deps = mock_dependencies(&[]); deps.querier.with_tax(Decimal::percent(1), &[]); assert_eq!( query_tax_rate(deps.as_ref()).unwrap(), Decimal256::percent(1), ); } #[test] fn test_compute_tax() { let mut deps = mock_dependencies(&[]); deps.querier.with_tax( Decimal::percent(1), &[(&"uusd".to_string(), &Uint128::from(1000000u128))], ); assert_eq!( compute_tax(deps.as_ref(), &Coin::new(10000000000u128, "uusd")).unwrap(), Uint256::from(1000000u64) ); assert_eq!( compute_tax(deps.as_ref(), &Coin::new(50000000u128, "uusd")).unwrap(), Uint256::from(495050u64) ); } #[test] fn test_deduct_tax() { let mut deps = mock_dependencies(&[]); deps.querier.with_tax( Decimal::percent(1), &[(&"uusd".to_string(), &Uint128::from(1000000u128))], ); assert_eq!( deduct_tax(deps.as_ref(), Coin::new(10000000000u128, "uusd")).unwrap(), Coin { denom: "uusd".to_string(), amount: Uint128::from(9999000000u128) } ); assert_eq!( deduct_tax(deps.as_ref(), Coin::new(50000000u128, "uusd")).unwrap(), Coin { denom: "uusd".to_string(), amount: Uint128::from(49504950u128) } ); } #[test] fn oracle_price_querier() { let mut deps = mock_dependencies(&[]); deps.querier.with_oracle_price(&[( &("terra123123".to_string(), "uusd".to_string()), &(Decimal256::from_ratio(131, 2), 123, 321), )]); let oracle_price = query_price( deps.as_ref(), Addr::unchecked("oracle"), "terra123123".to_string(), "uusd".to_string(), None, ) .unwrap(); assert_eq!( oracle_price, PriceResponse { rate: Decimal256::from_ratio(131, 2), last_updated_base: 123, last_updated_quote: 321, } ); query_price( deps.as_ref(), Addr::unchecked("oracle"), 
"terra123123".to_string(), "ukrw".to_string(), None, ) .unwrap_err(); let res = query_price( deps.as_ref(), Addr::unchecked("oracle"), "terra123123".to_string(), "uusd".to_string(), Some(TimeConstraints { block_time: 500u64, valid_timeframe: 60u64, }), ); match res { Err(StdError::GenericErr { msg, .. }) => assert_eq!(msg, "Price is too old"), _ => panic!("DO NOT ENTER HERE"), } } #[test] fn tokens_math() { let deps = mock_dependencies(&[]); let tokens_1: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ("token5".to_string(), Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token4".to_string(), Uint256::from(1000000u64)), ]; let tokens_3: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token6".to_string(), Uint256::from(1000000u64)), ]; let tokens_4: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1200000u64)), ]; let mut tokens_1_raw: Tokens = tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); let tokens_3_raw: Tokens = tokens_3.to_raw(deps.as_ref()).unwrap(); let tokens_4_raw: Tokens = tokens_4.to_raw(deps.as_ref()).unwrap(); assert!(tokens_1_raw.clone().sub(tokens_2_raw).is_err()); assert!(tokens_1_raw.clone().sub(tokens_3_raw).is_err()); assert!(tokens_1_raw.sub(tokens_4_raw).is_err()); } #[test] fn tokens_math_normal_add() { let deps = mock_dependencies(&[]); let acct1 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() .to_string(); let acct2 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() .to_string(); let acct3 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() .to_string(); let acct4 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() .to_string(); let acct5 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() .to_string(); let tokens_1: TokensHuman = vec![ (acct1.clone(), Uint256::from(1000000u64)), (acct2, Uint256::from(1000000u64)), (acct3, Uint256::from(1000000u64)), (acct5, Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ (acct1, Uint256::from(1000000u64)), (acct4, Uint256::from(1000000u64)), ]; let mut tokens_1_raw: Tokens = tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); tokens_1_raw.add(tokens_2_raw); assert_eq!(tokens_1_raw[0].1, Uint256::from(2000000u64)); assert_eq!(tokens_1_raw.len(), 5); } #[test] fn token_math_zero_token() { let deps = mock_dependencies(&[]); let tokens_1: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ]; let mut tokens_1_raw: Tokens = tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); tokens_1_raw.sub(tokens_2_raw).unwrap(); 
assert_eq!(tokens_1_raw.len(), 0); } #[test] #[should_panic] fn token_math_invalid_token() { let deps = mock_dependencies(&[]); let tokens_1: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ("token5".to_string(), Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token1".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ]; let mut tokens_1_raw: Tokens = tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); let _ = tokens_1_raw.sub(tokens_2_raw); } #[test] #[should_panic] fn token_math_invalid_token_2() { let deps = mock_dependencies(&[]); let tokens_1: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ("token5".to_string(), Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ]; let mut tokens_1_raw: Tokens = tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); let _ = tokens_1_raw.sub(tokens_2_raw); }
use crate::mock_querier::mock_dependencies; use crate::oracle::PriceResponse; use crate::querier::{compute_tax, deduct_tax, query_price, query_tax_rate, TimeConstraints}; use crate::tokens::{Tokens, TokensHuman, TokensMath, TokensToRaw}; use cosmwasm_bignumber::{Decimal256, Uint256}; use cosmwasm_std::{Addr, Api, CanonicalAddr, Coin, Decimal, StdError, Uint128}; #[test] fn tax_rate_querier() { let mut deps = mock_dependencies(&[]); deps.querier.with_tax(Decimal::percent(1), &[]); assert_eq!( query_tax_rate(deps.as_ref()).unwrap(), Decimal256::percent(1), ); } #[test] fn test_compute_tax() { let mut deps = mock_dependencies(&[]); deps.querier.with_tax( Decimal::percent(1), &[(&"uusd".to_string(), &Uint128::from(1000000u128))], ); assert_eq!( compute_tax(deps.as_ref(), &Coin::new(10000000000u128, "uusd")).unwrap(), Uint256::from(1000000u64) ); assert_eq!( compute_tax(deps.as_ref(), &Coin::new(50000000u128, "uusd")).unwrap(), Uint256::from(495050u64) ); } #[test] fn test_deduct_tax() { let mut deps = mock_dependencies(&[]); deps.querier.with_tax( Decimal::percent(1), &[(&"uusd".to_string(), &Uint128::from(1000000u128))], ); assert_eq!( deduct_tax(deps.as_ref(), Coin::new(10000000000u128, "uusd")).unwrap(), Coin { denom: "uusd".to_string(), amount: Uint128::from(9999000000u128) } ); assert_eq!( deduct_tax(deps.as_ref(), Coin::new(50000000u128, "uusd")).unwrap(), Coin { denom: "uusd".to_string(), amount: Uint128::from(49504950u128) } ); } #[test] fn oracle_price_querier() { let mut deps = mock_dependencies(&[]); deps.querier.with_oracle_price(&[( &("terra123123".to_string(), "uusd".to_string()), &(Decimal256::from_ratio(131, 2), 123, 321), )]); let oracle_price = query_price( deps.as_ref(), Addr::unchecked("oracle"), "terra123123".to_string(), "uusd".to_string(), None, ) .unwrap(); assert_eq!( oracle_price, PriceResponse { rate: Decimal256::f
fn token_math_invalid_token_2() { let deps = mock_dependencies(&[]); let tokens_1: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ("token5".to_string(), Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ]; let mut tokens_1_raw: Tokens = tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); let _ = tokens_1_raw.sub(tokens_2_raw); }
rom_ratio(131, 2), last_updated_base: 123, last_updated_quote: 321, } ); query_price( deps.as_ref(), Addr::unchecked("oracle"), "terra123123".to_string(), "ukrw".to_string(), None, ) .unwrap_err(); let res = query_price( deps.as_ref(), Addr::unchecked("oracle"), "terra123123".to_string(), "uusd".to_string(), Some(TimeConstraints { block_time: 500u64, valid_timeframe: 60u64, }), ); match res { Err(StdError::GenericErr { msg, .. }) => assert_eq!(msg, "Price is too old"), _ => panic!("DO NOT ENTER HERE"), } } #[test] fn tokens_math() { let deps = mock_dependencies(&[]); let tokens_1: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ("token5".to_string(), Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token4".to_string(), Uint256::from(1000000u64)), ]; let tokens_3: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token6".to_string(), Uint256::from(1000000u64)), ]; let tokens_4: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1200000u64)), ]; let mut tokens_1_raw: Tokens = tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); let tokens_3_raw: Tokens = tokens_3.to_raw(deps.as_ref()).unwrap(); let tokens_4_raw: Tokens = tokens_4.to_raw(deps.as_ref()).unwrap(); assert!(tokens_1_raw.clone().sub(tokens_2_raw).is_err()); assert!(tokens_1_raw.clone().sub(tokens_3_raw).is_err()); assert!(tokens_1_raw.sub(tokens_4_raw).is_err()); } #[test] fn tokens_math_normal_add() { let deps = mock_dependencies(&[]); let acct1 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() 
.to_string(); let acct2 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() .to_string(); let acct3 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() .to_string(); let acct4 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() .to_string(); let acct5 = deps .api .addr_humanize(&CanonicalAddr::from(vec![ 1, 1, 1, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ])) .unwrap() .to_string(); let tokens_1: TokensHuman = vec![ (acct1.clone(), Uint256::from(1000000u64)), (acct2, Uint256::from(1000000u64)), (acct3, Uint256::from(1000000u64)), (acct5, Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ (acct1, Uint256::from(1000000u64)), (acct4, Uint256::from(1000000u64)), ]; let mut tokens_1_raw: Tokens = tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); tokens_1_raw.add(tokens_2_raw); assert_eq!(tokens_1_raw[0].1, Uint256::from(2000000u64)); assert_eq!(tokens_1_raw.len(), 5); } #[test] fn token_math_zero_token() { let deps = mock_dependencies(&[]); let tokens_1: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ]; let mut tokens_1_raw: Tokens = 
tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); tokens_1_raw.sub(tokens_2_raw).unwrap(); assert_eq!(tokens_1_raw.len(), 0); } #[test] #[should_panic] fn token_math_invalid_token() { let deps = mock_dependencies(&[]); let tokens_1: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token2".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ("token5".to_string(), Uint256::from(1000000u64)), ]; let tokens_2: TokensHuman = vec![ ("token1".to_string(), Uint256::from(1000000u64)), ("token1".to_string(), Uint256::from(1000000u64)), ("token3".to_string(), Uint256::from(1000000u64)), ]; let mut tokens_1_raw: Tokens = tokens_1.to_raw(deps.as_ref()).unwrap(); let tokens_2_raw: Tokens = tokens_2.to_raw(deps.as_ref()).unwrap(); let _ = tokens_1_raw.sub(tokens_2_raw); } #[test] #[should_panic]
random
[ { "content": "pub fn query_tax_rate_and_cap(deps: Deps, denom: String) -> StdResult<(Decimal256, Uint256)> {\n\n let terra_querier = TerraQuerier::new(&deps.querier);\n\n let rate = terra_querier.query_tax_rate()?.rate;\n\n let cap = terra_querier.query_tax_cap(denom)?.cap;\n\n Ok((rate.into(), cap.into()))\n\n}\n\n\n", "file_path": "packages/moneymarket/src/querier.rs", "rank": 0, "score": 244491.74289082846 }, { "content": "pub fn compute_tax(deps: Deps, coin: &Coin) -> StdResult<Uint256> {\n\n let terra_querier = TerraQuerier::new(&deps.querier);\n\n let tax_rate = Decimal256::from((terra_querier.query_tax_rate()?).rate);\n\n let tax_cap = Uint256::from((terra_querier.query_tax_cap(coin.denom.to_string())?).cap);\n\n let amount = Uint256::from(coin.amount);\n\n Ok(std::cmp::min(\n\n amount * Decimal256::one() - amount / (Decimal256::one() + tax_rate),\n\n tax_cap,\n\n ))\n\n}\n\n\n", "file_path": "packages/moneymarket/src/querier.rs", "rank": 1, "score": 243371.8127567399 }, { "content": "fn process_bid_activation(bid: &mut Bid, bid_pool: &mut BidPool, amount: Uint256) {\n\n bid.product_snapshot = bid_pool.product_snapshot;\n\n bid.sum_snapshot = bid_pool.sum_snapshot;\n\n bid.wait_end = None;\n\n bid.scale_snapshot = bid_pool.current_scale;\n\n bid.epoch_snapshot = bid_pool.current_epoch;\n\n\n\n bid_pool.total_bid_amount += amount;\n\n}\n\n\n\n/// On each collateral execution the product_snapshot and sum_snapshot are updated\n\n/// to track the expense and reward distribution for biders in the pool\n\n/// More details:\n\n/// https://github.com/liquity/liquity/blob/master/papers/Scalable_Reward_Distribution_with_Compounding_Stakes.pdf\n", "file_path": "contracts/liquidation_queue/src/bid.rs", "rank": 2, "score": 241143.21628873976 }, { "content": "fn instantiate_and_whitelist(deps: &mut OwnedDeps<MemoryStorage, MockApi, WasmMockQuerier>) {\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n 
);\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"col0000\".to_string(), &Decimal256::percent(90))]);\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 101u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n\n };\n\n\n", "file_path": "contracts/liquidation_queue/src/testing/product_stress_tests.rs", "rank": 3, "score": 218225.99780573536 }, { "content": "pub fn deduct_tax(deps: Deps, coin: Coin) -> StdResult<Coin> {\n\n let tax_amount = compute_tax(deps, &coin)?;\n\n Ok(Coin {\n\n denom: coin.denom,\n\n amount: (Uint256::from(coin.amount) - tax_amount).into(),\n\n })\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\npub struct TimeConstraints {\n\n pub block_time: u64,\n\n pub valid_timeframe: u64,\n\n}\n\n\n", "file_path": "packages/moneymarket/src/querier.rs", "rank": 4, "score": 218040.33911912926 }, { "content": "pub fn query_balance(deps: Deps, account_addr: Addr, denom: String) -> StdResult<Uint256> {\n\n // load price form the oracle\n\n let balance: BalanceResponse = deps.querier.query(&QueryRequest::Bank(BankQuery::Balance {\n\n address: account_addr.to_string(),\n\n denom,\n\n }))?;\n\n Ok(balance.amount.amount.into())\n\n}\n\n\n", "file_path": "packages/moneymarket/src/querier.rs", "rank": 5, "score": 207165.03168152043 }, { "content": "fn claim_bid_residue(bid_pool: &mut BidPool) -> Uint256 {\n\n let claimable = bid_pool.residue_bid * Uint256::one();\n\n if !claimable.is_zero() {\n\n bid_pool.residue_bid = bid_pool.residue_bid - Decimal256::from_uint256(claimable);\n\n }\n\n claimable\n\n}\n", "file_path": "contracts/liquidation_queue/src/bid.rs", "rank": 6, "score": 183535.8572197465 }, 
{ "content": "fn claim_col_residue(bid_pool: &mut BidPool) -> Uint256 {\n\n let claimable = bid_pool.residue_collateral * Uint256::one();\n\n if !claimable.is_zero() {\n\n bid_pool.residue_collateral =\n\n bid_pool.residue_collateral - Decimal256::from_uint256(claimable);\n\n }\n\n claimable\n\n}\n\n\n", "file_path": "contracts/liquidation_queue/src/bid.rs", "rank": 7, "score": 183535.8572197465 }, { "content": "pub fn pop_bid_idx(storage: &mut dyn Storage) -> StdResult<Uint128> {\n\n let mut idx_store = singleton(storage, KEY_BID_IDX);\n\n let last_idx: Uint128 = idx_store.load().unwrap_or_else(|_| Uint128::from(1u128));\n\n idx_store.save(&(last_idx + Uint128::from(1u128)))?;\n\n Ok(last_idx)\n\n}\n\n\n", "file_path": "contracts/liquidation_queue/src/state.rs", "rank": 8, "score": 168120.18647951505 }, { "content": "pub fn remove_bid(storage: &mut dyn Storage, bid_idx: Uint128) -> StdResult<()> {\n\n let bid: Bid = read_bid(storage, bid_idx)?;\n\n let mut bid_bucket: Bucket<Bid> = Bucket::new(storage, PREFIX_BID);\n\n bid_bucket.remove(&bid_idx.u128().to_be_bytes());\n\n\n\n // remove indexer\n\n let mut bid_indexer_by_user: Bucket<bool> = Bucket::multilevel(\n\n storage,\n\n &[\n\n PREFIX_BID_BY_USER,\n\n bid.collateral_token.as_slice(),\n\n bid.bidder.as_slice(),\n\n ],\n\n );\n\n bid_indexer_by_user.remove(&bid_idx.u128().to_be_bytes());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/liquidation_queue/src/state.rs", "rank": 9, "score": 163070.13626214062 }, { "content": "pub fn query_supply(deps: Deps, contract_addr: Addr) -> StdResult<Uint256> {\n\n // load price form the oracle\n\n let token_info: TokenInfoResponse =\n\n deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: contract_addr.to_string(),\n\n msg: to_binary(&Cw20QueryMsg::TokenInfo {})?,\n\n }))?;\n\n\n\n Ok(Uint256::from(token_info.total_supply))\n\n}\n\n\n", "file_path": "packages/moneymarket/src/querier.rs", "rank": 10, "score": 156799.04886378482 }, { "content": 
"pub fn store_bid(storage: &mut dyn Storage, bid_idx: Uint128, bid: &Bid) -> StdResult<()> {\n\n let mut bid_bucket: Bucket<Bid> = Bucket::new(storage, PREFIX_BID);\n\n bid_bucket.save(&bid_idx.u128().to_be_bytes(), bid)?;\n\n\n\n let mut bid_indexer_by_user: Bucket<bool> = Bucket::multilevel(\n\n storage,\n\n &[\n\n PREFIX_BID_BY_USER,\n\n bid.collateral_token.as_slice(),\n\n bid.bidder.as_slice(),\n\n ],\n\n );\n\n bid_indexer_by_user.save(&bid_idx.u128().to_be_bytes(), &true)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/liquidation_queue/src/state.rs", "rank": 11, "score": 153974.91871211308 }, { "content": "pub fn query_all_balances(deps: Deps, account_addr: Addr) -> StdResult<Vec<Coin>> {\n\n // load price form the oracle\n\n let all_balances: AllBalanceResponse =\n\n deps.querier\n\n .query(&QueryRequest::Bank(BankQuery::AllBalances {\n\n address: account_addr.to_string(),\n\n }))?;\n\n Ok(all_balances.amount)\n\n}\n\n\n", "file_path": "packages/moneymarket/src/querier.rs", "rank": 12, "score": 152989.6909661847 }, { "content": "pub fn query_bid(deps: Deps, bid_idx: Uint128) -> StdResult<BidResponse> {\n\n let bid: Bid = read_bid(deps.storage, bid_idx)?;\n\n let bid_pool: BidPool = read_bid_pool(deps.storage, &bid.collateral_token, bid.premium_slot)?;\n\n\n\n let (bid_amount, bid_pending_liquidated_collateral) = if bid.wait_end.is_some() {\n\n (bid.amount, bid.pending_liquidated_collateral)\n\n } else {\n\n // calculate remaining bid amount\n\n let (remaining_bid, _) = calculate_remaining_bid(&bid, &bid_pool)?;\n\n\n\n // calculate liquidated collateral\n\n let (liquidated_collateral, _) = calculate_liquidated_collateral(deps.storage, &bid)?;\n\n\n\n (\n\n remaining_bid,\n\n bid.pending_liquidated_collateral + liquidated_collateral,\n\n )\n\n };\n\n\n\n Ok(BidResponse {\n", "file_path": "contracts/liquidation_queue/src/query.rs", "rank": 13, "score": 152768.20206935672 }, { "content": "#[test]\n\nfn query_liquidation_amount() {\n\n let mut deps = 
mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n max_premium_rate: Decimal256::percent(5),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/liquidation/src/testing/tests.rs", "rank": 14, "score": 148067.063540769 }, { "content": "#[test]\n\nfn swap_to_stable_denom() {\n\n let mut deps = mock_dependencies(&[\n\n Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(1000000u128),\n\n },\n\n Coin {\n\n denom: \"ukrw\".to_string(),\n\n amount: Uint128::from(20000000000u128),\n\n },\n\n Coin {\n\n denom: \"usdr\".to_string(),\n\n amount: Uint128::from(2000000u128),\n\n },\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"bluna\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n", "file_path": "contracts/custody_bluna/src/testing/tests.rs", "rank": 17, "score": 146100.81810288207 }, { "content": "#[test]\n\nfn swap_to_stable_denom() {\n\n let mut deps = mock_dependencies(&[\n\n Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::new(1000000u128),\n\n },\n\n Coin {\n\n denom: \"ukrw\".to_string(),\n\n amount: Uint128::new(20000000000u128),\n\n },\n\n Coin {\n\n denom: \"usdr\".to_string(),\n\n amount: Uint128::new(2000000u128),\n\n },\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"beth\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n", "file_path": "contracts/custody_beth/src/testing/tests.rs", 
"rank": 18, "score": 146100.81810288207 }, { "content": "#[test]\n\nfn deposit_stable_huge_amount() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }]);\n\n\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n aterra_code_id: 123u64,\n\n anc_emission_rate: Decimal256::one(),\n\n max_borrow_factor: Decimal256::one(),\n\n };\n\n\n\n let info = mock_info(\n\n \"addr0000\",\n\n &[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }],\n", "file_path": "contracts/market/src/testing/tests.rs", "rank": 19, "score": 145467.18275167394 }, { "content": "#[test]\n\nfn integration_test_simul() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier.with_collateral_max_ltv(&[\n\n (&\"token0000\".to_string(), &Decimal256::percent(60)),\n\n (&\"token0001\".to_string(), &Decimal256::percent(30)),\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 20, "score": 143232.5169251762 }, { "content": "#[test]\n\nfn query_liquidation_amount() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier.with_collateral_max_ltv(&[\n\n (&\"token0000\".to_string(), &Decimal256::percent(50)),\n\n (&\"token0001\".to_string(), &Decimal256::percent(50)),\n\n 
(&\"token0002\".to_string(), &Decimal256::percent(50)),\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(100000000u64),\n", "file_path": "contracts/liquidation_queue/src/testing/query_tests.rs", "rank": 21, "score": 142974.72892986264 }, { "content": "#[test]\n\nfn proper_emission_rate() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n emission_cap: Decimal256::from_uint256(100u64),\n\n emission_floor: Decimal256::from_uint256(10u64),\n\n increment_multiplier: Decimal256::percent(110),\n\n decrement_multiplier: Decimal256::percent(90),\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n\n\n\n // high = 8.75\n\n // low = 6.25\n\n // no changes\n\n let query_msg = QueryMsg::AncEmissionRate {\n\n deposit_rate: Decimal256::percent(7),\n\n target_deposit_rate: Decimal256::percent(10),\n", "file_path": "contracts/distribution_model/src/tests.rs", "rank": 23, "score": 139031.47541485872 }, { "content": "#[test]\n\nfn non_partial_liquidation() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: 
Uint256::from(1000000u128),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 24, "score": 138285.79711691802 }, { "content": "fn simulate_bids_with_2_liq_amounts(\n\n iterations: u32,\n\n asset_price: Decimal256,\n\n bid_amount: u128,\n\n liq_amount_1: u128,\n\n liq_amount_2: u128,\n\n) {\n\n let mut deps = mock_dependencies(&[]);\n\n instantiate_and_whitelist(&mut deps);\n\n deps.querier.with_oracle_price(&[(\n\n &(\"col0000\".to_string(), \"uusd\".to_string()),\n\n &(\n\n asset_price,\n\n mock_env().block.time.seconds(),\n\n mock_env().block.time.seconds(),\n\n ),\n\n )]);\n\n\n\n let mut total_liquidated = Uint256::zero();\n\n let mut total_consumed = Uint256::zero();\n", "file_path": "contracts/liquidation_queue/src/testing/product_stress_tests.rs", "rank": 25, "score": 138280.55446600038 }, { "content": "#[test]\n\nfn partial_three_collaterals_one_slot_diff_ltv_big_amounts() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier.with_collateral_max_ltv(&[\n\n (&\"token0000\".to_string(), &Decimal256::percent(50)),\n\n (&\"token0001\".to_string(), &Decimal256::percent(40)),\n\n (&\"token0002\".to_string(), &Decimal256::percent(30)),\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 26, "score": 138184.10104118948 }, { "content": "#[test]\n\nfn partial_three_collaterals_one_slot_diff_ltv_big_amounts_2() {\n\n let mut deps = 
mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier.with_collateral_max_ltv(&[\n\n (&\"token0000\".to_string(), &Decimal256::percent(50)),\n\n (&\"token0001\".to_string(), &Decimal256::percent(40)),\n\n (&\"token0002\".to_string(), &Decimal256::percent(30)),\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 27, "score": 138184.10104118948 }, { "content": "pub fn query_tax_rate(deps: Deps) -> StdResult<Decimal256> {\n\n let terra_querier = TerraQuerier::new(&deps.querier);\n\n Ok(terra_querier.query_tax_rate()?.rate.into())\n\n}\n\n\n", "file_path": "packages/moneymarket/src/querier.rs", "rank": 28, "score": 137069.8099376293 }, { "content": "#[test]\n\nfn non_partial_liquidation_with_fees() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(1000000u128),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 29, 
"score": 136077.50866840297 }, { "content": "#[test]\n\nfn not_enough_bids_for_one_of_two_col() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier.with_collateral_max_ltv(&[\n\n (&\"token0000\".to_string(), &Decimal256::percent(50)),\n\n (&\"token0001\".to_string(), &Decimal256::percent(30)),\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 30, "score": 133952.91737405484 }, { "content": "#[test]\n\nfn partial_one_collateral_two_slots() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 31, "score": 133952.91737405484 }, { "content": "#[test]\n\nfn partial_one_collateral_one_slot() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), 
&Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 32, "score": 133952.91737405484 }, { "content": "#[test]\n\nfn partial_two_collaterals_ltv_diff() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier.with_collateral_max_ltv(&[\n\n (&\"token0000\".to_string(), &Decimal256::percent(99)),\n\n (&\"token0001\".to_string(), &Decimal256::percent(1)),\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 33, "score": 133952.91737405484 }, { "content": "#[test]\n\nfn non_partial_liquidation_two_slots() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n 
oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(1000000u128),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 34, "score": 133952.91737405484 }, { "content": "#[test]\n\nfn proper_compute_exchange_rate() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(2000000u128),\n\n }]);\n\n let env = mock_env();\n\n //setting up the required environment for the function call (inputs)\n\n let mock_config = Config {\n\n contract_addr: deps.api.addr_canonicalize(MOCK_CONTRACT_ADDR).unwrap(),\n\n owner_addr: deps.api.addr_canonicalize(\"owner\").unwrap(),\n\n aterra_contract: deps.api.addr_canonicalize(\"AT-uusd\").unwrap(),\n\n interest_model: deps.api.addr_canonicalize(\"interest\").unwrap(),\n\n distribution_model: deps.api.addr_canonicalize(\"distribution\").unwrap(),\n\n distributor_contract: deps.api.addr_canonicalize(\"distributor\").unwrap(),\n\n collector_contract: deps.api.addr_canonicalize(\"collector\").unwrap(),\n\n overseer_contract: deps.api.addr_canonicalize(\"overseer\").unwrap(),\n\n stable_denom: \"uusd\".to_string(),\n\n max_borrow_factor: Decimal256::one(),\n\n };\n\n deps.querier.with_token_balances(&[(\n", "file_path": "contracts/market/src/testing/deposit_ut.rs", "rank": 35, "score": 133589.83834752778 }, { "content": "pub fn remove_borrower_info(storage: &mut dyn Storage, borrower: &CanonicalAddr) {\n\n let mut borrower_bucket: Bucket<BorrowerInfo> = Bucket::new(storage, PREFIX_BORROWER);\n\n borrower_bucket.remove(borrower.as_slice());\n\n}\n\n\n", "file_path": "contracts/custody_beth/src/state.rs", "rank": 36, "score": 132746.09144155434 }, { "content": "pub 
fn remove_borrower_info(storage: &mut dyn Storage, borrower: &CanonicalAddr) {\n\n let mut borrower_bucket: Bucket<BorrowerInfo> = Bucket::new(storage, PREFIX_BORROWER);\n\n borrower_bucket.remove(borrower.as_slice());\n\n}\n\n\n", "file_path": "contracts/custody_bluna/src/state.rs", "rank": 37, "score": 132746.09144155434 }, { "content": "#[test]\n\nfn partial_one_collateral_one_slot_with_fees_all() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(1),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 38, "score": 131907.22653943978 }, { "content": "#[test]\n\nfn non_partial_liquidation_two_slots_with_fees() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(1000000u128),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: 
\"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 39, "score": 131907.22653943978 }, { "content": "#[test]\n\nfn partial_one_collateral_one_slot_with_fees() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 40, "score": 131907.22653943978 }, { "content": "#[test]\n\nfn partial_one_collateral_two_slots_with_fees() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 41, "score": 131907.22653943978 }, { "content": "#[test]\n\nfn 
partial_two_collaterals_one_slot_diff_ltv() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier.with_collateral_max_ltv(&[\n\n (&\"token0000\".to_string(), &Decimal256::percent(50)),\n\n (&\"token0001\".to_string(), &Decimal256::percent(30)),\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 42, "score": 129936.00637496589 }, { "content": "#[test]\n\nfn partial_three_collaterals_one_slot_diff_ltv() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier.with_collateral_max_ltv(&[\n\n (&\"token0000\".to_string(), &Decimal256::percent(50)),\n\n (&\"token0001\".to_string(), &Decimal256::percent(70)),\n\n (&\"token0002\".to_string(), &Decimal256::percent(30)),\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 43, "score": 129936.00637496589 }, { "content": "#[test]\n\nfn partial_two_collaterals_multi_slots_per_col() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), 
&Uint128::from(1000000u128))],\n\n );\n\n deps.querier.with_collateral_max_ltv(&[\n\n (&\"token0000\".to_string(), &Decimal256::percent(50)),\n\n (&\"token0001\".to_string(), &Decimal256::percent(30)),\n\n ]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 44, "score": 129936.00637496589 }, { "content": "#[test]\n\nfn partial_one_collateral_one_slot_high_ltv() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(0),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(90))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(0),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::zero(),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 45, "score": 129936.00637496589 }, { "content": "#[test]\n\nfn non_partial_liquidation_two_slots_with_fees_big_nums() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"token0000\".to_string(), &Decimal256::percent(50))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: 
\"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(80),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(2000000000u128),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/query_liq_amount_tests.rs", "rank": 46, "score": 128035.15907669775 }, { "content": "pub fn query_target_deposit_rate(deps: Deps, overseer_contract: Addr) -> StdResult<Decimal256> {\n\n let overseer_config: ConfigResponse =\n\n deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: overseer_contract.to_string(),\n\n msg: to_binary(&OverseerQueryMsg::Config {})?,\n\n }))?;\n\n\n\n Ok(overseer_config.target_deposit_rate)\n\n}\n", "file_path": "contracts/market/src/querier.rs", "rank": 47, "score": 127246.61568193704 }, { "content": "pub fn execute_epoch_operations(deps: DepsMut, env: Env) -> Result<Response, ContractError> {\n\n let config: Config = read_config(deps.storage)?;\n\n let state: EpochState = read_epoch_state(deps.storage)?;\n\n if env.block.height < state.last_executed_height + config.epoch_period {\n\n return Err(ContractError::EpochNotPassed(state.last_executed_height));\n\n }\n\n\n\n // # of blocks from the last executed height\n\n let blocks = Uint256::from(env.block.height - state.last_executed_height);\n\n\n\n // Compute next epoch state\n\n let market_contract = deps.api.addr_humanize(&config.market_contract)?;\n\n let epoch_state: EpochStateResponse = query_epoch_state(\n\n deps.as_ref(),\n\n market_contract.clone(),\n\n env.block.height,\n\n None,\n\n )?;\n\n\n\n // effective_deposit_rate = cur_exchange_rate / prev_exchange_rate\n", "file_path": "contracts/overseer/src/contract.rs", "rank": 48, "score": 126579.4573145298 }, { "content": "pub fn register_aterra(deps: DepsMut, 
token_addr: Addr) -> Result<Response, ContractError> {\n\n let mut config: Config = read_config(deps.storage)?;\n\n if config.aterra_contract != CanonicalAddr::from(vec![]) {\n\n return Err(ContractError::Unauthorized {});\n\n }\n\n\n\n config.aterra_contract = deps.api.addr_canonicalize(token_addr.as_str())?;\n\n store_config(deps.storage, &config)?;\n\n\n\n Ok(Response::new().add_attributes(vec![attr(\"aterra\", token_addr)]))\n\n}\n\n\n", "file_path": "contracts/market/src/contract.rs", "rank": 49, "score": 126579.4573145298 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn reply(deps: DepsMut, _env: Env, msg: Reply) -> Result<Response, ContractError> {\n\n match msg.id {\n\n 1 => {\n\n // get new token's contract address\n\n let res: MsgInstantiateContractResponse = Message::parse_from_bytes(\n\n msg.result.unwrap().data.unwrap().as_slice(),\n\n )\n\n .map_err(|_| {\n\n ContractError::Std(StdError::parse_err(\n\n \"MsgInstantiateContractResponse\",\n\n \"failed to parse data\",\n\n ))\n\n })?;\n\n let token_addr = Addr::unchecked(res.get_contract_address());\n\n\n\n register_aterra(deps, token_addr)\n\n }\n\n _ => Err(ContractError::InvalidReplyId {}),\n\n }\n\n}\n\n\n", "file_path": "contracts/market/src/contract.rs", "rank": 50, "score": 125512.97550633048 }, { "content": "pub fn optional_addr_validate(api: &dyn Api, addr: Option<String>) -> StdResult<Option<Addr>> {\n\n let addr = if let Some(addr) = addr {\n\n Some(api.addr_validate(&addr)?)\n\n } else {\n\n None\n\n };\n\n\n\n Ok(addr)\n\n}\n", "file_path": "packages/moneymarket/src/common.rs", "rank": 51, "score": 122354.33296178479 }, { "content": "pub fn repay_stable(deps: DepsMut, env: Env, info: MessageInfo) -> Result<Response, ContractError> {\n\n let config: Config = read_config(deps.storage)?;\n\n\n\n // Check stable denom deposit\n\n let amount: Uint256 = info\n\n .funds\n\n .iter()\n\n .find(|c| c.denom == config.stable_denom)\n\n .map(|c| 
Uint256::from(c.amount))\n\n .unwrap_or_else(Uint256::zero);\n\n\n\n // Cannot deposit zero amount\n\n if amount.is_zero() {\n\n return Err(ContractError::ZeroRepay(config.stable_denom));\n\n }\n\n\n\n let mut state: State = read_state(deps.storage)?;\n\n\n\n let borrower = info.sender;\n\n let borrower_raw = deps.api.addr_canonicalize(borrower.as_str())?;\n", "file_path": "contracts/market/src/borrow.rs", "rank": 52, "score": 121746.08397528576 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::Receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::UpdateConfig {\n\n owner,\n\n oracle_contract,\n\n safe_ratio,\n\n bid_fee,\n\n liquidator_fee,\n\n liquidation_threshold,\n\n price_timeframe,\n\n waiting_period,\n\n overseer,\n\n } => update_config(\n\n deps,\n\n info,\n\n owner,\n\n oracle_contract,\n\n safe_ratio,\n\n bid_fee,\n", "file_path": "contracts/liquidation_queue/src/contract.rs", "rank": 53, "score": 117759.64274891678 }, { "content": "/// Compute distributed reward and update global index\n\npub fn compute_reward(state: &mut State, block_height: u64) {\n\n if state.last_reward_updated >= block_height {\n\n return;\n\n }\n\n\n\n let passed_blocks = Decimal256::from_uint256(block_height - state.last_reward_updated);\n\n let reward_accrued = passed_blocks * state.anc_emission_rate;\n\n let borrow_amount = state.total_liabilities / state.global_interest_index;\n\n\n\n if !reward_accrued.is_zero() && !borrow_amount.is_zero() {\n\n state.global_reward_index += reward_accrued / borrow_amount;\n\n }\n\n\n\n state.last_reward_updated = block_height;\n\n}\n\n\n\n/// Compute reward amount a borrower received\n\npub(crate) fn compute_borrower_reward(state: &State, liability: &mut BorrowerInfo) {\n\n liability.pending_rewards += Decimal256::from_uint256(liability.loan_amount)\n\n / 
state.global_interest_index\n\n * (state.global_reward_index - liability.reward_index);\n\n liability.reward_index = state.global_reward_index;\n\n}\n\n\n", "file_path": "contracts/market/src/borrow.rs", "rank": 54, "score": 113722.72507576787 }, { "content": "// this will set the first key after the provided key, by appending a 1 byte\n\nfn calc_range_start_idx(start_after: Option<Uint128>) -> Option<Vec<u8>> {\n\n start_after.map(|idx| {\n\n let mut v = idx.u128().to_be_bytes().to_vec();\n\n v.push(1);\n\n v\n\n })\n\n}\n\n\n", "file_path": "contracts/liquidation_queue/src/state.rs", "rank": 55, "score": 110986.519098087 }, { "content": "#[test]\n\nfn whitelist() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n oracle_contract: \"oracle\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n collector_contract: \"collector\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n epoch_period: 86400u64,\n\n threshold_deposit_rate: Decimal256::permille(3),\n\n target_deposit_rate: Decimal256::permille(5),\n\n buffer_distribution_factor: Decimal256::percent(20),\n\n anc_purchase_factor: Decimal256::percent(20),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n // we can just call .unwrap() to assert this was a success\n", "file_path": "contracts/overseer/src/testing/tests.rs", "rank": 56, "score": 110119.47826443554 }, { "content": "#[test]\n\nfn deposit_stable() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }]);\n\n\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n aterra_code_id: 123u64,\n\n anc_emission_rate: Decimal256::one(),\n\n max_borrow_factor: Decimal256::one(),\n\n };\n\n\n\n let info = mock_info(\n\n \"addr0000\",\n\n &[Coin 
{\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }],\n", "file_path": "contracts/market/src/testing/tests.rs", "rank": 57, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn claim_rewards() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n aterra_code_id: 123u64,\n\n anc_emission_rate: Decimal256::one(),\n\n max_borrow_factor: Decimal256::one(),\n\n };\n\n\n\n let info = mock_info(\n\n \"addr0000\",\n", "file_path": "contracts/market/src/testing/tests.rs", "rank": 58, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn update_config() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }]);\n\n deps.querier\n\n .with_borrow_rate(&[(&\"interest\".to_string(), &Decimal256::percent(1))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n aterra_code_id: 123u64,\n\n anc_emission_rate: Decimal256::one(),\n\n max_borrow_factor: Decimal256::one(),\n\n };\n\n\n\n let info = mock_info(\n\n \"addr0000\",\n\n &[Coin {\n\n denom: \"uusd\".to_string(),\n", "file_path": "contracts/market/src/testing/tests.rs", "rank": 59, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }]);\n\n\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n aterra_code_id: 123u64,\n\n anc_emission_rate: Decimal256::one(),\n\n max_borrow_factor: 
Decimal256::one(),\n\n };\n\n\n\n let info = mock_info(\n\n \"addr0000\",\n\n &[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }],\n", "file_path": "contracts/market/src/testing/tests.rs", "rank": 60, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n max_premium_rate: Decimal256::percent(5),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n\n\n // we can just call .unwrap() to assert this was a success\n\n let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n", "file_path": "contracts/liquidation/src/testing/tests.rs", "rank": 61, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn retract_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n max_premium_rate: Decimal256::percent(5),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n\n\n\n let msg = ExecuteMsg::SubmitBid {\n\n collateral_token: \"asset0000\".to_string(),\n\n premium_rate: Decimal256::percent(1),\n", "file_path": "contracts/liquidation/src/testing/tests.rs", "rank": 62, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn update_config() {\n\n let mut deps = 
mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n max_premium_rate: Decimal256::percent(5),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n\n\n\n // update owner\n\n let info = mock_info(\"owner0000\", &[]);\n\n let msg = ExecuteMsg::UpdateConfig {\n", "file_path": "contracts/liquidation/src/testing/tests.rs", "rank": 63, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn repay_stable() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n aterra_code_id: 123u64,\n\n anc_emission_rate: Decimal256::one(),\n\n max_borrow_factor: Decimal256::one(),\n\n };\n\n\n\n let info = mock_info(\n\n \"addr0000\",\n", "file_path": "contracts/market/src/testing/tests.rs", "rank": 64, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn submit_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n max_premium_rate: Decimal256::percent(5),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), 
info.clone(), msg).unwrap();\n\n\n\n let msg = ExecuteMsg::SubmitBid {\n\n collateral_token: \"asset0000\".to_string(),\n\n premium_rate: Decimal256::percent(20),\n", "file_path": "contracts/liquidation/src/testing/tests.rs", "rank": 65, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn update_config() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n oracle_contract: \"oracle\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n collector_contract: \"collector\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n epoch_period: 86400u64,\n\n threshold_deposit_rate: Decimal256::permille(3),\n\n target_deposit_rate: Decimal256::permille(5),\n\n buffer_distribution_factor: Decimal256::percent(20),\n\n anc_purchase_factor: Decimal256::percent(20),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n // we can just call .unwrap() to assert this was a success\n", "file_path": "contracts/overseer/src/testing/tests.rs", "rank": 66, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn borrow_stable() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n aterra_code_id: 123u64,\n\n anc_emission_rate: Decimal256::one(),\n\n max_borrow_factor: Decimal256::one(),\n\n };\n\n\n\n let info = mock_info(\n\n \"addr0000\",\n", "file_path": "contracts/market/src/testing/tests.rs", "rank": 67, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn liquidate_collateral() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier\n\n 
.with_liquidation_percent(&[(&\"liquidation\".to_string(), &Decimal256::percent(1))]);\n\n\n\n let info = mock_info(\"owner\", &[]);\n\n let env = mock_env();\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n oracle_contract: \"oracle\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n collector_contract: \"collector\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n epoch_period: 86400u64,\n\n threshold_deposit_rate: Decimal256::permille(3),\n\n target_deposit_rate: Decimal256::permille(5),\n\n buffer_distribution_factor: Decimal256::percent(20),\n\n anc_purchase_factor: Decimal256::percent(20),\n\n price_timeframe: 60u64,\n", "file_path": "contracts/overseer/src/testing/tests.rs", "rank": 68, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn execute_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n max_premium_rate: Decimal256::percent(5),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let env = mock_env();\n\n deps.querier.with_oracle_price(&[(\n", "file_path": "contracts/liquidation/src/testing/tests.rs", "rank": 69, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn lock_collateral() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let info = mock_info(\"owner\", &[]);\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n oracle_contract: \"oracle\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n 
collector_contract: \"collector\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n epoch_period: 86400u64,\n\n threshold_deposit_rate: Decimal256::permille(3),\n\n target_deposit_rate: Decimal256::permille(5),\n\n buffer_distribution_factor: Decimal256::percent(20),\n\n anc_purchase_factor: Decimal256::percent(20),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n // we can just call .unwrap() to assert this was a success\n", "file_path": "contracts/overseer/src/testing/tests.rs", "rank": 70, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn unlock_collateral() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let info = mock_info(\"owner\", &[]);\n\n let env = mock_env();\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n oracle_contract: \"oracle\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n collector_contract: \"collector\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n epoch_period: 86400u64,\n\n threshold_deposit_rate: Decimal256::permille(3),\n\n target_deposit_rate: Decimal256::permille(5),\n\n buffer_distribution_factor: Decimal256::percent(20),\n\n anc_purchase_factor: Decimal256::percent(20),\n\n price_timeframe: 60u64,\n\n };\n\n\n", "file_path": "contracts/overseer/src/testing/tests.rs", "rank": 71, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n oracle_contract: \"oracle\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n collector_contract: \"collector\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n epoch_period: 86400u64,\n\n threshold_deposit_rate: Decimal256::permille(3),\n\n target_deposit_rate: Decimal256::permille(5),\n\n buffer_distribution_factor: Decimal256::percent(20),\n\n 
anc_purchase_factor: Decimal256::percent(20),\n\n price_timeframe: 60u64,\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n\n", "file_path": "contracts/overseer/src/testing/tests.rs", "rank": 72, "score": 108348.62110047662 }, { "content": "#[test]\n\nfn redeem_stable() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }]);\n\n\n\n let msg = InstantiateMsg {\n\n owner_addr: \"owner\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n aterra_code_id: 123u64,\n\n anc_emission_rate: Decimal256::one(),\n\n max_borrow_factor: Decimal256::one(),\n\n };\n\n\n\n let info = mock_info(\n\n \"addr0000\",\n\n &[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::from(INITIAL_DEPOSIT_AMOUNT),\n\n }],\n", "file_path": "contracts/market/src/testing/tests.rs", "rank": 73, "score": 108348.62110047662 }, { "content": "//#[test]\n\nfn stress_tests() {\n\n // submit bids and execute liquidations repeatedly\n\n // we can alternate larger and smaller executions to decrease the bid_pool product at different rates\n\n\n\n // with very tight liquidations, constatly resetting product\n\n // 1M UST bids\n\n simulate_bids_with_2_liq_amounts(\n\n ITERATIONS,\n\n Decimal256::percent(2000),\n\n 1000000000000u128,\n\n 49999999999,\n\n 49999999990,\n\n );\n\n // 10 UST bids\n\n simulate_bids_with_2_liq_amounts(\n\n ITERATIONS,\n\n Decimal256::percent(2000),\n\n 10000000u128,\n\n 499999,\n\n 499999,\n", "file_path": "contracts/liquidation_queue/src/testing/product_stress_tests.rs", "rank": 74, "score": 108163.10021912146 }, { "content": "pub fn store_config(storage: &mut dyn Storage, data: &Config) -> StdResult<()> {\n\n Singleton::new(storage, KEY_CONFIG).save(data)\n\n}\n\n\n", "file_path": "contracts/market/src/state.rs", "rank": 75, "score": 106927.49400798045 }, { "content": "pub fn store_config(storage: &mut dyn Storage, config: &Config) -> StdResult<()> {\n\n singleton(storage, 
KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/oracle/src/state.rs", "rank": 76, "score": 106927.49400798045 }, { "content": "pub fn store_config(storage: &mut dyn Storage, data: &Config) -> StdResult<()> {\n\n Singleton::new(storage, KEY_CONFIG).save(data)\n\n}\n\n\n", "file_path": "contracts/overseer/src/state.rs", "rank": 77, "score": 106927.49400798045 }, { "content": "pub fn store_state(storage: &mut dyn Storage, data: &State) -> StdResult<()> {\n\n Singleton::new(storage, KEY_STATE).save(data)\n\n}\n\n\n", "file_path": "contracts/market/src/state.rs", "rank": 78, "score": 106927.49400798045 }, { "content": "pub fn store_config(storage: &mut dyn Storage, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/liquidation/src/state.rs", "rank": 79, "score": 106927.49400798045 }, { "content": "#[test]\n\nfn distribute_hook() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::new(1000000u128),\n\n }]);\n\n\n\n deps.querier.with_token_balances(&[(\n\n &\"beth\".to_string(),\n\n &[(&MOCK_CONTRACT_ADDR.to_string(), &Uint128::from(1000u128))],\n\n )]);\n\n\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"beth\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n", "file_path": "contracts/custody_beth/src/testing/tests.rs", "rank": 80, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn update_config() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"beth\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n 
stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"beth\".to_string(),\n\n symbol: \"beth\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n\n", "file_path": "contracts/custody_beth/src/testing/tests.rs", "rank": 81, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn withdraw_collateral() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"beth\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"beth\".to_string(),\n\n symbol: \"beth\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/custody_beth/src/testing/tests.rs", "rank": 82, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn lock_collateral() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"beth\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"beth\".to_string(),\n\n symbol: \"beth\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/custody_beth/src/testing/tests.rs", "rank": 83, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn lock_collateral() {\n\n let mut deps = 
mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"bluna\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"bluna\".to_string(),\n\n symbol: \"bluna\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/custody_bluna/src/testing/tests.rs", "rank": 84, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn deposit_collateral() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"bluna\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"bluna\".to_string(),\n\n symbol: \"bluna\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/custody_bluna/src/testing/tests.rs", "rank": 85, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"beth\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: 
BAssetInfo {\n\n name: \"beth\".to_string(),\n\n symbol: \"beth\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n\n", "file_path": "contracts/custody_beth/src/testing/tests.rs", "rank": 86, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn retract_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"asset0000\".to_string(), &Decimal256::percent(90))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/liquidation_queue/src/testing/tests.rs", "rank": 87, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn update_config() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"bluna\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"bluna\".to_string(),\n\n symbol: \"bluna\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n\n", "file_path": "contracts/custody_bluna/src/testing/tests.rs", "rank": 88, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn submit_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier\n\n 
.with_collateral_max_ltv(&[(&\"asset0000\".to_string(), &Decimal256::percent(90))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/liquidation_queue/src/testing/tests.rs", "rank": 89, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn distribute_rewards() {\n\n let mut deps = mock_dependencies(&[Coin {\n\n denom: \"uusd\".to_string(),\n\n amount: Uint128::new(1000000u128),\n\n }]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"beth\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"beth\".to_string(),\n\n symbol: \"beth\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n", "file_path": "contracts/custody_beth/src/testing/tests.rs", "rank": 90, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn withdraw_collateral() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"bluna\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo 
{\n\n name: \"bluna\".to_string(),\n\n symbol: \"bluna\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/custody_bluna/src/testing/tests.rs", "rank": 91, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn update_config() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n\n\n\n // update owner\n", "file_path": "contracts/liquidation_queue/src/testing/tests.rs", "rank": 92, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn execute_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"asset0000\".to_string(), &Decimal256::percent(90))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(1),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 100000u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n", "file_path": "contracts/liquidation_queue/src/testing/tests.rs", "rank": 93, "score": 106649.52440296885 }, { 
"content": "#[test]\n\nfn deposit_collateral() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"beth\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"beth\".to_string(),\n\n symbol: \"beth\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/custody_beth/src/testing/tests.rs", "rank": 94, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"bluna\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"bluna\".to_string(),\n\n symbol: \"bluna\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n\n", "file_path": "contracts/custody_bluna/src/testing/tests.rs", "rank": 95, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn activate_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"asset0000\".to_string(), &Decimal256::percent(90))]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: 
Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/liquidation_queue/src/testing/tests.rs", "rank": 96, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn liquidate_collateral() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner\".to_string(),\n\n collateral_token: \"beth\".to_string(),\n\n overseer_contract: \"overseer\".to_string(),\n\n market_contract: \"market\".to_string(),\n\n reward_contract: \"reward\".to_string(),\n\n liquidation_contract: \"liquidation\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n basset_info: BAssetInfo {\n\n name: \"beth\".to_string(),\n\n symbol: \"beth\".to_string(),\n\n decimals: 6,\n\n },\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n let _res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/custody_beth/src/testing/tests.rs", "rank": 97, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn claim_liquidations() {\n\n let mut deps = mock_dependencies(&[]);\n\n deps.querier.with_tax(\n\n Decimal::percent(1),\n\n &[(&\"uusd\".to_string(), &Uint128::from(1000000u128))],\n\n );\n\n deps.querier\n\n .with_collateral_max_ltv(&[(&\"asset0000\".to_string(), &Decimal256::percent(90))]);\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 1000000u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n\n };\n", 
"file_path": "contracts/liquidation_queue/src/testing/tests.rs", "rank": 98, "score": 106649.52440296885 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = InstantiateMsg {\n\n owner: \"owner0000\".to_string(),\n\n oracle_contract: \"oracle0000\".to_string(),\n\n stable_denom: \"uusd\".to_string(),\n\n safe_ratio: Decimal256::percent(10),\n\n bid_fee: Decimal256::percent(1),\n\n liquidator_fee: Decimal256::percent(0),\n\n liquidation_threshold: Uint256::from(100000000u64),\n\n price_timeframe: 60u64,\n\n waiting_period: 60u64,\n\n overseer: \"overseer0000\".to_string(),\n\n };\n\n\n\n let info = mock_info(\"addr0000\", &[]);\n\n\n\n // we can just call .unwrap() to assert this was a success\n\n let res = instantiate(deps.as_mut(), mock_env(), info, msg).unwrap();\n", "file_path": "contracts/liquidation_queue/src/testing/tests.rs", "rank": 99, "score": 106649.52440296885 } ]
Rust
micro-color-chooser/src/service/game_color_prefs.rs
CoinArcade/BUGOUT
ec01dc3dae54e1e248d540d442caa1731f2822e4
use crate::components::Repos; use crate::repo::*; use api::GameReady; use color_model::*; use core_model::*; pub fn by_session_id(session_id: &SessionId, repos: &Repos) -> Result<GameColorPref, FetchErr> { repos.game_ready.get(session_id).and_then(|sg| match sg { None => Ok(GameColorPref::NotReady), Some(game_ready) => { let first_pref = repos.prefs.get(&game_ready.sessions.0); let second_pref = repos.prefs.get(&game_ready.sessions.1); match (first_pref, second_pref) { (Ok(Some(first)), Ok(Some(second))) => Ok(GameColorPref::Complete { game_id: game_ready.game_id.clone(), prefs: (first, second), }), (Ok(Some(partial)), Ok(None)) => Ok(GameColorPref::Partial { game_id: game_ready.game_id.clone(), pref: partial, }), (Ok(None), Ok(Some(partial))) => Ok(GameColorPref::Partial { game_id: game_ready.game_id.clone(), pref: partial, }), (Ok(None), Ok(None)) => Ok(GameColorPref::NotReady), (Err(e), _) => Err(e), (_, Err(e)) => Err(e), } } }) } pub fn by_game_ready(game_ready: &GameReady, repos: &Repos) -> Result<GameColorPref, FetchErr> { let first_pref = repos.prefs.get(&game_ready.sessions.0)?; let second_pref = repos.prefs.get(&game_ready.sessions.1)?; Ok(match (first_pref, second_pref) { (Some(first), Some(second)) => GameColorPref::Complete { game_id: game_ready.game_id.clone(), prefs: (first, second), }, (Some(partial), None) => GameColorPref::Partial { game_id: game_ready.game_id.clone(), pref: partial, }, (None, Some(partial)) => GameColorPref::Partial { game_id: game_ready.game_id.clone(), pref: partial, }, _ => GameColorPref::NotReady, }) } #[cfg(test)] mod tests { use super::*; use std::rc::Rc; struct SGNotReady; struct SGReady(pub GameReady); struct PrefsNone; struct PrefsOne(pub SessionColorPref); struct PrefsTwo(pub SessionColorPref, pub SessionColorPref); impl GameReadyRepo for SGNotReady { fn get(&self, _: &SessionId) -> Result<Option<GameReady>, FetchErr> { Ok(None) } fn put(&self, _: GameReady) -> Result<(), WriteErr> { panic!() } } impl GameReadyRepo for 
SGReady { fn get(&self, session_id: &SessionId) -> Result<Option<GameReady>, FetchErr> { if session_id == &self.0.sessions.0 { Ok(Some(self.0.clone())) } else if session_id == &self.0.sessions.1 { Ok(Some(self.0.clone())) } else { Ok(None) } } fn put(&self, _: GameReady) -> Result<(), WriteErr> { panic!() } } impl PrefsRepo for PrefsNone { fn get(&self, _: &SessionId) -> Result<Option<SessionColorPref>, FetchErr> { Ok(None) } fn put(&self, _: &SessionColorPref) -> Result<(), WriteErr> { panic!() } } impl PrefsRepo for PrefsOne { fn get(&self, session_id: &SessionId) -> Result<Option<SessionColorPref>, FetchErr> { if session_id == &self.0.session_id { Ok(Some(self.0.clone())) } else { Ok(None) } } fn put(&self, _: &SessionColorPref) -> Result<(), WriteErr> { panic!() } } impl PrefsRepo for PrefsTwo { fn get(&self, session_id: &SessionId) -> Result<Option<SessionColorPref>, FetchErr> { if session_id == &self.0.session_id { Ok(Some(self.0.clone())) } else if session_id == &self.1.session_id { Ok(Some(self.1.clone())) } else { Ok(None) } } fn put(&self, _: &SessionColorPref) -> Result<(), WriteErr> { panic!() } } #[test] fn test_by_session_id_complete() { let sid = SessionId::new(); let cid = ClientId::new(); let gid = GameId::new(); let one_pref = SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: cid.clone(), }; let another_sid = SessionId::new(); let another_cid = ClientId::new(); let another_pref = SessionColorPref { session_id: another_sid.clone(), color_pref: ColorPref::Black, client_id: another_cid.clone(), }; let board_size = 9; let repos = Repos { prefs: Rc::new(PrefsTwo(one_pref.clone(), another_pref.clone())), game_ready: Rc::new(SGReady(GameReady { sessions: (sid.clone(), another_sid.clone()), game_id: gid.clone(), event_id: EventId::new(), board_size, })), }; let actual = by_session_id(&sid, &repos).expect("ok"); assert_eq!( actual, GameColorPref::Complete { game_id: gid, prefs: (one_pref, another_pref) } ) } #[test] fn 
test_by_session_id_partial() { let sid = SessionId::new(); let cid = ClientId::new(); let gid = GameId::new(); let pref = SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: cid.clone(), }; let board_size = 9; let repos = Repos { prefs: Rc::new(PrefsOne(pref.clone())), game_ready: Rc::new(SGReady(GameReady { sessions: (sid.clone(), SessionId::new()), game_id: gid.clone(), event_id: EventId::new(), board_size, })), }; let actual = by_session_id(&sid, &repos).expect("ok"); assert_eq!(actual, GameColorPref::Partial { game_id: gid, pref }) } #[test] fn test_by_session_id_not_ready() { let sid = SessionId::new(); let cid = ClientId::new(); let repos = Repos { prefs: Rc::new(PrefsOne(SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: cid.clone(), })), game_ready: Rc::new(SGNotReady), }; let actual = by_session_id(&sid, &repos).expect("ok"); assert_eq!(actual, GameColorPref::NotReady) } #[test] fn test_by_game_ready_two_prefs() { let sid = SessionId::new(); let gid = GameId::new(); let another_sid = SessionId::new(); let sessions = (sid.clone(), another_sid.clone()); let board_size = 9; let game_ready = GameReady { sessions: sessions.clone(), game_id: gid.clone(), event_id: EventId::new(), board_size, }; let first_pref = SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: ClientId::new(), }; let second_pref = SessionColorPref { session_id: another_sid.clone(), color_pref: ColorPref::Black, client_id: ClientId::new(), }; let repos = Repos { prefs: Rc::new(PrefsTwo(first_pref.clone(), second_pref.clone())), game_ready: Rc::new(SGReady(game_ready.clone())), }; let actual = by_game_ready(&game_ready, &repos).expect("ok"); assert_eq!( actual, GameColorPref::Complete { game_id: gid, prefs: (first_pref, second_pref) } ) } #[test] fn test_by_game_ready_one_pref() { let sid = SessionId::new(); let gid = GameId::new(); let sessions = (sid.clone(), SessionId::new()); let board_size = 
9; let game_ready = GameReady { sessions: sessions.clone(), game_id: gid.clone(), event_id: EventId::new(), board_size, }; let pref = SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: ClientId::new(), }; let repos = Repos { prefs: Rc::new(PrefsOne(pref.clone())), game_ready: Rc::new(SGReady(game_ready.clone())), }; let actual = by_game_ready(&game_ready, &repos).expect("ok"); assert_eq!(actual, GameColorPref::Partial { game_id: gid, pref }) } #[test] fn test_by_game_ready_no_prefs() { let sid = SessionId::new(); let gid = GameId::new(); let sessions = (sid, SessionId::new()); let board_size = 9; let game_ready = GameReady { sessions: sessions.clone(), game_id: gid, event_id: EventId::new(), board_size, }; let repos = Repos { prefs: Rc::new(PrefsNone), game_ready: Rc::new(SGReady(game_ready.clone())), }; let actual = by_game_ready(&game_ready, &repos).expect("ok"); assert_eq!(actual, GameColorPref::NotReady) } }
use crate::components::Repos; use crate::repo::*; use api::GameReady; use color_model::*; use core_model::*; pub fn by_session_id(session_id: &SessionId, repos: &Repos) -> Result<GameColorPref, FetchErr> { repos.game_ready.get(session_id).and_then(|sg| match sg { None => Ok(GameColorPref::NotReady), Some(game_ready) => { let first_pref = repos.prefs.get(&game_ready.sessions.0); let second_pref = repos.prefs.get(&game_ready.sessions.1); match (first_pref, second_pref) { (Ok(Some(first)), Ok(Some(second))) => Ok(GameColorPref::Complete { game_id: game_ready.game_id.clone(), prefs: (first, second), }), (Ok(Some(partial)), Ok(None)) => Ok(GameColorPref::Partial { game_id: game_ready.game_id.clone(), pref: partial, }), (Ok(None), Ok(Some(partial))) => Ok(GameColorPref::Partial { game_id: game_ready.game_id.clone(), pref: partial, }), (Ok(None), Ok(None)) => Ok(GameColorPref::NotReady), (Err(e), _) => Err(e), (_, Err(e)) => Err(e), } } }) } pub fn by_game_ready(game_ready: &GameReady, repos: &Repos) -> Result<GameColorPref, FetchErr> { let first_pref = repos.prefs.get(&game_ready.sessions.0)?; let second_pref = repos.prefs.get(&game_ready.sessions.1)?; Ok(match (first_pref, second_pref) { (Some(first), Some(second)) => GameColorPref::Complete { game_id: game_ready.game_id.clone(), prefs: (first, second), }, (Some(partial), None) => GameColorPref::Partial { game_id: game_ready.game_id.clone(), pref: partial, }, (None, Some(partial)) => GameColorPref::Partial { game_id: game_ready.game_id.clone(), pref: partial, }, _ => GameColorPref::NotReady, }) } #[cfg(test)] mod tests { use super::*; use std::rc::Rc; struct SGNotReady; struct SGReady(pub GameReady); struct PrefsNone; struct PrefsOne(pub SessionColorPref); struct PrefsTwo(pub SessionColorPref, pub SessionColorPref); impl GameReadyRepo for SGNotReady { fn get(&self, _: &SessionId) -> Result<Option<GameReady>, FetchErr> { Ok(None) } fn put(&self, _: GameReady) -> Result<(), WriteErr> { panic!() } } impl GameReadyRepo for 
SGReady { fn get(&self, session_id: &SessionId) -> Result<Option<GameReady>, FetchErr> { if session_id == &self.0.sessions.0 { Ok(Some(self.0.clone())) } else if session_id == &self.0.sessions.1 { Ok(Some(self.0.clone())) } else { Ok(None) } } fn put(&self, _: GameReady) -> Result<(), WriteErr> { panic!() } } impl PrefsRepo for PrefsNone { fn get(&self, _: &SessionId) -> Result<Option<SessionColorPref>, FetchErr> { Ok(None) } fn put(&self, _: &SessionColorPref) -> Result<(), WriteErr> { panic!() } } impl PrefsRepo for PrefsOne { fn get(&self, session_id: &SessionId) -> Result<Option<SessionColorPref>, FetchErr> { if session_id == &self.0.session_id { Ok(Some(self.0.clone())) } else { Ok(None) } } fn put(&self, _: &SessionColorPref) -> Result<(), WriteErr> { panic!() } } impl PrefsRepo for PrefsTwo { fn get(&self, session_id: &SessionId) -> Result<Option<SessionColorPref>, FetchErr> { if session_id == &self.0.session_id { Ok(Some(self.0.clone())) } else if session_id == &self.1.session_id { Ok(Some(self.1.clone())) } else { Ok(None) } } fn put(&self, _: &SessionColorPref) -> Result<(), WriteErr> { panic!() } } #[test] fn test_by_session_id_complete() { let sid = SessionId::new(); let cid = ClientId::new(); let gid = GameId::new(); let one_pref = SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: cid.clone(), }; let another_sid = SessionId::new(); let another_cid = ClientId::new(); let another_pref = SessionColorPref { session_id: another_sid.clone(), color_pref: ColorPref::Black, client_id: another_cid.clone(), }; let board_size = 9; let repos = Repos { prefs: Rc::new(PrefsTwo(one_pref.clone(), another_pref.clone())), game_ready: Rc::new(SGReady(GameReady { sessions: (sid.clone(), another_sid.clone()), game_id: gid.clone(), event_id: EventId::new(), board_size, })), }; let actual = by_session_id(&sid, &repos).expect("ok"); assert_eq!( actual, GameColorPref::Complete { game_id: gid, prefs: (one_p
ert_eq!(actual, GameColorPref::NotReady) } }
ref, another_pref) } ) } #[test] fn test_by_session_id_partial() { let sid = SessionId::new(); let cid = ClientId::new(); let gid = GameId::new(); let pref = SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: cid.clone(), }; let board_size = 9; let repos = Repos { prefs: Rc::new(PrefsOne(pref.clone())), game_ready: Rc::new(SGReady(GameReady { sessions: (sid.clone(), SessionId::new()), game_id: gid.clone(), event_id: EventId::new(), board_size, })), }; let actual = by_session_id(&sid, &repos).expect("ok"); assert_eq!(actual, GameColorPref::Partial { game_id: gid, pref }) } #[test] fn test_by_session_id_not_ready() { let sid = SessionId::new(); let cid = ClientId::new(); let repos = Repos { prefs: Rc::new(PrefsOne(SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: cid.clone(), })), game_ready: Rc::new(SGNotReady), }; let actual = by_session_id(&sid, &repos).expect("ok"); assert_eq!(actual, GameColorPref::NotReady) } #[test] fn test_by_game_ready_two_prefs() { let sid = SessionId::new(); let gid = GameId::new(); let another_sid = SessionId::new(); let sessions = (sid.clone(), another_sid.clone()); let board_size = 9; let game_ready = GameReady { sessions: sessions.clone(), game_id: gid.clone(), event_id: EventId::new(), board_size, }; let first_pref = SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: ClientId::new(), }; let second_pref = SessionColorPref { session_id: another_sid.clone(), color_pref: ColorPref::Black, client_id: ClientId::new(), }; let repos = Repos { prefs: Rc::new(PrefsTwo(first_pref.clone(), second_pref.clone())), game_ready: Rc::new(SGReady(game_ready.clone())), }; let actual = by_game_ready(&game_ready, &repos).expect("ok"); assert_eq!( actual, GameColorPref::Complete { game_id: gid, prefs: (first_pref, second_pref) } ) } #[test] fn test_by_game_ready_one_pref() { let sid = SessionId::new(); let gid = GameId::new(); let sessions = (sid.clone(), 
SessionId::new()); let board_size = 9; let game_ready = GameReady { sessions: sessions.clone(), game_id: gid.clone(), event_id: EventId::new(), board_size, }; let pref = SessionColorPref { session_id: sid.clone(), color_pref: ColorPref::Black, client_id: ClientId::new(), }; let repos = Repos { prefs: Rc::new(PrefsOne(pref.clone())), game_ready: Rc::new(SGReady(game_ready.clone())), }; let actual = by_game_ready(&game_ready, &repos).expect("ok"); assert_eq!(actual, GameColorPref::Partial { game_id: gid, pref }) } #[test] fn test_by_game_ready_no_prefs() { let sid = SessionId::new(); let gid = GameId::new(); let sessions = (sid, SessionId::new()); let board_size = 9; let game_ready = GameReady { sessions: sessions.clone(), game_id: gid, event_id: EventId::new(), board_size, }; let repos = Repos { prefs: Rc::new(PrefsNone), game_ready: Rc::new(SGReady(game_ready.clone())), }; let actual = by_game_ready(&game_ready, &repos).expect("ok"); ass
random
[ { "content": "pub fn fetch(game_id: &GameId, components: &Components) -> Result<Option<GameState>, FetchErr> {\n\n let mut conn = components.client.get_connection().expect(\"fetch conn\");\n\n let key = components.redis_key_provider.game_states(&game_id);\n\n let bin_data: Option<Vec<u8>> = conn.get(&key)?;\n\n Ok(match bin_data {\n\n Some(b) => {\n\n let r = GameState::from(&b)?;\n\n // Touch TTL whenever you get the record\n\n conn.expire(key, EXPIRY_SECS)?;\n\n Some(r)\n\n }\n\n None => None,\n\n })\n\n}\n\n\n", "file_path": "micro-changelog/src/repo/game_states_repo.rs", "rank": 2, "score": 277227.5249196697 }, { "content": "pub fn expire(key: &str, conn: &mut redis::Connection) -> Result<(), RepoErr> {\n\n Ok(conn.expire(key, TTL_SECS)?)\n\n}\n", "file_path": "botlink/src/repo/expire.rs", "rank": 3, "score": 260554.67235170293 }, { "content": "pub fn expire(key: &str, conn: &mut redis::Connection) -> Result<(), RepoErr> {\n\n Ok(conn.expire(key, TTL_SECS)?)\n\n}\n", "file_path": "undo/src/repo/expire.rs", "rank": 4, "score": 260554.67235170293 }, { "content": "fn redis_key(session_id: &SessionId) -> String {\n\n format!(\"/BUGOUT/micro_color_chooser/prefs/{}\", session_id.0)\n\n}\n", "file_path": "micro-color-chooser/src/repo/prefs.rs", "rank": 5, "score": 256371.6495340858 }, { "content": "pub trait PrefsRepo {\n\n fn get(&self, session_id: &SessionId) -> Result<Option<SessionColorPref>, FetchErr>;\n\n fn put(&self, scp: &SessionColorPref) -> Result<(), WriteErr>;\n\n}\n\n\n\nimpl PrefsRepo for Rc<Client> {\n\n fn get(&self, session_id: &SessionId) -> Result<Option<SessionColorPref>, FetchErr> {\n\n trace!(\"get {:?}\", &session_id);\n\n match self.get_connection() {\n\n Ok(mut conn) => {\n\n let key = redis_key(session_id);\n\n let data: Option<Vec<u8>> = conn.get(&key)?;\n\n\n\n if let Some(bytes) = data {\n\n touch_ttl(&mut conn, &key);\n\n match bincode::deserialize(&bytes) {\n\n Ok(game_ready) => Ok(Some(game_ready)),\n\n Err(e) => 
Err(FetchErr::Deser(e)),\n\n }\n\n } else {\n", "file_path": "micro-color-chooser/src/repo/prefs.rs", "rank": 6, "score": 220889.2445380271 }, { "content": "pub fn consume_undo(um: &UndoMove, reg: &Components) -> Result<(), UndoProcessingErr> {\n\n let botness = reg.botness_repo.get(&um.game_id, um.player)?;\n\n let requester_is_human = botness == Botness::IsHuman;\n\n\n\n if let Some(game_state) = reg.game_state_repo.get(&um.game_id)? {\n\n let player_up_is_human: bool = requester_is_human && game_state.player_up == um.player;\n\n let at_least_two_moves_made: bool = game_state.moves.len() > 1;\n\n\n\n if player_up_is_human && at_least_two_moves_made {\n\n let rolled_back = rollback(&game_state);\n\n reg.xadd.xadd(&StreamOutput::LOG(rolled_back.clone()))?;\n\n reg.xadd.xadd(&StreamOutput::MU(MoveUndone {\n\n game_id: um.game_id.clone(),\n\n player: um.player,\n\n game_state: rolled_back,\n\n }))?;\n\n } else {\n\n reject(um, reg)?\n\n }\n\n } else {\n\n reject(um, reg)?\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "undo/src/stream/undo.rs", "rank": 7, "score": 213910.0531295981 }, { "content": "fn redis_key(session_id: &SessionId) -> String {\n\n format!(\"/BUGOUT/micro_color_chooser/game_ready/{}\", session_id.0)\n\n}\n", "file_path": "micro-color-chooser/src/repo/game_ready.rs", "rank": 8, "score": 213290.03633194408 }, { "content": "pub fn write(\n\n game_id: &GameId,\n\n game_state: &GameState,\n\n components: &Components,\n\n) -> Result<String, WriteErr> {\n\n let mut conn = components.client.get_connection().expect(\"write conn\");\n\n\n\n let key = components.redis_key_provider.game_states(game_id);\n\n let done = conn.set(&key, game_state.serialize()?)?;\n\n // Touch TTL whenever you set the record\n\n conn.expire(key, EXPIRY_SECS)?;\n\n\n\n Ok(done)\n\n}\n", "file_path": "micro-changelog/src/repo/game_states_repo.rs", "rank": 9, "score": 208309.12138266547 }, { "content": "/// update a record's ttl. 
will never fail the calling\n\n/// function, but it will write to error log\n\n/// if there's a problem\n\npub fn touch_ttl(conn: &mut Connection, key: &str) {\n\n let exp: Result<(), _> = conn.expire(key, EXPIRY_SECS);\n\n if let Err(e) = exp {\n\n error!(\"touch TTL error {:?}\", e)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum FetchErr {\n\n Deser(Box<bincode::ErrorKind>),\n\n Redis(RedisError),\n\n}\n\npub struct WriteErr;\n\n\n\nimpl From<redis::RedisError> for FetchErr {\n\n fn from(e: redis::RedisError) -> Self {\n\n FetchErr::Redis(e)\n\n }\n\n}\n\nimpl From<redis::RedisError> for WriteErr {\n\n fn from(_: redis::RedisError) -> Self {\n\n Self\n\n }\n\n}\n", "file_path": "micro-sync/src/repo/mod.rs", "rank": 10, "score": 204331.95415352107 }, { "content": "/// update a record's ttl. will never fail the calling\n\n/// function, but it will write to error log\n\n/// if there's a problem\n\npub fn touch_ttl(conn: &mut Connection, key: &str) {\n\n let exp: Result<(), _> = conn.expire(key, EXPIRY_SECS);\n\n if let Err(e) = exp {\n\n error!(\"touch TTL error {:?}\", e)\n\n }\n\n}\n\n\n\nimpl From<redis::RedisError> for FetchErr {\n\n fn from(e: redis::RedisError) -> Self {\n\n FetchErr::Redis(e)\n\n }\n\n}\n\nimpl From<redis::RedisError> for WriteErr {\n\n fn from(_: redis::RedisError) -> Self {\n\n Self\n\n }\n\n}\n\nimpl From<Box<bincode::ErrorKind>> for FetchErr {\n\n fn from(e: Box<bincode::ErrorKind>) -> Self {\n\n FetchErr::Deser(e)\n\n }\n\n}\n", "file_path": "micro-color-chooser/src/repo/mod.rs", "rank": 11, "score": 201368.2093482284 }, { "content": "pub fn session_code(ws_session: &WsSession) -> String {\n\n format!(\n\n \"{} {}\",\n\n ws_session\n\n .client_id\n\n .map(|cid| crate::short_uuid(cid))\n\n .unwrap_or(crate::EMPTY_SHORT_UUID.to_string()),\n\n ws_session\n\n .current_game\n\n .map(|gid| crate::short_uuid(gid))\n\n .unwrap_or(crate::EMPTY_SHORT_UUID.to_string())\n\n )\n\n .to_string()\n\n}\n", "file_path": "gateway/src/logging.rs", "rank": 
12, "score": 193000.02711263637 }, { "content": "pub fn opts() -> ConsumerGroupOpts {\n\n ConsumerGroupOpts {\n\n block_ms: BLOCK_MS,\n\n group: Group {\n\n group_name: GROUP_NAME.to_string(),\n\n consumer_name: \"singleton\".to_string(),\n\n },\n\n }\n\n}\n\n\n\nuse redis_streams::anyhow;\n\nimpl LobbyStreams {\n\n pub fn new(reg: Components) -> Self {\n\n Self { reg }\n\n }\n\n\n\n pub fn process(&self, streams: &mut dyn SortedStreams) {\n\n loop {\n\n if let Err(e) = streams.consume() {\n\n error!(\"Stream err {:?}\", e)\n", "file_path": "micro-game-lobby/src/stream/mod.rs", "rank": 13, "score": 185585.86263874685 }, { "content": "pub fn process(components: &Components) {\n\n let mut unacked = Unacknowledged::default();\n\n loop {\n\n match components.xread.read_sorted() {\n\n Ok(xrr) => {\n\n for (xid, event) in xrr {\n\n process_event(&event, components);\n\n unacked.push(xid, event);\n\n }\n\n }\n\n Err(_) => error!(\"xread\"),\n\n }\n\n\n\n unacked.ack_all(&components)\n\n }\n\n}\n\n\n", "file_path": "micro-sync/src/stream/mod.rs", "rank": 14, "score": 185227.57935312603 }, { "content": "pub fn create_redis_client() -> Arc<Client> {\n\n Arc::new(Client::open(REDIS_URL).expect(\"redis client\"))\n\n}\n", "file_path": "gateway/src/redis_io/mod.rs", "rank": 15, "score": 180142.58745378524 }, { "content": "fn ready_game(session_id: &SessionId, lobby: &GameLobby, queued: &Game, reg: &Components) {\n\n let updated: GameLobby = lobby.ready(queued);\n\n if let Err(_) = reg.game_lobby_repo.put(&updated) {\n\n error!(\"game lobby write F1\");\n\n } else {\n\n if let Err(_) = reg.xadd.xadd(StreamOutput::GR(GameReady {\n\n game_id: queued.game_id.clone(),\n\n event_id: EventId::new(),\n\n board_size: queued.board_size,\n\n sessions: (queued.creator.clone(), session_id.clone()),\n\n })) {\n\n error!(\"XADD Game ready\")\n\n } else {\n\n trace!(\"Game ready. 
Lobby: {:?}\", &updated);\n\n init_changelog(&queued.game_id, queued.board_size, &reg)\n\n }\n\n }\n\n}\n\n\n", "file_path": "micro-game-lobby/src/stream/mod.rs", "rank": 16, "score": 177995.50839902763 }, { "content": "pub fn process(components: &mut Components) {\n\n let repos = Repos::new(components);\n\n loop {\n\n let mut gr_processed: Vec<XReadEntryId> = Vec::with_capacity(ACK_QUEUE_CAPACITY);\n\n let mut ccp_processed: Vec<XReadEntryId> = Vec::with_capacity(ACK_QUEUE_CAPACITY);\n\n match components.xread.sorted() {\n\n Ok(xrr) => {\n\n for time_ordered_event in xrr {\n\n let (result, pxid) = match time_ordered_event {\n\n (xid, StreamInput::CCP(ccp)) => {\n\n info!(\"Stream: Choose Color Pref {:?}\", &ccp);\n\n let scp = SessionColorPref {\n\n color_pref: ccp.color_pref,\n\n session_id: ccp.session_id.clone(),\n\n client_id: ccp.client_id,\n\n };\n\n\n\n if let Err(_e) = components.prefs_repo.put(&scp) {\n\n error!(\"write to pref repo\")\n\n }\n", "file_path": "micro-color-chooser/src/stream/mod.rs", "rank": 17, "score": 175619.31955829985 }, { "content": "pub fn xread_loop(opts: &mut StreamOpts) {\n\n let mut unack = Unacknowledged::default();\n\n loop {\n\n match opts.xread.xread_sorted() {\n\n Ok(xrr) => {\n\n for (xid, event) in xrr {\n\n process(&event, opts);\n\n\n\n unack.push(xid, &event)\n\n }\n\n\n\n unack.ack_all(&opts)\n\n }\n\n Err(e) => error!(\"Stream error {:?}\", e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "botlink/src/stream/mod.rs", "rank": 18, "score": 175619.31955829985 }, { "content": "fn launch_child() -> Result<Child, std::io::Error> {\n\n Command::new(PROGRAM)\n\n .arg(&ARGS[0])\n\n .arg(&ARGS[1])\n\n .arg(&ARGS[2])\n\n .arg(&ARGS[3])\n\n .arg(&ARGS[4])\n\n .arg(&ARGS[5])\n\n .arg(&ARGS[6])\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use core_model::*;\n\n use json::KataGoResponse;\n\n use move_model::*;\n", "file_path": 
"tinybrain/src/katago/mod.rs", "rank": 19, "score": 172369.3103570896 }, { "content": "pub fn process(topics: StreamTopics, components: &crate::Components) {\n\n info!(\"Processing {:#?}\", topics);\n\n let mut ma_processed = vec![];\n\n let mut gs_processed = vec![];\n\n loop {\n\n match read_sorted(&topics, &components.client) {\n\n Ok(xrr) => {\n\n for time_ordered_event in xrr {\n\n match time_ordered_event {\n\n (entry_id, StreamData::MA(move_acc)) => {\n\n info!(\"Stream: Move Accepted {:?}\", &move_acc);\n\n match update_game_state(&move_acc, &components) {\n\n Err(e) => error!(\"err updating game state {:?}\", e),\n\n Ok(gs) => {\n\n // These next two ops are concurrent in the kafka impl\n\n if let Err(e) = xadd_game_states_changelog(\n\n gs,\n\n &topics.game_states_changelog,\n\n components,\n\n ) {\n", "file_path": "micro-changelog/src/stream/mod.rs", "rank": 20, "score": 163827.7833490948 }, { "content": "/// Associates SessionIds with GameIds and allows retrieval by SessionId\n\npub trait GameReadyRepo {\n\n /// Get a game ready record for this session, if it exists\n\n /// And then update the record's TTL\n\n fn get(&self, session_id: &SessionId) -> Result<Option<GameReady>, FetchErr>;\n\n /// Save a game ready record, associating it with both session IDs\n\n /// found in its `sessions` field. 
Updates record TTL.\n\n fn put(&self, game_ready: GameReady) -> Result<(), WriteErr>;\n\n}\n\n\n\nimpl GameReadyRepo for Rc<Client> {\n\n fn get(&self, session_id: &SessionId) -> Result<Option<GameReady>, FetchErr> {\n\n match self.get_connection() {\n\n Ok(mut conn) => {\n\n let key = redis_key(session_id);\n\n let data: Option<Vec<u8>> = conn.get(&key)?;\n\n\n\n if let Some(bytes) = data {\n\n touch_ttl(&mut conn, &key);\n\n match bincode::deserialize(&bytes) {\n\n Ok(game_ready) => Ok(Some(game_ready)),\n", "file_path": "micro-color-chooser/src/repo/game_ready.rs", "rank": 21, "score": 162492.7335101195 }, { "content": "pub fn init() {\n\n dotenv::dotenv().ok();\n\n}\n", "file_path": "botlink/src/env.rs", "rank": 22, "score": 161269.8571352163 }, { "content": "/// start the select! loop responsible for sending\n\n/// kafka messages to relevant websocket clients.\n\n/// it must respond to requests to add and drop listeners\n\npub fn start(\n\n router_commands_out: Receiver<RouterCommand>,\n\n backend_events_out: Receiver<BackendEvents>,\n\n idle_resp_out: Receiver<IdleStatusResponse>,\n\n) {\n\n thread::spawn(move || {\n\n let mut router = Router::new();\n\n loop {\n\n select! 
{\n\n recv(router_commands_out) -> command =>\n\n match command {\n\n Ok(RouterCommand::ObserveGame(game_id)) => router.observe_game(game_id),\n\n // A create private game request, or a find public\n\n // game request, will result in us tracking a\n\n // client_id -> event channel mapping\n\n // We'll use this to send messages back to the browser,\n\n // later\n\n Ok(RouterCommand::AddSession { session_id, events_in }) => {\n\n router.sessions.insert(session_id, events_in);\n\n },\n", "file_path": "gateway/src/router.rs", "rank": 23, "score": 161269.8571352163 }, { "content": "pub fn init() {\n\n dotenv::dotenv().ok();\n\n}\n\n\n", "file_path": "gateway/src/env.rs", "rank": 24, "score": 161269.8571352163 }, { "content": "pub fn init() {\n\n dotenv().ok();\n\n}\n", "file_path": "tinybrain/src/env.rs", "rank": 25, "score": 161269.8571352163 }, { "content": "fn reject(undo_move: &UndoMove, reg: &Components) -> Result<(), StreamAddErr> {\n\n reg.xadd.xadd(&StreamOutput::REJECT(undo_move.clone()))\n\n}\n\n\n", "file_path": "undo/src/stream/undo.rs", "rank": 26, "score": 160507.9605571695 }, { "content": "pub fn create_consumer_group(topics: &StreamTopics, client: &redis::Client) {\n\n let mut conn = client.get_connection().expect(\"group create conn\");\n\n let mm: Result<(), _> = conn.xgroup_create_mkstream(&topics.move_accepted_ev, GROUP_NAME, \"$\");\n\n if let Err(e) = mm {\n\n warn!(\n\n \"Ignoring error creating MoveAcceptedEv consumer group (it probably exists already) {:?}\",\n\n e\n\n );\n\n }\n\n let gs: Result<(), _> =\n\n conn.xgroup_create_mkstream(&topics.game_states_changelog, GROUP_NAME, \"$\");\n\n if let Err(e) = gs {\n\n warn!(\n\n \"Ignoring error creating GameStates consumer group (it probably exists already) {:?}\",\n\n e\n\n );\n\n }\n\n}\n", "file_path": "micro-changelog/src/stream/mod.rs", "rank": 27, "score": 159458.2621502283 }, { "content": "pub fn judge(mm: &MakeMove, game_state: &GameState) -> Judgement {\n\n info!(\"Judge {:?}\", mm);\n\n 
if validate_move(mm, game_state) {\n\n let captured: Vec<Coord> = mm\n\n .coord\n\n .map(|c| {\n\n captures_for(mm.player, c, &game_state.board)\n\n .iter()\n\n .cloned()\n\n .collect()\n\n })\n\n .unwrap_or(vec![]);\n\n\n\n let move_made = MoveMade {\n\n player: mm.player,\n\n coord: mm.coord,\n\n captured,\n\n event_id: EventId::new(),\n\n game_id: mm.game_id.clone(),\n\n reply_to: mm.req_id.clone(),\n\n };\n\n Judgement::Accepted(move_made)\n\n } else {\n\n Judgement::Rejected\n\n }\n\n}\n\n\n", "file_path": "micro-judge/src/game/mod.rs", "rank": 28, "score": 159458.2621502283 }, { "content": "pub fn main() {\n\n env_logger::init();\n\n info!(\"🔢 {}\", VERSION);\n\n let client = micro_sync::create_redis_client();\n\n let components = Components::new(&client);\n\n stream::init::create_consumer_group(&client);\n\n stream::process(&components)\n\n}\n", "file_path": "micro-sync/src/main.rs", "rank": 29, "score": 159226.58940504325 }, { "content": "pub fn start(channels: &MainChannels, redis_client: Arc<redis::Client>) {\n\n stream::xread::create_consumer_group(&redis_client);\n\n\n\n let client_c = redis_client.clone();\n\n let c_out = channels.session_commands_out.clone();\n\n thread::spawn(move || {\n\n stream::write_loop(c_out, &stream::xadd::RedisXAddCommands::create(client_c))\n\n });\n\n\n\n let bei = channels.backend_events_in.clone();\n\n let client_d = redis_client.clone();\n\n stream::read_loop(\n\n bei,\n\n stream::StreamOpts {\n\n xread: Box::new(stream::xread::RedisXReader {\n\n client: client_d.clone(),\n\n }),\n\n xack: Box::new(client_d),\n\n },\n\n )\n\n}\n", "file_path": "gateway/src/backend/mod.rs", "rank": 30, "score": 158777.28418248065 }, { "content": "fn complain_no_client_id() -> Result<()> {\n\n Ok(error!(\"❌ UNEXPECTED: NO CLIENT ID DEFINED ❌\"))\n\n}\n\n\n", "file_path": "gateway/src/websocket.rs", "rank": 31, "score": 158007.91671513463 }, { "content": "pub fn read_loop(events_in: Sender<BackendEvents>, opts: StreamOpts) {\n\n let mut 
unacked = Unacknowledged::default();\n\n loop {\n\n match opts.xread.xread_sorted() {\n\n Err(e) => error!(\"cannot xread {:?}\", e),\n\n Ok(xrr) => {\n\n for (xid, data) in xrr {\n\n match &data {\n\n StreamData::HistoryProvided(_) => info!(\"📥 Stream HistoryProvided\"),\n\n StreamData::SyncReply(_) => info!(\"📥 Stream SyncReply\"),\n\n _ => info!(\"📥 Stream: {:?}\", &data),\n\n }\n\n\n\n let dc = data.clone();\n\n if let Err(e) = events_in.send(BackendEvents::from(data.clone())) {\n\n error!(\"send backend event {:?}\", e)\n\n }\n\n unacked.push(xid, data);\n\n info!(\"🏞 OK {:?}\", dc)\n\n }\n", "file_path": "gateway/src/redis_io/stream/mod.rs", "rank": 32, "score": 157388.89227925078 }, { "content": "pub fn ack(\n\n key: &str,\n\n group: &str,\n\n ids: &[XReadEntryId],\n\n client: &redis::Client,\n\n) -> Result<(), redis::RedisError> {\n\n let mut conn = client.get_connection().expect(\"conn\");\n\n let idstrs: Vec<String> = ids.iter().map(|id| id.to_string()).collect();\n\n let _: usize = conn.xack(key, group, &idstrs)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "micro-changelog/src/stream/messages.rs", "rank": 33, "score": 157268.81267800825 }, { "content": "pub fn ack(\n\n key: &str,\n\n group: &str,\n\n ids: &[XReadEntryId],\n\n client: &Client,\n\n) -> Result<(), redis::RedisError> {\n\n let mut conn = client.get_connection().expect(\"conn\");\n\n let idstrs: Vec<String> = ids.iter().map(|id| id.to_string()).collect();\n\n let _: usize = conn.xack(key, group, &idstrs)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "micro-judge/src/io/messages.rs", "rank": 34, "score": 157268.81267800825 }, { "content": "pub fn main() {\n\n env_logger::init();\n\n info!(\"🔢 {}\", VERSION);\n\n let client = micro_color_chooser::create_redis_client();\n\n let mut components = Components::new(&client);\n\n stream::create_consumer_group(&client);\n\n stream::process(&mut components)\n\n}\n", "file_path": "micro-color-chooser/src/main.rs", "rank": 35, "score": 157268.81267800825 }, { 
"content": "pub fn start_monitor(\n\n status_resp_in: Sender<IdleStatusResponse>,\n\n req_status_out: Receiver<RequestIdleStatus>,\n\n) {\n\n thread::spawn(move || loop {\n\n select! {\n\n recv(req_status_out) -> req =>\n\n if let Ok(RequestIdleStatus(client_id)) = req {\n\n if let Err(e) = status_resp_in.send(IdleStatusResponse(client_id, STATUS)) {\n\n error!(\"err sending idle status resp {}\", e)\n\n }\n\n } else {\n\n error!(\"err on idle recv req status\")\n\n },\n\n }\n\n });\n\n}\n", "file_path": "gateway/src/idle_status.rs", "rank": 36, "score": 157268.81267800825 }, { "content": "pub fn read_sorted(\n\n topics: &StreamTopics,\n\n client: &Client,\n\n) -> Result<Vec<(XReadEntryId, StreamData)>, redis::RedisError> {\n\n let mut conn = client.get_connection().expect(\"redis conn\");\n\n let opts = StreamReadOptions::default()\n\n .block(BLOCK_MS)\n\n .group(GROUP_NAME, \"singleton\");\n\n let ser = conn.xread_options(\n\n &[&topics.make_move_cmd, &topics.game_states_changelog],\n\n &[\">\", \">\"],\n\n opts,\n\n )?;\n\n\n\n let unsorted = deser(ser, &topics);\n\n let mut sorted_keys: Vec<XReadEntryId> = unsorted.keys().map(|k| *k).collect();\n\n sorted_keys.sort();\n\n\n\n let mut answer = vec![];\n\n for sk in sorted_keys {\n\n if let Some(data) = unsorted.get(&sk) {\n\n answer.push((sk, data.clone()))\n\n }\n\n }\n\n\n\n Ok(answer)\n\n}\n\n\n", "file_path": "micro-judge/src/io/messages.rs", "rank": 37, "score": 155391.27144218094 }, { "content": "pub fn xadd_loop(\n\n move_computed_out: Receiver<MoveComputed>,\n\n xadder: Arc<dyn XAdder>,\n\n board_size_repo: Arc<dyn BoardSizeRepo>,\n\n) {\n\n loop {\n\n select! 
{ recv(move_computed_out) -> msg =>\n\n match msg {\n\n Ok(MoveComputed { game_id, player, alphanum_coord }) => {\n\n if let Ok(board_size) = board_size_repo.get(&game_id) {\n\n let coord = alphanum_coord.map(|a|convert(a, board_size));\n\n\n\n let command = MakeMove { game_id, player, req_id: ReqId(Uuid::new_v4()), coord };\n\n\n\n if let Err(e) = xadder.xadd_make_move_command(&command) {\n\n error!(\"could not xadd move command : {:?}\",e)\n\n }\n\n } else {\n\n error!(\"Could not fetch board size for {}\", game_id.0)\n\n }\n\n }\n\n Err(e) =>\n\n error!(\"loop recv: {}\", e)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "botlink/src/stream/write_moves.rs", "rank": 38, "score": 155391.27144218094 }, { "content": "pub fn choose(\n\n first: &SessionColorPref,\n\n second: &SessionColorPref,\n\n game_id: &GameId,\n\n rng: &mut Random,\n\n) -> ColorsChosen {\n\n let (black, white): (ClientId, ClientId) = match (first.color_pref, second.color_pref) {\n\n (ColorPref::Black, ColorPref::Black) => rng.roll(&first.client_id, &second.client_id),\n\n (ColorPref::White, ColorPref::White) => rng.roll(&first.client_id, &second.client_id),\n\n (ColorPref::Black, _) => (first.client_id.clone(), second.client_id.clone()),\n\n (_, ColorPref::White) => (first.client_id.clone(), second.client_id.clone()),\n\n (ColorPref::White, _) => (second.client_id.clone(), first.client_id.clone()),\n\n (_, ColorPref::Black) => (second.client_id.clone(), first.client_id.clone()),\n\n (ColorPref::Any, ColorPref::Any) => rng.roll(&first.client_id, &second.client_id),\n\n };\n\n ColorsChosen {\n\n game_id: game_id.clone(),\n\n white,\n\n black,\n\n }\n", "file_path": "micro-color-chooser/src/service/choose.rs", "rank": 39, "score": 155391.27144218094 }, { "content": "pub fn read_sorted(\n\n topics: &StreamTopics,\n\n client: &redis::Client,\n\n) -> Result<Vec<(XReadEntryId, StreamData)>, redis::RedisError> {\n\n let mut conn = client.get_connection().expect(\"conn\");\n\n let opts = 
StreamReadOptions::default()\n\n .block(BLOCK_MS)\n\n .group(\"micro-changelog\", \"singleton\");\n\n let ser = conn.xread_options(\n\n &[&topics.move_accepted_ev, &topics.game_states_changelog],\n\n &[\">\", \">\"],\n\n opts,\n\n )?;\n\n\n\n let unsorted = deser(ser, &topics);\n\n let mut sorted_keys: Vec<XReadEntryId> = unsorted.keys().map(|k| *k).collect();\n\n sorted_keys.sort();\n\n\n\n let mut answer = vec![];\n\n for sk in sorted_keys {\n\n if let Some(data) = unsorted.get(&sk) {\n\n answer.push((sk, data.clone()))\n\n }\n\n }\n\n Ok(answer)\n\n}\n\n\n", "file_path": "micro-changelog/src/stream/messages.rs", "rank": 40, "score": 155391.27144218094 }, { "content": "pub fn start(move_computed_in: Sender<MoveComputed>, compute_move_out: Receiver<ComputeMove>) {\n\n let mut process = launch_child().expect(\"failed to start katago\");\n\n\n\n let mut child_in = process.stdin.take().expect(\"no handle to stdin\");\n\n thread::spawn(move || loop {\n\n select! {\n\n recv(compute_move_out) -> request =>\n\n match request {\n\n Ok(r) =>{\n\n if let Ok(query) = KataGoQuery::from(r) {\n\n match query.to_json() {\n\n Ok(qj) => match child_in.write(&qj) {\n\n Err(why) => panic!(\"couldn't write to stdin: {:?}\", why),\n\n Ok(_) => info!(\"> requested compute for {:?}\",query),\n\n },\n\n Err(e) => error!(\"failed query ser {:?}\",e)\n\n }\n\n } else {\n\n error!(\"ERR Bad coord in game state\")\n\n }\n", "file_path": "tinybrain/src/katago/mod.rs", "rank": 41, "score": 154563.02104874415 }, { "content": "/// This can be used to satisfy https://github.com/Terkwood/BUGOUT/issues/363\n\nfn _clean_add_make_move(client: &Client, data: MakeMove) -> Result<(), XAddErr> {\n\n let ser_bytes_result = bincode::serialize(&data);\n\n\n\n if let Ok(bytes) = ser_bytes_result {\n\n let mut m: BTreeMap<&str, &[u8]> = BTreeMap::new();\n\n m.insert(MAP_KEY, &bytes);\n\n if let Ok(mut conn) = client.get_connection() {\n\n conn.xadd_maxlen_map(topics::MAKE_MOVE, StreamMaxlen::Approx(MAX_LEN), 
AUTO_ID, m)\n\n .map_err(|e| XAddErr::Redis(e))\n\n } else {\n\n Err(XAddErr::Conn)\n\n }\n\n } else {\n\n Err(XAddErr::Ser)\n\n }\n\n}\n", "file_path": "micro-sync/src/stream/xadd.rs", "rank": 42, "score": 154477.90136660484 }, { "content": "/// Note that the check for last_move.is_some() makes\n\n/// sense. If the player passed, we'll still see a `Move`,\n\n/// but its `coord` field will be empty.\n\npub fn is_client_ahead_by_one_turn(\n\n req_sync: &ReqSync,\n\n system_turn: u32,\n\n system_player_up: Player,\n\n) -> bool {\n\n req_sync.turn == system_turn + 1\n\n && req_sync.player_up == other_player(system_player_up)\n\n && req_sync.last_move.is_some()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use core_model::*;\n\n use sync_model::*;\n\n #[test]\n\n fn expected_client_ahead_by_one() {\n\n let game_id = GameId::new();\n\n let session_id = SessionId::new();\n\n let req_id = ReqId::new();\n", "file_path": "micro-sync/src/sync.rs", "rank": 43, "score": 153589.13230860798 }, { "content": "fn ack(client: &Client, key: &str, ids: &[XReadEntryId]) -> Result<(), StreamAckErr> {\n\n match client.get_connection() {\n\n Ok(mut conn) => {\n\n let idstrs: Vec<String> = ids.iter().map(|id| id.to_string()).collect();\n\n let _: usize = conn.xack(key, super::GROUP_NAME, &idstrs)?;\n\n Ok(())\n\n }\n\n Err(_) => Err(StreamAckErr),\n\n }\n\n}\n\n\n\nimpl From<redis::RedisError> for StreamAckErr {\n\n fn from(_: redis::RedisError) -> Self {\n\n Self\n\n }\n\n}\n\n\n\nimpl Unacknowledged {\n\n pub fn ack_all(&mut self, reg: &Components) {\n\n if !self.changelog.is_empty() {\n", "file_path": "undo/src/stream/xack.rs", "rank": 44, "score": 149139.35047675233 }, { "content": "pub fn now_millis() -> u128 {\n\n let start = SystemTime::now();\n\n start\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Time went backwards\")\n\n .as_millis()\n\n}\n", "file_path": "micro-sync/src/time.rs", "rank": 45, "score": 148715.52587386454 }, { "content": "fn ack(client: &Client, 
key: &str, ids: &[XReadEntryId]) -> Result<(), StreamAckErr> {\n\n let mut conn = client.get_connection().expect(\"conn\");\n\n let idstrs: Vec<String> = ids.iter().map(|id| id.to_string()).collect();\n\n let _: usize = conn.xack(key, GROUP_NAME, &idstrs)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "micro-sync/src/stream/xread.rs", "rank": 46, "score": 147262.33242688887 }, { "content": "fn ack(client: &Client, key: &str, ids: &[XReadEntryId]) -> Result<(), StreamAckErr> {\n\n if let Ok(mut conn) = client.get_connection() {\n\n let idstrs: Vec<String> = ids.iter().map(|id| id.to_string()).collect();\n\n let _: usize = conn.xack(key, super::GROUP_NAME, &idstrs)?;\n\n Ok(())\n\n } else {\n\n Err(StreamAckErr)\n\n }\n\n}\n\n\n\nimpl From<redis::RedisError> for StreamAckErr {\n\n fn from(_: redis::RedisError) -> Self {\n\n Self\n\n }\n\n}\n", "file_path": "gateway/src/redis_io/stream/xack.rs", "rank": 47, "score": 145447.0576136521 }, { "content": "fn ack(client: &Client, key: &str, ids: &[XReadEntryId]) -> Result<(), StreamAckErr> {\n\n let c = client.get_connection();\n\n if let Ok(mut conn) = c {\n\n let idstrs: Vec<String> = ids.iter().map(|id| id.to_string()).collect();\n\n let _: usize = conn.xack(key, GROUP_NAME, &idstrs)?;\n\n Ok(())\n\n } else {\n\n Err(StreamAckErr)\n\n }\n\n}\n\n\n\nimpl From<redis::RedisError> for StreamAckErr {\n\n fn from(_: redis::RedisError) -> Self {\n\n Self\n\n }\n\n}\n\n\n\nimpl From<redis::RedisError> for StreamReadErr {\n\n fn from(e: redis::RedisError) -> Self {\n\n StreamReadErr::XRead(e)\n\n }\n\n}\n", "file_path": "micro-color-chooser/src/stream/xread.rs", "rank": 48, "score": 145447.0576136521 }, { "content": "fn deser(srr: StreamReadReply) -> Result<HashMap<XReadEntryId, StreamInput>, StreamDeserErr> {\n\n let mut out = HashMap::new();\n\n for k in srr.keys {\n\n let key = k.key;\n\n for e in k.ids {\n\n if let Ok(eid) = XReadEntryId::from_str(&e.id) {\n\n let maybe_data: Option<Vec<u8>> = e.get(\"data\");\n\n if let Some(data) = 
maybe_data {\n\n let sd: Option<StreamInput> = if key == topics::GAME_STATES_CHANGELOG {\n\n bincode::deserialize(&data)\n\n .map(|gs| StreamInput::GS(gs))\n\n .ok()\n\n } else if key == topics::REQ_SYNC {\n\n bincode::deserialize(&data)\n\n .map(|rs| StreamInput::RS(rs))\n\n .ok()\n\n } else if key == topics::PROVIDE_HISTORY {\n\n bincode::deserialize(&data)\n\n .map(|ph| StreamInput::PH(ph))\n\n .ok()\n", "file_path": "micro-sync/src/stream/xread.rs", "rank": 49, "score": 144477.09350224936 }, { "content": "fn deser(srr: StreamReadReply) -> Result<HashMap<XReadEntryId, StreamInput>, XReadDeserErr> {\n\n let mut out = HashMap::new();\n\n for k in srr.keys {\n\n let key = k.key;\n\n for e in k.ids {\n\n if let Ok(eid) = XReadEntryId::from_str(&e.id) {\n\n let maybe_data: Option<Vec<u8>> = e.get(\"data\");\n\n if let Some(data) = maybe_data {\n\n let sd: Option<StreamInput> = if key == GAME_STATES_CHANGELOG {\n\n bincode::deserialize(&data)\n\n .map(|gs| StreamInput::LOG(gs))\n\n .ok()\n\n } else if key == BOT_ATTACHED {\n\n bincode::deserialize(&data)\n\n .map(|ba| StreamInput::BA(ba))\n\n .ok()\n\n } else if key == UNDO_MOVE {\n\n bincode::deserialize(&data)\n\n .map(|um| StreamInput::UM(um))\n\n .ok()\n", "file_path": "undo/src/stream/xread.rs", "rank": 50, "score": 144477.09350224936 }, { "content": "pub fn header() -> Option<String> {\n\n let cleartext = &*crate::env::AUTHORIZATION;\n\n cleartext\n\n .as_ref()\n\n .map(|a| format!(\"Basic {}\", base64::encode(a)))\n\n}\n", "file_path": "tinybrain/src/websocket/authorization.rs", "rank": 51, "score": 143272.25068890292 }, { "content": "pub fn process(reg: &Components) {\n\n let mut unacked = Unacknowledged::default();\n\n loop {\n\n match reg.xread.xread_sorted() {\n\n Ok(xrr) => {\n\n for (xid, data) in xrr {\n\n consume(xid, &data, &reg);\n\n unacked.push(xid, data);\n\n }\n\n }\n\n Err(e) => error!(\"Stream err {:?}\", e),\n\n }\n\n\n\n unacked.ack_all(&reg)\n\n }\n\n}\n\n\n", "file_path": 
"undo/src/stream/process.rs", "rank": 52, "score": 143272.25068890292 }, { "content": "fn panic_cleanup(stream_names: Vec<String>, keys: Vec<String>) {\n\n let client = redis_client();\n\n panic::set_hook(Box::new(move |e| {\n\n println!(\"{}\", USAGE);\n\n println!(\"{:#?}\", e);\n\n clean_streams(stream_names.clone(), &client);\n\n clean_keys(keys.clone(), &client);\n\n FIRST_TEST_COMPLETE.swap(true, std::sync::atomic::Ordering::Relaxed);\n\n }));\n\n}\n\n\n", "file_path": "micro-judge/tests/integration.rs", "rank": 53, "score": 143162.70732641837 }, { "content": "fn panic_cleanup(stream_names: Vec<String>, keys: Vec<String>) {\n\n let client = redis_client();\n\n std::panic::set_hook(Box::new(move |e| {\n\n println!(\"{:#?}\", e);\n\n clean_streams(stream_names.clone(), &client);\n\n clean_keys(keys.clone(), &client);\n\n }));\n\n}\n", "file_path": "micro-changelog/tests/integration.rs", "rank": 54, "score": 143162.70732641837 }, { "content": "fn xadd_io(client: &Client, key: &str, m: BTreeMap<&str, &[u8]>) -> Result<(), StreamAddErr> {\n\n if let Ok(mut conn) = client.get_connection() {\n\n conn.xadd_maxlen_map(key, StreamMaxlen::Approx(MAX_LEN), AUTO_ID, m)\n\n .map_err(|e| StreamAddErr::Redis(e))\n\n } else {\n\n Err(StreamAddErr::Conn)\n\n }\n\n}\n", "file_path": "undo/src/stream/xadd.rs", "rank": 55, "score": 143124.83468059124 }, { "content": "fn deser(srr: StreamReadReply) -> Result<HashMap<XReadEntryId, StreamInput>, StreamDeserErr> {\n\n let mut out = HashMap::new();\n\n for k in srr.keys {\n\n let key = k.key;\n\n for e in k.ids {\n\n if let Ok(eid) = XReadEntryId::from_str(&e.id) {\n\n let maybe_data: Option<Vec<u8>> = e.get(\"data\");\n\n if let Some(data) = maybe_data {\n\n let sd: Option<StreamInput> = if key == topics::CHOOSE_COLOR_PREF {\n\n bincode::deserialize(&data)\n\n .map(|ccp| StreamInput::CCP(ccp))\n\n .ok()\n\n } else if key == topics::GAME_READY {\n\n bincode::deserialize(&data)\n\n .map(|gr| StreamInput::GR(gr))\n\n .ok()\n\n } else 
{\n\n return Err(StreamDeserErr::DataDeser);\n\n };\n\n if let Some(s) = sd {\n", "file_path": "micro-color-chooser/src/stream/xread.rs", "rank": 56, "score": 142829.81380444183 }, { "content": "fn deser(srr: StreamReadReply) -> Result<HashMap<XReadEntryId, StreamData>, StreamDeserErr> {\n\n let mut out: HashMap<XReadEntryId, StreamData> = HashMap::new();\n\n\n\n for k in srr.keys {\n\n let key: &str = &k.key;\n\n for e in k.ids {\n\n if let Ok(xid) = XReadEntryId::from_str(&e.id) {\n\n let maybe_data: Option<Vec<u8>> = e.get(\"data\");\n\n if let Some(data) = maybe_data {\n\n let sd: Option<StreamData> = match key {\n\n topics::BOT_ATTACHED_TOPIC => bincode::deserialize(&data)\n\n .map(|b| StreamData::BotAttached(b))\n\n .ok(),\n\n topics::MOVE_MADE_TOPIC => bincode::deserialize(&data)\n\n .map(|m| StreamData::MoveMade(m))\n\n .ok(),\n\n topics::HISTORY_PROVIDED_TOPIC => bincode::deserialize(&data)\n\n .map(|hp| StreamData::HistoryProvided(hp))\n\n .ok(),\n\n topics::SYNC_REPLY_TOPIC => bincode::deserialize(&data)\n", "file_path": "gateway/src/redis_io/stream/xread.rs", "rank": 57, "score": 142829.81380444183 }, { "content": "pub fn create_redis_client() -> Rc<Client> {\n\n Rc::new(Client::open(REDIS_URL).expect(\"redis client\"))\n\n}\n", "file_path": "micro-sync/src/lib.rs", "rank": 58, "score": 139738.92171112457 }, { "content": "/// Spins too much. 
See https://github.com/Terkwood/BUGOUT/issues/217\n\npub fn process(opts: StreamOpts) {\n\n loop {\n\n if let Ok(xread_result) = read_sorted(&opts.topics, &opts.client) {\n\n let mut mm_processed = vec![];\n\n let mut gs_processed = vec![];\n\n for time_ordered_event in xread_result {\n\n match time_ordered_event {\n\n (entry_id, StreamData::MM(mm)) => {\n\n info!(\"Stream: Move Made {:?}\", &mm);\n\n let fetched_gs = opts.game_states_repo.fetch(&mm.game_id);\n\n match fetched_gs {\n\n Ok(Some(game_state)) => match judge(&mm, &game_state) {\n\n Judgement::Accepted(move_made) => {\n\n if let Err(e) = xadd_move_accepted(\n\n &move_made,\n\n &opts.client,\n\n &opts.topics.move_accepted_ev,\n\n ) {\n\n error!(\"Error XADD to move_accepted {:?}\", e)\n\n } else {\n", "file_path": "micro-judge/src/io/stream.rs", "rank": 59, "score": 139738.92171112457 }, { "content": "pub fn create_consumer_group(client: &Client) {\n\n let mut conn = client.get_connection().expect(\"group create conn\");\n\n let to_create = vec![topics::GAME_STATES_CHANGELOG, topics::ATTACH_BOT_CMD];\n\n for topic in to_create {\n\n let created: Result<(), _> = conn.xgroup_create_mkstream(topic, GROUP_NAME, \"$\");\n\n if let Err(e) = created {\n\n warn!(\n\n \"Ignoring error creating {} consumer group (it probably exists already) {:?}\",\n\n topic, e\n\n );\n\n }\n\n }\n\n}\n", "file_path": "botlink/src/stream/init.rs", "rank": 60, "score": 139738.92171112457 }, { "content": "fn xadd_io(client: &Client, key: &str, m: BTreeMap<&str, &[u8]>) -> Result<(), XAddErr> {\n\n if let Ok(mut conn) = client.get_connection() {\n\n conn.xadd_maxlen_map(key, StreamMaxlen::Approx(MAX_LEN), AUTO_ID, m)\n\n .map_err(|e| XAddErr::Redis(e))\n\n } else {\n\n Err(XAddErr::Conn)\n\n }\n\n}\n", "file_path": "micro-game-lobby/src/stream/xadd.rs", "rank": 61, "score": 139553.03108341157 }, { "content": "#[test]\n\nfn test_process_move() {\n\n let keys_to_clean = vec![];\n\n let streams_to_clean = vec![\n\n 
GAME_STATES_TOPIC,\n\n MOVE_ACCEPTED_EV_TOPIC,\n\n MOVE_MADE_EV_TOPIC,\n\n GAME_READY_EV_TOPIC,\n\n ];\n\n let client = rc_redis_client();\n\n panic_cleanup(\n\n streams_to_clean\n\n .clone()\n\n .iter()\n\n .map(|s| s.to_string())\n\n .collect(),\n\n keys_to_clean.clone(),\n\n );\n\n\n\n stream::create_consumer_group(&test_topics(), &client);\n\n\n", "file_path": "micro-changelog/tests/integration.rs", "rank": 62, "score": 138777.72257640978 }, { "content": "#[test]\n\nfn test_moves_processed() {\n\n while !FIRST_TEST_COMPLETE.load(std::sync::atomic::Ordering::Relaxed) {\n\n thread::sleep(Duration::from_secs(1))\n\n }\n\n let test_client = rc_redis_client();\n\n let streams_to_clean = vec![\n\n TEST_GAME_STATES_TOPIC.to_string(),\n\n TEST_MAKE_MOVE_CMD_TOPIC.to_string(),\n\n TEST_MOVE_ACCEPTED_EV_TOPIC.to_string(),\n\n ];\n\n\n\n let game_id = GameId(uuid::Uuid::new_v4());\n\n let game_states_data_key = redis_keys::game_states_key(&test_namespace(), &game_id);\n\n let keys_to_clean = vec![\n\n game_states_data_key.clone(),\n\n redis_keys::entry_ids_hash_key(&test_namespace()),\n\n ];\n\n panic_cleanup(streams_to_clean.clone(), keys_to_clean.clone());\n\n\n\n let to = test_opts();\n", "file_path": "micro-judge/tests/integration.rs", "rank": 63, "score": 138777.72257640978 }, { "content": "pub fn emoji(player: &Player) -> String {\n\n match player {\n\n Player::BLACK => vec![\"♚\", \"♛\", \"♜\", \"♝\", \"♞\", \"♟\"]\n\n .choose(&mut rand::thread_rng())\n\n .map(|s| s.to_string())\n\n .unwrap_or(\"♚\".to_owned()),\n\n Player::WHITE => vec![\"♔\", \"♕\", \"♖\", \"♗\", \"♘\", \"♙\"]\n\n .choose(&mut rand::thread_rng())\n\n .map(|s| s.to_string())\n\n .unwrap_or(\"♔\".to_owned()),\n\n }\n\n}\n\n\n", "file_path": "gateway/src/logging.rs", "rank": 64, "score": 138748.98279341753 }, { "content": "fn key(game_id: &GameId) -> String {\n\n format!(\"/BUGOUT/undo/game_state/{}\", game_id.0.to_string())\n\n}\n", "file_path": "undo/src/repo/game_state.rs", "rank": 65, "score": 
138228.69046820965 }, { "content": "pub fn create_redis_client() -> Rc<Client> {\n\n Rc::new(Client::open(REDIS_URL).expect(\"redis client\"))\n\n}\n", "file_path": "micro-color-chooser/src/lib.rs", "rank": 66, "score": 138074.57218486854 }, { "content": "#[test]\n\nfn test_emitted_game_states() {\n\n let test_client = rc_redis_client();\n\n let streams_to_clean = vec![TEST_GAME_STATES_TOPIC.to_string()];\n\n\n\n let game_id = GameId(uuid::Uuid::new_v4());\n\n let data_key = redis_keys::game_states_key(&test_namespace(), &game_id);\n\n let keys_to_clean = vec![\n\n data_key.clone(),\n\n redis_keys::entry_ids_hash_key(&test_namespace()),\n\n ];\n\n panic_cleanup(streams_to_clean.clone(), keys_to_clean.clone());\n\n\n\n let to = test_opts();\n\n stream::create_consumer_groups(&to.topics, &to.client);\n\n\n\n thread::spawn(move || stream::process(test_opts()));\n\n\n\n let mut conn = test_client.get_connection().unwrap();\n\n\n\n let expected_game_state = GameState {\n", "file_path": "micro-judge/tests/integration.rs", "rank": 67, "score": 137125.2315020436 }, { "content": "/// The first 8 digits of a UUID\n\npub fn short_uuid(uuid: Uuid) -> String {\n\n uuid.to_string()[..8].to_string()\n\n}\n\n\n\npub const FULL_BOARD_SIZE: u8 = 19;\n", "file_path": "gateway/src/lib.rs", "rank": 68, "score": 136951.97071793576 }, { "content": "pub fn redis_client() -> Rc<redis::Client> {\n\n Rc::new(redis::Client::open(REDIS_URL).expect(\"redis client\"))\n\n}\n\n\n\nimpl Components {\n\n pub fn new(client: Rc<redis::Client>) -> Self {\n\n Components {\n\n botness_repo: Box::new(client.clone()),\n\n game_state_repo: Box::new(client.clone()),\n\n xadd: Box::new(client.clone()),\n\n xack: Box::new(client.clone()),\n\n xread: Box::new(client),\n\n }\n\n }\n\n}\n", "file_path": "undo/src/components.rs", "rank": 69, "score": 136946.84365984454 }, { "content": "pub fn other_player(player: Player) -> Player {\n\n match player {\n\n Player::BLACK => Player::WHITE,\n\n Player::WHITE => 
Player::BLACK,\n\n }\n\n}\n", "file_path": "micro-sync/src/player.rs", "rank": 70, "score": 136946.84365984454 }, { "content": "fn redis_key(game_id: &GameId) -> String {\n\n format!(\"/BUGOUT/micro_sync/history/{}\", game_id.0)\n\n}\n", "file_path": "micro-sync/src/repo/history.rs", "rank": 71, "score": 136633.33027595872 }, { "content": "pub fn game_states_key(namespace: &RedisKeyNamespace, game_id: &GameId) -> String {\n\n format!(\"/{}/micro_judge/game_states/{}\", namespace.0, game_id.0)\n\n}\n", "file_path": "micro-judge/src/io/redis_keys.rs", "rank": 72, "score": 136625.03179312305 }, { "content": "pub fn create_consumer_group(client: &Client) {\n\n let mut conn = client.get_connection().expect(\"group create conn\");\n\n let mm: Result<(), _> = conn.xgroup_create_mkstream(topics::GAME_READY, GROUP_NAME, \"$\");\n\n if let Err(e) = mm {\n\n warn!(\n\n \"Ignoring error creating {} consumer group (it probably exists already) {:?}\",\n\n topics::GAME_READY,\n\n e\n\n );\n\n }\n\n let gs: Result<(), _> = conn.xgroup_create_mkstream(topics::CHOOSE_COLOR_PREF, GROUP_NAME, \"$\");\n\n if let Err(e) = gs {\n\n warn!(\n\n \"Ignoring error creating {} consumer group (it probably exists already) {:?}\",\n\n topics::CHOOSE_COLOR_PREF,\n\n e\n\n );\n\n }\n\n}\n", "file_path": "micro-color-chooser/src/stream/init.rs", "rank": 73, "score": 136473.2652863395 }, { "content": "class SessionDisconnectedTest {\n\n private val testDriver: TopologyTestDriver = setup()\n\n\n\n @BeforeAll\n\n fun init() {\n\n initLobby(testDriver)\n\n }\n\n\n\n\n\n @Test\n\n fun sessionDisconnected() {\n\n\n\n val sessionId = UUID.randomUUID()\n\n val gameId = UUID.randomUUID()\n\n\n\n val stringKeyFactory =\n\n ConsumerRecordFactory(StringSerializer(), StringSerializer())\n\n\n\n val lobbyWithOneGame = GameLobby()\n\n lobbyWithOneGame.games = listOf(Game(gameId, Visibility\n", "file_path": "kafka-reference-impl/game-lobby/src/test/kotlin/ClientDisconnectedTest.kt", "rank": 74, "score": 
136419.9927608848 }, { "content": "pub fn create_redis_client() -> Arc<redis::Client> {\n\n Arc::new(Client::open(REDIS_URL).expect(\"redis client\"))\n\n}\n\n\n\nimpl Components {\n\n pub fn new(client: Arc<Client>) -> Self {\n\n let (compute_move_in, compute_move_out): (Sender<ComputeMove>, Receiver<ComputeMove>) =\n\n unbounded();\n\n\n\n let (move_computed_in, move_computed_out): (Sender<MoveComputed>, Receiver<MoveComputed>) =\n\n unbounded();\n\n\n\n Components {\n\n attachment_repo: Box::new(client.clone()),\n\n board_size_repo: Arc::new(client.clone()),\n\n xreader: Box::new(client.clone()),\n\n xadder: Arc::new(client.clone()),\n\n xack: Arc::new(client),\n\n compute_move_in,\n\n compute_move_out,\n\n move_computed_in,\n\n move_computed_out,\n\n }\n\n }\n\n}\n", "file_path": "botlink/src/registry.rs", "rank": 75, "score": 135215.65381563918 }, { "content": "class FirstFindPublicGameCausesWaitTest {\n\n private val testDriver: TopologyTestDriver = setup()\n\n\n\n @BeforeAll\n\n fun init() {\n\n initLobby(testDriver)\n\n }\n\n\n\n\n\n @Test\n\n fun firstFindPublicGameCausesWaitForOpponentEvent() {\n\n val factory =\n\n ConsumerRecordFactory(UUIDSerializer(), StringSerializer())\n\n\n\n val creatorClientId = UUID.randomUUID()\n\n val creatorSessionId = UUID.randomUUID()\n\n\n\n val fpg = FindPublicGame(\n\n clientId = creatorClientId, sessionId = creatorSessionId\n\n )\n", "file_path": "kafka-reference-impl/game-lobby/src/test/kotlin/FirstFindPublicGameCausesWaitTest.kt", "rank": 76, "score": 135151.49321573458 }, { "content": "pub trait BotnessRepo {\n\n fn get(&self, game_id: &GameId, player: Player) -> Result<Botness, RepoErr>;\n\n fn put(&self, game_id: &GameId, player: Player, botness: Botness) -> Result<(), RepoErr>;\n\n}\n\n\n\nimpl BotnessRepo for Rc<Client> {\n\n fn get(&self, game_id: &GameId, player: Player) -> Result<Botness, RepoErr> {\n\n let mut conn = self.get_connection()?;\n\n let key = bot_id(game_id, player);\n\n let data: 
Result<Option<Vec<u8>>, _> = conn.get(&key).map_err(|e| RepoErr::Redis(e));\n\n\n\n if data.is_ok() {\n\n expire(&key, &mut conn)?\n\n }\n\n\n\n match data {\n\n Ok(Some(bytes)) => {\n\n let deser: Result<Botness, _> = bincode::deserialize(&bytes);\n\n deser.map_err(|e| RepoErr::SerDes(e))\n\n }\n", "file_path": "undo/src/repo/botness.rs", "rank": 77, "score": 135105.86431876413 }, { "content": "fn board_size_key(game_id: &GameId) -> String {\n\n format!(\"/BUGOUT/botlink/board_size/{}\", game_id.0.to_string())\n\n}\n", "file_path": "botlink/src/repo/board_size.rs", "rank": 78, "score": 135097.27630934195 }, { "content": "fn test_components() -> Components {\n\n Components {\n\n client: rc_redis_client(),\n\n redis_key_provider: KeyProvider(test_namespace()),\n\n }\n\n}\n\n\n", "file_path": "micro-changelog/tests/integration.rs", "rank": 79, "score": 134611.39996554283 }, { "content": "#[derive(Debug, Clone)]\n\nstruct SessionSender {\n\n pub session_id: SessionId,\n\n pub events_in: Sender<ClientEvents>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum RouterCommand {\n\n ObserveGame(GameId),\n\n DeleteSession {\n\n session_id: SessionId,\n\n game_id: Option<GameId>,\n\n client_id: Option<ClientId>,\n\n },\n\n Reconnect {\n\n client_id: ClientId,\n\n game_id: GameId,\n\n req_id: ReqId,\n\n },\n\n AddSession {\n\n session_id: SessionId,\n", "file_path": "gateway/src/router.rs", "rank": 80, "score": 134424.44038566298 }, { "content": "#[derive(Debug)]\n\nstruct GameSessions {\n\n pub sessions: Vec<SessionSender>,\n\n pub playerup: Player,\n\n pub modified_at: Instant,\n\n}\n\n\n\nimpl GameSessions {\n\n pub fn new(session: SessionSender) -> GameSessions {\n\n GameSessions {\n\n sessions: vec![session],\n\n playerup: Player::BLACK,\n\n modified_at: Instant::now(),\n\n }\n\n }\n\n\n\n pub fn add_session(&mut self, session: SessionSender) {\n\n self.sessions.push(session);\n\n self.modified_at = Instant::now()\n\n }\n\n\n\n /// Observe that this game still has an open 
connection somewhere\n\n pub fn observe_game(&mut self) {\n\n self.modified_at = Instant::now()\n\n }\n\n}\n\n\n", "file_path": "gateway/src/router.rs", "rank": 81, "score": 134424.44038566298 }, { "content": "pub fn create_consumer_group(client: &redis::Client) {\n\n let mut conn = client.get_connection().expect(\"group create conn\");\n\n let to_create = vec![\n\n topics::GAME_STATES_CHANGELOG,\n\n topics::BOT_ATTACHED,\n\n topics::UNDO_MOVE,\n\n ];\n\n for topic in to_create {\n\n let created: Result<(), _> = conn.xgroup_create_mkstream(topic, GROUP_NAME, \"$\");\n\n if let Err(e) = created {\n\n warn!(\n\n \"Ignoring error creating {} consumer group (it probably exists already) {:?}\",\n\n topic, e\n\n );\n\n }\n\n }\n\n}\n", "file_path": "undo/src/stream/init.rs", "rank": 82, "score": 133551.30428938314 }, { "content": "pub fn redis_client() -> Rc<redis::Client> {\n\n Rc::new(redis::Client::open(REDIS_URL).expect(\"redis client\"))\n\n}\n\n\n\nimpl Components {\n\n pub fn new(client: Rc<redis::Client>) -> Self {\n\n Components {\n\n game_lobby_repo: Box::new(client.clone()),\n\n xadd: Box::new(client),\n\n }\n\n }\n\n}\n", "file_path": "micro-game-lobby/src/components.rs", "rank": 83, "score": 133551.30428938314 }, { "content": "pub trait HistoryRepo {\n\n fn get(&self, game_id: &GameId) -> Result<Option<Vec<Move>>, FetchErr>;\n\n fn put(&self, game_id: &GameId, moves: Vec<Move>) -> Result<(), WriteErr>;\n\n}\n\n\n\nimpl HistoryRepo for Rc<Client> {\n\n fn get(&self, game_id: &GameId) -> Result<Option<Vec<Move>>, FetchErr> {\n\n match self.get_connection() {\n\n Ok(mut conn) => {\n\n let key = redis_key(game_id);\n\n let data: Result<Option<Vec<u8>>, _> =\n\n conn.get(&key).map_err(|e| FetchErr::Redis(e));\n\n\n\n if data.is_ok() {\n\n touch_ttl(&mut conn, &key)\n\n }\n\n\n\n match data {\n\n Ok(Some(bytes)) => {\n\n let deser: Result<Vec<Move>, _> = bincode::deserialize(&bytes);\n", "file_path": "micro-sync/src/repo/history.rs", "rank": 84, "score": 
133451.03650497948 }, { "content": "fn test_topics() -> StreamTopics {\n\n StreamTopics {\n\n game_states_changelog: GAME_STATES_TOPIC.to_string(),\n\n move_accepted_ev: MOVE_ACCEPTED_EV_TOPIC.to_string(),\n\n move_made_ev: MOVE_MADE_EV_TOPIC.to_string(),\n\n }\n\n}\n", "file_path": "micro-changelog/tests/integration.rs", "rank": 85, "score": 132958.9088911767 }, { "content": "pub trait GameLobbyRepo {\n\n fn get(&self) -> Result<GameLobby, FetchErr>;\n\n fn put(&self, game_lobby: &GameLobby) -> Result<(), WriteErr>;\n\n}\n\n\n\nimpl GameLobbyRepo for Rc<Client> {\n\n fn get(&self) -> Result<GameLobby, FetchErr> {\n\n if let Ok(mut conn) = self.get_connection() {\n\n let data: Result<Option<Vec<u8>>, _> = conn\n\n .get(super::GAME_LOBBY_KEY)\n\n .map_err(|_| FetchErr::RedisCall);\n\n match data {\n\n Ok(Some(bytes)) => Ok(bincode::deserialize(&bytes)?),\n\n Ok(None) => Ok(GameLobby::default()),\n\n Err(_) => Err(FetchErr::RedisCall),\n\n }\n\n } else {\n\n Err(FetchErr::Conn)\n\n }\n\n }\n\n fn put(&self, game_lobby: &GameLobby) -> Result<(), WriteErr> {\n\n if let (Ok(mut conn), Ok(bytes)) = (self.get_connection(), bincode::serialize(&game_lobby))\n\n {\n\n conn.set(super::GAME_LOBBY_KEY, bytes).map_err(|_| WriteErr)\n\n } else {\n\n Err(WriteErr)\n\n }\n\n }\n\n}\n", "file_path": "micro-game-lobby/src/repo/game_lobby_repo.rs", "rank": 86, "score": 132740.9769480934 }, { "content": "pub fn create_consumer_group(client: &redis::Client) {\n\n let mut conn = client.get_connection().expect(\"group create conn\");\n\n let to_create = vec![PROVIDE_HISTORY, GAME_STATES_CHANGELOG, REQ_SYNC, MOVE_MADE];\n\n for topic in to_create {\n\n let created: Result<(), _> = conn.xgroup_create_mkstream(topic, GROUP_NAME, \"$\");\n\n if let Err(e) = created {\n\n warn!(\n\n \"Ignoring error creating {} consumer group (it probably exists already) {:?}\",\n\n topic, e\n\n );\n\n }\n\n }\n\n}\n", "file_path": "micro-sync/src/stream/init.rs", "rank": 87, "score": 131949.9973908541 }, { 
"content": "/// \"Do we need to form a reply?\"\n\n/// Used when client is ahead of the system. Stores\n\n/// a requested sync event which can later be merged\n\n/// with a MOVE MADE event to form a sync reply.\n\npub trait ReplyOnMoveRepo {\n\n fn get(&self, game_id: &GameId, req_id: &ReqId) -> Result<Option<ReqSync>, FetchErr>;\n\n fn put(&self, req: &ReqSync) -> Result<(), WriteErr>;\n\n fn del(&self, game_id: &GameId, req_id: &ReqId) -> Result<(), WriteErr>;\n\n}\n\n\n\nimpl ReplyOnMoveRepo for Rc<Client> {\n\n fn get(&self, game_id: &GameId, req_id: &ReqId) -> Result<Option<ReqSync>, FetchErr> {\n\n match self.get_connection() {\n\n Ok(mut conn) => {\n\n let key = redis_key(game_id, req_id);\n\n let data: Result<Option<Vec<u8>>, _> =\n\n conn.get(&key).map_err(|e| FetchErr::Redis(e));\n\n\n\n if data.is_ok() {\n\n touch_ttl(&mut conn, &key)\n\n }\n\n match data {\n\n Ok(Some(bytes)) => {\n\n let deser: Result<ReqSync, _> = bincode::deserialize(&bytes);\n", "file_path": "micro-sync/src/repo/reply.rs", "rank": 88, "score": 131856.12412560204 }, { "content": "pub trait GameStateRepo {\n\n fn get(&self, game_id: &GameId) -> Result<Option<GameState>, RepoErr>;\n\n fn put(&self, game_state: &GameState) -> Result<(), RepoErr>;\n\n}\n\n\n\nimpl GameStateRepo for Rc<Client> {\n\n fn get(&self, game_id: &GameId) -> Result<Option<GameState>, RepoErr> {\n\n let mut conn = self.get_connection()?;\n\n let data: Option<Vec<u8>> = conn.get(key(game_id))?;\n\n Ok(if let Some(bytes) = data {\n\n Some(bincode::deserialize(&bytes)?)\n\n } else {\n\n None\n\n })\n\n }\n\n\n\n fn put(&self, game_state: &GameState) -> Result<(), RepoErr> {\n\n log::info!(\n\n \"🐌 game turn: {}, player up: {:?}, moves: {}\",\n\n game_state.turn,\n\n game_state.player_up,\n\n game_state.moves.len()\n\n );\n\n let mut conn = self.get_connection()?;\n\n let bytes = bincode::serialize(&game_state)?;\n\n conn.set(key(&game_state.game_id), bytes)?;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": 
"undo/src/repo/game_state.rs", "rank": 89, "score": 131851.92611898214 }, { "content": "fn test_namespace() -> RedisKeyNamespace {\n\n RedisKeyNamespace(\"BUGTEST\".to_string())\n\n}\n", "file_path": "micro-changelog/tests/integration.rs", "rank": 90, "score": 131362.0051242636 }, { "content": "fn test_namespace() -> RedisKeyNamespace {\n\n RedisKeyNamespace(\"BUGTEST\".to_string())\n\n}\n\n\n", "file_path": "micro-judge/tests/integration.rs", "rank": 91, "score": 131362.0051242636 }, { "content": "pub fn create_consumer_group(client: &redis::Client) {\n\n let mut conn = client.get_connection().expect(\"group create conn\");\n\n let to_create = INPUT_TOPICS.to_vec();\n\n for topic in to_create {\n\n let created: Result<(), _> = conn.xgroup_create_mkstream(topic, GROUP_NAME, \"$\");\n\n if let Err(e) = created {\n\n warn!(\n\n \"Ignoring error creating {} consumer group (it probably exists already) {:?}\",\n\n topic, e\n\n );\n\n }\n\n }\n\n}\n\n\n\npub struct RedisXReader {\n\n pub client: Arc<redis::Client>,\n\n}\n\n\n\nconst INPUT_TOPICS: &[&str; 10] = &[\n\n topics::BOT_ATTACHED_TOPIC,\n", "file_path": "gateway/src/redis_io/stream/xread.rs", "rank": 92, "score": 130408.21778195849 }, { "content": "pub fn create(host_url: RedisHostUrl) -> Pool {\n\n let manager = RedisConnectionManager::new(host_url.0).unwrap();\n\n r2d2::Pool::builder().build(manager).unwrap()\n\n}\n", "file_path": "redis-conn-pool/src/lib.rs", "rank": 93, "score": 130408.21778195849 }, { "content": "pub fn max_visits(bot: Bot) -> Option<u16> {\n\n match bot {\n\n Bot::KataGoOneStar => Some(ONE_STAR_VISITS),\n\n Bot::KataGoTwoStars => Some(TWO_STAR_VISITS),\n\n Bot::KataGoThreeStars => Some(THREE_STAR_VISITS),\n\n Bot::KataGoFourStars => None, // give it everything you've got (~500 visits)\n\n }\n\n}\n", "file_path": "botlink/src/max_visits.rs", "rank": 94, "score": 129732.97335455156 }, { "content": "\n\n OutputVerifier.compareKeyValue(\n\n output,\n\n creatorSessionId,\n\n 
jsonMapper.writeValueAsString(\n\n WaitForOpponent\n\n (\n\n gameId = actual.gameId,\n\n sessionId = creatorSessionId,\n\n eventId = actual.eventId,\n\n visibility = Visibility.Public\n\n )\n\n )\n\n )\n\n }\n\n\n\n\n\n\n\n @AfterAll\n\n fun tearDown() {\n\n testDriver.close()\n\n }\n\n\n\n\n\n}", "file_path": "kafka-reference-impl/game-lobby/src/test/kotlin/FirstFindPublicGameCausesWaitTest.kt", "rank": 95, "score": 128815.32457030957 }, { "content": "\n\n testDriver.pipeInput(\n\n factory.create(\n\n Topics.FIND_PUBLIC_GAME,\n\n creatorSessionId,\n\n jsonMapper.writeValueAsString(fpg)\n\n )\n\n )\n\n\n\n val output =\n\n testDriver.readOutput(\n\n Topics.WAIT_FOR_OPPONENT,\n\n UUIDDeserializer(),\n\n StringDeserializer()\n\n )\n\n\n\n val actual = jsonMapper.readValue(\n\n output.value(), WaitForOpponent::class\n\n .java\n\n )\n", "file_path": "kafka-reference-impl/game-lobby/src/test/kotlin/FirstFindPublicGameCausesWaitTest.kt", "rank": 96, "score": 128813.3550072583 }, { "content": "import org.apache.kafka.common.serialization.*\n\nimport org.apache.kafka.streams.TopologyTestDriver\n\nimport org.apache.kafka.streams.test.ConsumerRecordFactory\n\nimport org.apache.kafka.streams.test.OutputVerifier\n\nimport org.junit.jupiter.api.*\n\nimport serdes.jsonMapper\n\nimport java.util.*\n\n\n\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\n", "file_path": "kafka-reference-impl/game-lobby/src/test/kotlin/FirstFindPublicGameCausesWaitTest.kt", "rank": 97, "score": 128807.24709794561 }, { "content": "fn test_opts() -> stream::StreamOpts {\n\n let client = rc_redis_client();\n\n stream::StreamOpts {\n\n topics: StreamTopics {\n\n make_move_cmd: TEST_MAKE_MOVE_CMD_TOPIC.to_string(),\n\n game_states_changelog: TEST_GAME_STATES_TOPIC.to_string(),\n\n move_accepted_ev: TEST_MOVE_ACCEPTED_EV_TOPIC.to_string(),\n\n },\n\n game_states_repo: GameStatesRepo {\n\n namespace: test_namespace(),\n\n client: client.clone(),\n\n },\n\n client,\n\n }\n\n}\n\n\n", "file_path": 
"micro-judge/tests/integration.rs", "rank": 98, "score": 128047.13501509097 }, { "content": "/// Track unacknowledged messages by stream name\n\nstruct Unacknowledged(pub HashMap<String, Vec<XId>>);\n\nimpl Default for Unacknowledged {\n\n fn default() -> Self {\n\n Self(HashMap::new())\n\n }\n\n}\n", "file_path": "redis_streams/src/sorted_streams.rs", "rank": 99, "score": 127787.06774512911 } ]
Rust
src/util.rs
rantan/tapyrus-signer
bd8026460860469b15b2fca3ae158c801d15870f
use curv::{BigInt, GE}; use std::convert::TryFrom; use std::os::raw::c_int; use std::sync::atomic::AtomicUsize; use std::sync::Arc; pub fn sum_point(points: &Vec<GE>) -> GE { let mut iter = points.iter(); let head = iter.next().unwrap(); let tail = iter; tail.fold(head.clone(), |acc, x| acc + x) } pub fn jacobi(a: &BigInt, n: &BigInt) -> i8 { assert!(*n >= BigInt::from(3)); assert!(a < n); if a.is_zero() { return 0; } if *a == BigInt::from(1) { return 1; } let mut a1: BigInt = a.clone(); let mut e = 0; while a1.is_multiple_of(&BigInt::from(2)) { a1 = a1 >> 1; e += 1; } let mut s: i8 = if e & 1 == 0 || n.modulus(&BigInt::from(8)) == BigInt::from(1) || n.modulus(&BigInt::from(8)) == BigInt::from(7) { 1 } else if n.modulus(&BigInt::from(8)) == BigInt::from(3) || n.modulus(&BigInt::from(8)) == BigInt::from(5) { -1 } else { 0 }; if n.modulus(&BigInt::from(4)) == BigInt::from(3) && a1.modulus(&BigInt::from(4)) == BigInt::from(3) { s = -s } if a1 == BigInt::from(1) { s } else { s * jacobi(&(n % a1.clone()), &a1.clone()) } } const STOP_SIGNALS: [usize; 6] = [ signal_hook::SIGABRT as usize, signal_hook::SIGHUP as usize, signal_hook::SIGINT as usize, signal_hook::SIGQUIT as usize, signal_hook::SIGTERM as usize, signal_hook::SIGTRAP as usize, ]; pub fn set_stop_signal_handler() -> Result<Arc<AtomicUsize>, std::io::Error> { let handler = Arc::new(AtomicUsize::new(0)); for signal in &STOP_SIGNALS { signal_hook::flag::register_usize( *signal as c_int, Arc::clone(&handler), *signal as usize, )?; } Ok(handler) } pub fn signal_to_string(signal: usize) -> &'static str { let signal: u32 = TryFrom::try_from(signal).unwrap(); match signal as i32 { signal_hook::SIGABRT => "SIGABRT", signal_hook::SIGHUP => "SIGHUP", signal_hook::SIGINT => "SIGINT", signal_hook::SIGQUIT => "SIGQUIT", signal_hook::SIGTERM => "SIGTERM", signal_hook::SIGTRAP => "SIGTRAP", _ => unreachable!("unregistered signal received"), } } #[cfg(test)] mod tests { use super::*; use std::sync::atomic::Ordering; #[test] fn 
test_sum_point() { use curv::elliptic::curves::secp256_k1::*; use curv::elliptic::curves::traits::ECPoint; use curv::elliptic::curves::traits::ECScalar; use curv::BigInt; let s1 = ECScalar::from(&BigInt::from(1)); let s2 = ECScalar::from(&BigInt::from(2)); let s3 = ECScalar::from(&BigInt::from(3)); let p1 = GE::generator() * &s1; let p2 = GE::generator() * &s2; let p3 = GE::generator() * &s3; let sum = sum_point(&vec![p1, p2, p3]); let s6 = ECScalar::from(&BigInt::from(6)); let p6 = GE::generator() * &s6; assert_eq!(sum, p6); } #[test] fn test_jacobi() { assert_eq!(jacobi(&BigInt::from(158), &BigInt::from(235)), -1); assert_eq!(jacobi(&BigInt::from(5), &BigInt::from(12)), -1); assert_eq!(jacobi(&BigInt::from(16), &BigInt::from(60)), 1); } #[test] fn test_signals() { let handler = set_stop_signal_handler().unwrap(); unsafe { libc::raise(signal_hook::SIGINT); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGINT as usize ); libc::raise(signal_hook::SIGABRT); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGABRT as usize ); libc::raise(signal_hook::SIGHUP); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGHUP as usize ); libc::raise(signal_hook::SIGQUIT); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGQUIT as usize ); libc::raise(signal_hook::SIGTERM); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGTERM as usize ); libc::raise(signal_hook::SIGTRAP); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGTRAP as usize ); } } }
use curv::{BigInt, GE}; use std::convert::TryFrom; use std::os::raw::c_int; use std::sync::atomic::AtomicUsize; use std::sync::Arc; pub fn sum_point(points: &Vec<GE>) -> GE { let mut iter = points.iter(); let head = iter.next().unwrap(); let tail = iter; tail.fold(head.clone(), |acc, x| acc + x) } pub fn jacobi(a: &BigInt, n: &BigInt) -> i8 { assert!(*n >= BigInt::from(3)); assert!(a < n); if a.is_zero() { return 0; } if *a == BigInt::from(1) { return 1; } let mut a1: BigInt = a.clone(); let mut e = 0;
const STOP_SIGNALS: [usize; 6] = [ signal_hook::SIGABRT as usize, signal_hook::SIGHUP as usize, signal_hook::SIGINT as usize, signal_hook::SIGQUIT as usize, signal_hook::SIGTERM as usize, signal_hook::SIGTRAP as usize, ]; pub fn set_stop_signal_handler() -> Result<Arc<AtomicUsize>, std::io::Error> { let handler = Arc::new(AtomicUsize::new(0)); for signal in &STOP_SIGNALS { signal_hook::flag::register_usize( *signal as c_int, Arc::clone(&handler), *signal as usize, )?; } Ok(handler) } pub fn signal_to_string(signal: usize) -> &'static str { let signal: u32 = TryFrom::try_from(signal).unwrap(); match signal as i32 { signal_hook::SIGABRT => "SIGABRT", signal_hook::SIGHUP => "SIGHUP", signal_hook::SIGINT => "SIGINT", signal_hook::SIGQUIT => "SIGQUIT", signal_hook::SIGTERM => "SIGTERM", signal_hook::SIGTRAP => "SIGTRAP", _ => unreachable!("unregistered signal received"), } } #[cfg(test)] mod tests { use super::*; use std::sync::atomic::Ordering; #[test] fn test_sum_point() { use curv::elliptic::curves::secp256_k1::*; use curv::elliptic::curves::traits::ECPoint; use curv::elliptic::curves::traits::ECScalar; use curv::BigInt; let s1 = ECScalar::from(&BigInt::from(1)); let s2 = ECScalar::from(&BigInt::from(2)); let s3 = ECScalar::from(&BigInt::from(3)); let p1 = GE::generator() * &s1; let p2 = GE::generator() * &s2; let p3 = GE::generator() * &s3; let sum = sum_point(&vec![p1, p2, p3]); let s6 = ECScalar::from(&BigInt::from(6)); let p6 = GE::generator() * &s6; assert_eq!(sum, p6); } #[test] fn test_jacobi() { assert_eq!(jacobi(&BigInt::from(158), &BigInt::from(235)), -1); assert_eq!(jacobi(&BigInt::from(5), &BigInt::from(12)), -1); assert_eq!(jacobi(&BigInt::from(16), &BigInt::from(60)), 1); } #[test] fn test_signals() { let handler = set_stop_signal_handler().unwrap(); unsafe { libc::raise(signal_hook::SIGINT); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGINT as usize ); libc::raise(signal_hook::SIGABRT); assert_eq!( handler.load(Ordering::Relaxed), 
signal_hook::SIGABRT as usize ); libc::raise(signal_hook::SIGHUP); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGHUP as usize ); libc::raise(signal_hook::SIGQUIT); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGQUIT as usize ); libc::raise(signal_hook::SIGTERM); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGTERM as usize ); libc::raise(signal_hook::SIGTRAP); assert_eq!( handler.load(Ordering::Relaxed), signal_hook::SIGTRAP as usize ); } } }
while a1.is_multiple_of(&BigInt::from(2)) { a1 = a1 >> 1; e += 1; } let mut s: i8 = if e & 1 == 0 || n.modulus(&BigInt::from(8)) == BigInt::from(1) || n.modulus(&BigInt::from(8)) == BigInt::from(7) { 1 } else if n.modulus(&BigInt::from(8)) == BigInt::from(3) || n.modulus(&BigInt::from(8)) == BigInt::from(5) { -1 } else { 0 }; if n.modulus(&BigInt::from(4)) == BigInt::from(3) && a1.modulus(&BigInt::from(4)) == BigInt::from(3) { s = -s } if a1 == BigInt::from(1) { s } else { s * jacobi(&(n % a1.clone()), &a1.clone()) } }
function_block-function_prefix_line
[ { "content": "fn compute_e(r: &GE, y: &GE, message: &[u8]) -> FE {\n\n let mut hasher = Sha256::new();\n\n hasher.input(&r.get_element().serialize()[1..33]);\n\n hasher.input(&y.get_element().serialize()[..]);\n\n hasher.input(message);\n\n let e_bn = BigInt::from(&hasher.result()[..]);\n\n\n\n ECScalar::from(&e_bn)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::compute_e;\n\n use curv::elliptic::curves::traits::{ECPoint, ECScalar};\n\n use curv::{BigInt, FE, GE};\n\n\n\n #[test]\n\n fn test_compute_e() {\n\n let g: GE = ECPoint::generator();\n\n\n", "file_path": "src/crypto/multi_party_schnorr.rs", "rank": 2, "score": 94462.60836495357 }, { "content": "pub fn keygen_t_n_parties(\n\n t: usize,\n\n n: usize,\n\n parties: &[usize],\n\n) -> (Vec<Keys>, Vec<SharedKeys>, GE, Vec<VerifiableSS>) {\n\n let parames = Parameters {\n\n threshold: t,\n\n share_count: n.clone(),\n\n };\n\n assert_eq!(parties.len(), n.clone());\n\n let party_keys_vec = (0..n.clone())\n\n .map(|i| Keys::phase1_create(parties[i]))\n\n .collect::<Vec<Keys>>();\n\n\n\n let mut bc1_vec = Vec::new();\n\n let mut blind_vec = Vec::new();\n\n for i in 0..n.clone() {\n\n let (bc1, blind) = party_keys_vec[i].phase1_broadcast();\n\n bc1_vec.push(bc1);\n\n blind_vec.push(blind);\n", "file_path": "src/crypto/test_multi_party_schnorr.rs", "rank": 3, "score": 91105.8029849815 }, { "content": "pub fn create_message() -> Message {\n\n let signer_id = SignerID::new(TEST_KEYS.pubkeys()[0]);\n\n Message {\n\n message_type: MessageType::BlockGenerationRoundMessages(\n\n BlockGenerationRoundMessageType::Roundfailure,\n\n ),\n\n sender_id: signer_id,\n\n receiver_id: None,\n\n }\n\n}\n\n\n", "file_path": "src/tests/helper/mod.rs", "rank": 4, "score": 90215.71690496916 }, { "content": "pub fn process_completedblock<T>(\n\n sender_id: &SignerID,\n\n _block: &Block,\n\n prev_state: &NodeState,\n\n params: &NodeParameters<T>,\n\n) -> NodeState\n\nwhere\n\n T: TapyrusApi,\n\n{\n\n if !is_master(sender_id, 
prev_state, params) {\n\n log::warn!(\"Peer {} may be malicious node. It might impersonate as master or your node might be behind from others.\", sender_id);\n\n return prev_state.clone(); // Ignore message\n\n }\n\n\n\n NodeState::RoundComplete {\n\n master_index: master_index(prev_state, params)\n\n .expect(\"Previous state getting round complete should have round master\"),\n\n next_master_index: next_master_index(prev_state, params),\n\n }\n\n}\n", "file_path": "src/signer_node/message_processor/process_completedblock.rs", "rank": 5, "score": 84733.07852206085 }, { "content": "pub fn get_block(index: u8) -> Block {\n\n let bytes: Vec<u8> = match index {\n\n 0 => vec![\n\n 0, 0, 0, 32, 77, 228, 137, 121, 91, 31, 137, 198, 243, 119, 113, 157, 141, 178, 102,\n\n 20, 70, 231, 35, 162, 74, 119, 24, 168, 174, 160, 175, 210, 32, 50, 130, 188, 150, 67,\n\n 146, 147, 210, 142, 105, 133, 119, 45, 47, 25, 75, 133, 112, 7, 79, 233, 69, 167, 215,\n\n 96, 132, 19, 158, 148, 208, 190, 32, 64, 71, 79, 96, 189, 255, 240, 47, 231, 6, 230,\n\n 177, 165, 201, 103, 20, 170, 124, 253, 51, 113, 94, 190, 113, 177, 76, 137, 120, 230,\n\n 165, 107, 85, 240, 52, 212, 5, 180, 11, 93, 0, 1, 2, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,\n\n 0, 0, 3, 81, 1, 1, 255, 255, 255, 255, 2, 0, 242, 5, 42, 1, 0, 0, 0, 25, 118, 169, 20,\n\n 207, 18, 219, 192, 75, 176, 222, 111, 182, 168, 122, 90, 235, 75, 46, 116, 201, 112, 6,\n\n 178, 136, 172, 0, 0, 0, 0, 0, 0, 0, 0, 38, 106, 36, 170, 33, 169, 237, 226, 246, 28,\n\n 63, 113, 209, 222, 253, 63, 169, 153, 223, 163, 105, 83, 117, 92, 105, 6, 137, 121,\n\n 153, 98, 180, 139, 235, 216, 54, 151, 78, 140, 249, 1, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n ],\n\n 1 => vec![\n\n 0, 0, 0, 32, 125, 4, 68, 194, 18, 166, 21, 58, 24, 227, 192, 57, 180, 106, 161, 164,\n\n 58, 121, 201, 176, 104, 60, 253, 128, 
132, 142, 93, 198, 8, 102, 167, 156, 76, 187,\n", "file_path": "src/tests/helper/blocks.rs", "rank": 6, "score": 81923.59005057714 }, { "content": "/// command example:\n\n/// ./target/debug/node -p=03831a69b8009833ab5b0326012eaf489bfea35a7321b1ca15b11d88131423fafc -p=02ce7edc292d7b747fab2f23584bbafaffde5c8ff17cf689969614441e0527b900 -p=02785a891f323acd6cef0fc509bb14304410595914267c50467e51c87142acbb5e --privatekey=cUwpWhH9CbYwjUWzfz1UVaSjSQm9ALXWRqeFFiZKnn8cV6wqNXQA -t 2\n\npub fn get_options<'a, 'b>() -> clap::App<'a, 'b> {\n\n App::new(\"node\")\n\n .about(\"Tapyrus siner node\")\n\n .arg(Arg::with_name(OPTION_NAME_CONFIG)\n\n .short(\"c\")\n\n .long(\"config\")\n\n .value_name(\"CONFIG_FILE_PATH\")\n\n .default_value(DEFAULT_CONFIG_FILENAME)\n\n .help(\"Load settings from this file. when defined both in file and command line args, then command line args take precedence.\"))\n\n .arg(Arg::with_name(OPTION_NAME_TO_ADDRESS)\n\n .long(\"to_address\")\n\n .value_name(\"TO_ADDRESS\")\n\n .help(\"Coinbase pay to address.\"))\n\n .arg(Arg::with_name(OPTION_NAME_PUBLIC_KEY)\n\n .short(\"p\")\n\n .long(\"publickey\")\n\n .value_name(\"PUBKEY\")\n\n .multiple(true)\n\n .help(\"Tapyrus signer public key. not need '0x' prefix. 
example: 03831a69b8009833ab5b0326012eaf489bfea35a7321b1ca15b11d88131423fafc\"))\n\n .arg(Arg::with_name(OPTION_NAME_THRESHOLD)\n", "file_path": "src/command_args.rs", "rank": 8, "score": 80849.11536233136 }, { "content": "pub fn process_blocksig<T, C>(\n\n sender_id: &SignerID,\n\n blockhash: Hash,\n\n gamma_i: FE,\n\n e: FE,\n\n priv_shared_keys: &SharedKeys,\n\n shared_secrets: &SharedSecretMap,\n\n prev_state: &NodeState,\n\n conman: &C,\n\n params: &NodeParameters<T>,\n\n) -> NodeState\n\nwhere\n\n T: TapyrusApi,\n\n C: ConnectionManager,\n\n{\n\n match prev_state {\n\n NodeState::Master {\n\n block_key,\n\n block_shared_keys,\n\n shared_block_secrets,\n", "file_path": "src/signer_node/message_processor/process_blocksig.rs", "rank": 9, "score": 80707.39195534887 }, { "content": "pub fn process_nodevss<T, C>(\n\n sender_id: &SignerID,\n\n vss: VerifiableSS,\n\n secret_share: FE,\n\n signer_node: &mut SignerNode<T, C>,\n\n) -> NodeState\n\nwhere\n\n T: TapyrusApi,\n\n C: ConnectionManager,\n\n{\n\n let params = signer_node.params.sharing_params();\n\n\n\n signer_node.shared_secrets.insert(\n\n sender_id.clone(),\n\n SharedSecret {\n\n vss: vss.clone(),\n\n secret_share,\n\n },\n\n );\n\n\n", "file_path": "src/signer_node/message_processor/process_nodevss.rs", "rank": 10, "score": 80707.39195534887 }, { "content": "pub fn process_candidateblock<T, C>(\n\n sender_id: &SignerID,\n\n block: &Block,\n\n prev_state: &NodeState,\n\n conman: &C,\n\n params: &NodeParameters<T>,\n\n) -> NodeState\n\nwhere\n\n T: TapyrusApi,\n\n C: ConnectionManager,\n\n{\n\n log::info!(\n\n \"candidateblock received. 
block hash for signing: {:?}\",\n\n block.sighash()\n\n );\n\n\n\n if let Err(_e) = params.rpc.testproposedblock(&block) {\n\n log::warn!(\"Received Invalid candidate block sender: {}\", sender_id);\n\n return prev_state.clone();\n\n }\n", "file_path": "src/signer_node/message_processor/process_candidateblock.rs", "rank": 11, "score": 80707.39195534887 }, { "content": "pub fn process_blockvss<T, C>(\n\n sender_id: &SignerID,\n\n blockhash: Hash,\n\n vss_for_positive: VerifiableSS,\n\n secret_share_for_positive: FE,\n\n vss_for_negative: VerifiableSS,\n\n secret_share_for_negative: FE,\n\n priv_shared_keys: &SharedKeys,\n\n prev_state: &NodeState,\n\n conman: &C,\n\n params: &NodeParameters<T>,\n\n) -> NodeState\n\nwhere\n\n T: TapyrusApi,\n\n C: ConnectionManager,\n\n{\n\n match prev_state {\n\n NodeState::Master {\n\n block_key,\n\n shared_block_secrets,\n", "file_path": "src/signer_node/message_processor/process_blockvss.rs", "rank": 12, "score": 80707.39195534887 }, { "content": "pub fn address(private_key: &PrivateKey) -> Address {\n\n let secp = secp256k1::Secp256k1::new();\n\n let self_pubkey = private_key.public_key(&secp);\n\n Address::p2pkh(&self_pubkey, private_key.network)\n\n}\n\n\n\npub mod test_vectors {\n\n use crate::blockdata::Block;\n\n use crate::net::SignerID;\n\n use crate::signer_node::NodeParameters;\n\n use crate::signer_node::SharedSecret;\n\n use crate::tests::helper::node_parameters_builder::NodeParametersBuilder;\n\n use crate::tests::helper::rpc::MockRpc;\n\n use bitcoin::{PrivateKey, PublicKey};\n\n use curv::{FE, GE};\n\n use serde_json::Value;\n\n use std::fs::read_to_string;\n\n use std::str::FromStr;\n\n\n\n pub fn load_test_vector(file: &str) -> Result<Value, LoadJsonFileError> {\n", "file_path": "src/tests/helper/mod.rs", "rank": 13, "score": 80216.58024396702 }, { "content": "pub fn enable_log(log_level: Option<log::Level>) {\n\n if let Some(level) = log_level {\n\n std::env::set_var(\"RUST_LOG\", level.to_string());\n\n } else 
{\n\n std::env::set_var(\"RUST_LOG\", \"TRACE\");\n\n }\n\n\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "src/tests/helper/mod.rs", "rank": 14, "score": 76791.22008865325 }, { "content": "pub fn sender_index(sender_id: &SignerID, pubkey_list: &[PublicKey]) -> usize {\n\n //Unknown sender is already ignored.\n\n pubkey_list\n\n .iter()\n\n .position(|pk| pk == &sender_id.pubkey)\n\n .unwrap()\n\n}\n", "file_path": "src/signer_node/utils.rs", "rank": 16, "score": 69663.18203320519 }, { "content": "pub fn next_master_index<T>(state: &NodeState, params: &NodeParameters<T>) -> usize\n\nwhere\n\n T: TapyrusApi,\n\n{\n\n let next = match state {\n\n NodeState::Joining => 0,\n\n NodeState::Master { .. } => params.self_node_index + 1,\n\n NodeState::Member { master_index, .. } => master_index + 1,\n\n NodeState::RoundComplete {\n\n next_master_index, ..\n\n } => *next_master_index,\n\n };\n\n\n\n next % params.pubkey_list.len()\n\n}\n\n\n", "file_path": "src/signer_node/mod.rs", "rank": 17, "score": 65776.28279062937 }, { "content": "pub fn master_index<T>(state: &NodeState, params: &NodeParameters<T>) -> Option<usize>\n\nwhere\n\n T: TapyrusApi,\n\n{\n\n match state {\n\n NodeState::Master { .. } => Some(params.self_node_index),\n\n NodeState::Member { master_index, .. } => Some(*master_index),\n\n NodeState::RoundComplete { master_index, .. } => Some(*master_index),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/signer_node/mod.rs", "rank": 18, "score": 64636.75154304577 }, { "content": "pub fn is_master<T>(sender_id: &SignerID, state: &NodeState, params: &NodeParameters<T>) -> bool\n\nwhere\n\n T: TapyrusApi,\n\n{\n\n match state {\n\n NodeState::Master { .. } => params.signer_id == *sender_id,\n\n NodeState::Member { master_index, .. 
} => {\n\n let master_id = params.pubkey_list[*master_index];\n\n master_id == sender_id.pubkey\n\n }\n\n _ => false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::net::{ConnectionManager, ConnectionManagerError, Message, SignerID};\n\n use crate::rpc::tests::{safety, safety_error, MockRpc};\n\n use crate::rpc::TapyrusApi;\n\n use crate::signer_node::{\n", "file_path": "src/signer_node/mod.rs", "rank": 19, "score": 61398.4179680397 }, { "content": "pub trait ConnectionManager {\n\n type ERROR: std::error::Error;\n\n fn broadcast_message(&self, message: Message);\n\n fn send_message(&self, message: Message);\n\n fn start(\n\n &self,\n\n message_processor: impl FnMut(Message) -> ControlFlow<()> + Send + 'static,\n\n id: SignerID,\n\n ) -> JoinHandle<()>;\n\n fn error_handler(&mut self) -> Option<Receiver<ConnectionManagerError<Self::ERROR>>>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ConnectionManagerError<E: std::error::Error> {\n\n description: String,\n\n cause: Option<E>,\n\n}\n\n\n\nimpl<E: std::error::Error> std::fmt::Display for ConnectionManagerError<E> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n", "file_path": "src/net.rs", "rank": 20, "score": 53442.01128646232 }, { "content": "pub trait TapyrusApi {\n\n /// Get or Create candidate block.\n\n fn getnewblock(&self, address: &Address) -> Result<Block, Error>;\n\n /// Validate to candidateblock\n\n fn testproposedblock(&self, block: &Block) -> Result<bool, Error>;\n\n /// Broadcast new block include enough proof.\n\n fn submitblock(&self, block: &Block) -> Result<(), Error>;\n\n /// Get block chain info\n\n fn getblockchaininfo(&self) -> Result<GetBlockchainInfoResult, Error>;\n\n}\n\n\n\nimpl Rpc {\n\n pub fn new(url: String, user: Option<String>, pass: Option<String>) -> Self {\n\n // Check that if we have a password, we have a username; other way around is ok\n\n debug_assert!(pass.is_none() || user.is_some());\n\n Rpc {\n\n client: 
jsonrpc::client::Client::new(url, user, pass),\n\n }\n\n }\n\n\n", "file_path": "src/rpc.rs", "rank": 21, "score": 53442.01128646232 }, { "content": "pub trait ToShares {\n\n fn to_shares(&self) -> Vec<FE>;\n\n}\n\n\n\nimpl ToShares for SharedSecretMap {\n\n fn to_shares(&self) -> Vec<FE> {\n\n self.values().map(|i| i.secret_share).collect()\n\n }\n\n}\n\n\n\nstatic INITIAL_MASTER_INDEX: usize = 0;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum NodeState {\n\n Joining,\n\n Master {\n\n /// *block_key* is random value for using int the Signature Issuing Protocol.\n\n /// VSS which is distributed to each other signer is generated by this key. All signers in\n\n /// all block generation rounds has each own block_key.\n\n block_key: Option<FE>,\n", "file_path": "src/signer_node/mod.rs", "rank": 22, "score": 52216.45253998137 }, { "content": "pub trait ToVerifiableSS {\n\n fn to_vss(&self) -> Vec<VerifiableSS>;\n\n}\n\n\n\nimpl ToVerifiableSS for SharedSecretMap {\n\n fn to_vss(&self) -> Vec<VerifiableSS> {\n\n self.values().map(|i| i.vss.clone()).collect()\n\n }\n\n}\n\n\n", "file_path": "src/signer_node/mod.rs", "rank": 23, "score": 51080.60084794463 }, { "content": "fn main() {\n\n let ctx = Ctx::new();\n\n let handle = subscribe(&ctx);\n\n publish( \"candidate_block\");\n\n handle.join().unwrap();\n\n}\n", "file_path": "src/mvs.rs", "rank": 24, "score": 50501.92713661594 }, { "content": "pub trait ToSharedSecretMap {\n\n fn for_negative(&self) -> SharedSecretMap;\n\n fn for_positive(&self) -> SharedSecretMap;\n\n}\n\n\n\nimpl ToSharedSecretMap for BidirectionalSharedSecretMap {\n\n fn for_positive(&self) -> SharedSecretMap {\n\n let mut map = SharedSecretMap::new();\n\n for (key, value) in self.iter() {\n\n map.insert(*key, value.0.clone());\n\n }\n\n map\n\n }\n\n fn for_negative(&self) -> SharedSecretMap {\n\n let mut map = SharedSecretMap::new();\n\n for (key, value) in self.iter() {\n\n map.insert(*key, value.1.clone());\n\n }\n\n map\n\n }\n\n}\n", 
"file_path": "src/signer_node/mod.rs", "rank": 25, "score": 50024.95454410138 }, { "content": "pub trait Builder {\n\n fn new() -> Self;\n\n fn build(&self) -> NodeState;\n\n}\n\n\n\npub struct Master {\n\n block_key: Option<FE>,\n\n shared_block_secrets: BidirectionalSharedSecretMap,\n\n block_shared_keys: Option<(bool, FE, GE)>,\n\n candidate_block: Block,\n\n signatures: BTreeMap<SignerID, (FE, FE)>,\n\n round_is_done: bool,\n\n}\n\n\n\nimpl Builder for Master {\n\n fn new() -> Self {\n\n Self {\n\n block_key: None,\n\n shared_block_secrets: BidirectionalSharedSecretMap::new(),\n\n block_shared_keys: None,\n", "file_path": "src/tests/helper/node_state_builder.rs", "rank": 26, "score": 50024.95454410138 }, { "content": "#[test]\n\nfn sample() {\n\n assert!(true);\n\n}\n", "file_path": "tests/tapyrus-signer.rs", "rank": 27, "score": 49083.97050074053 }, { "content": "#[test]\n\nfn test_format_signature() {\n\n use curv::elliptic::curves::secp256_k1::*;\n\n use std::str::FromStr;\n\n\n\n let pk = BigInt::from_str(\n\n \"109776030561885333132557262259067839518424530456572565024242550494358478943987\",\n\n )\n\n .unwrap();\n\n let x = BigInt::from_str(\n\n \"90077539296702276303134969795375843753866389548876542277234805612812650094225\",\n\n )\n\n .unwrap();\n\n let y = BigInt::from_str(\n\n \"87890325134225311191847774682692230651684221898402757774563799733641956930425\",\n\n )\n\n .unwrap();\n\n\n\n let sig = Signature {\n\n sigma: ECScalar::from(&pk),\n\n v: Secp256k1Point::from_coor(&x, &y),\n\n };\n\n assert_eq!(Sign::format_signature(&sig), \"40c726149bfb2d4ab64823e0cfd8245645a7950e605ef9222735d821ae570b1e91f2b3080d94faf40969c08b663ff1556fe7fbbcfcb648ac2763c16a15a08676f3\");\n\n\n\n let sig_0 = Signature {\n\n sigma: ECScalar::from(&BigInt::one()),\n\n v: Secp256k1Point::from_coor(&x, &y),\n\n };\n\n assert_eq!(Sign::format_signature(&sig_0), 
\"40c726149bfb2d4ab64823e0cfd8245645a7950e605ef9222735d821ae570b1e910000000000000000000000000000000000000000000000000000000000000001\");\n\n}\n", "file_path": "src/sign.rs", "rank": 28, "score": 47778.000943834864 }, { "content": "#[test]\n\nfn test_create_key() {\n\n use curv::elliptic::curves::secp256_k1::*;\n\n use std::str::FromStr;\n\n\n\n let pk = BigInt::from_str(\n\n \"45888996919894035081237286108090342830506757770293597094224988299678468039582\",\n\n )\n\n .unwrap();\n\n let key = Sign::create_key(0, Some(pk.clone()));\n\n assert_eq!(key.party_index, 0);\n\n assert_eq!(key.u_i, ECScalar::from(&pk));\n\n let x = BigInt::from_str(\n\n \"59785365775367791548524849652375710528443431367690667459926784930515989662882\",\n\n )\n\n .unwrap();\n\n let y = BigInt::from_str(\n\n \"90722439330137878450843117102075228343061266416912046868469127729012019088799\",\n\n )\n\n .unwrap();\n\n assert_eq!(key.y_i, Secp256k1Point::from_coor(&x, &y));\n\n}\n\n\n", "file_path": "src/sign.rs", "rank": 29, "score": 47778.000943834864 }, { "content": "#[test]\n\nfn test_load() {\n\n let matches = get_options()\n\n .get_matches_from(vec![\"node\", \"-c=tests/resources/signer_config_sample.toml\"]);\n\n let args = CommandArgs::load(matches);\n\n assert!(args.is_ok());\n\n assert!(args.unwrap().config.is_some());\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 30, "score": 47778.000943834864 }, { "content": "#[test]\n\n#[should_panic(expected = \"Must be specified public_keys.\")]\n\nfn test_no_pubkeys() {\n\n let matches = get_options().get_matches_from(vec![\"node\"]);\n\n let args = CommandArgs {\n\n matches,\n\n config: Some(ConfigToml::default()),\n\n };\n\n let _pubkeys = args.signer_config().public_keys();\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 31, "score": 47778.000943834864 }, { "content": "#[test]\n\n#[should_panic(expected = \"Must be specified threshold.\")]\n\nfn test_no_thrshold() {\n\n let matches = 
get_options().get_matches_from(vec![\"node\"]);\n\n let args = CommandArgs {\n\n matches,\n\n config: Some(ConfigToml::default()),\n\n };\n\n let _threshold = args.signer_config().threshold();\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 32, "score": 47778.000943834864 }, { "content": "#[test]\n\nfn test_load_from_file() {\n\n let matches = get_options()\n\n .get_matches_from(vec![\"node\", \"-c=tests/resources/signer_config_sample.toml\"]);\n\n let args = CommandArgs::load(matches).unwrap();\n\n let pubkeys = args.signer_config().public_keys();\n\n assert_eq!(pubkeys.len(), 3);\n\n assert_eq!(\n\n pubkeys[0].to_string(),\n\n \"033cfe7fa1be58191b9108883543e921d31dc7726e051ee773e0ea54786ce438f8\"\n\n );\n\n assert_eq!(\n\n pubkeys[1].to_string(),\n\n \"020464074b94702e9b07803d247021943bdcc1f8700b92b66defb7fadd76e80acf\"\n\n );\n\n assert_eq!(\n\n pubkeys[2].to_string(),\n\n \"02cbe0ad70ffe110d097db648fda20bef14dc72b5c9979c137c451820c176ac23f\"\n\n );\n\n\n\n let threshold = args.signer_config().threshold();\n", "file_path": "src/command_args.rs", "rank": 33, "score": 46571.255715095875 }, { "content": "fn test_invid_pubkeys() {\n\n let matches = get_options().get_matches_from(vec![\n\n \"node\",\n\n \"-c=tests/resources/signer_config.toml\",\n\n \"-p=aaaa\",\n\n \"-p=bbbb\",\n\n ]);\n\n let args = CommandArgs::load(matches).unwrap();\n\n let _pubkeys = args.signer_config().public_keys();\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 34, "score": 46571.255715095875 }, { "content": "#[test]\n\n#[should_panic(expected = \"Must be specified private_key.\")]\n\nfn test_no_private_key() {\n\n let matches = get_options().get_matches_from(vec![\"node\"]);\n\n let args = CommandArgs {\n\n matches,\n\n config: Some(ConfigToml::default()),\n\n };\n\n let _privkey = args.signer_config().private_key();\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 35, "score": 46571.255715095875 }, { "content": "#[test]\n\nfn test_priority_commandline() {\n\n 
let matches = get_options().get_matches_from(vec![\n\n \"node\",\n\n \"-c=tests/resources/signer_config.toml\",\n\n \"-p=020464074b94702e9b07803d247021943bdcc1f8700b92b66defb7fadd76e80acf\",\n\n \"-p=033cfe7fa1be58191b9108883543e921d31dc7726e051ee773e0ea54786ce438f8\",\n\n \"--threshold=1\",\n\n \"--privatekey=L4Bw5GTJXL7Nd5wjprXim2sMpNgTSieZ14FCaHax7zzRnHbx19sc\",\n\n \"--rpchost=tapyrus.dev.chaintope.com\",\n\n \"--rpcport=12345\",\n\n \"--rpcuser=test\",\n\n \"--rpcpass=test\",\n\n \"--redishost=redis.endpoint.dev.chaintope.com\",\n\n \"--redisport=88888\",\n\n \"--daemon\",\n\n \"--pid=/tmp/test.pid\",\n\n \"--logfile=/tmp/tapyrus-signer.log\",\n\n ]);\n\n let args = CommandArgs::load(matches).unwrap();\n\n let pubkeys = args.signer_config().public_keys();\n", "file_path": "src/command_args.rs", "rank": 36, "score": 46571.255715095875 }, { "content": "#[test]\n\n#[should_panic(expected = \"'aabbccdd' is invalid address. error msg:\")]\n\nfn test_invalid_to_address() {\n\n let matches = get_options().get_matches_from(vec![\"node\"]);\n\n let args = CommandArgs {\n\n matches,\n\n config: Some(ConfigToml {\n\n signer: Some(SignerToml {\n\n to_address: Some(\"aabbccdd\".to_string()),\n\n publickeys: None,\n\n threshold: Some(0),\n\n privatekey: None,\n\n }),\n\n ..ConfigToml::default()\n\n }),\n\n };\n\n let _to_address = args.signer_config().to_address();\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 37, "score": 46571.255715095875 }, { "content": "#[test]\n\nfn test_private_key_to_big_int() {\n\n use std::str::FromStr;\n\n\n\n let key = secp256k1::SecretKey::from_str(\n\n \"657440783dd10977c49f87c51dc68b63508e88c7ea9371dc19e6fcd0f5f8639e\",\n\n )\n\n .unwrap();\n\n assert_eq!(\n\n Sign::private_key_to_big_int(key).unwrap(),\n\n BigInt::from_str(\n\n \"45888996919894035081237286108090342830506757770293597094224988299678468039582\"\n\n )\n\n .unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/sign.rs", "rank": 38, "score": 45452.84045033913 }, { "content": 
"#[test]\n\n#[should_panic(expected = \"'aabbccdd' is invalid WIF format!. error msg:\")]\n\nfn test_invalid_private_key() {\n\n let matches = get_options().get_matches_from(vec![\"node\"]);\n\n let args = CommandArgs {\n\n matches,\n\n config: Some(ConfigToml {\n\n signer: Some(SignerToml {\n\n to_address: None,\n\n publickeys: None,\n\n threshold: Some(0),\n\n privatekey: Some(\"aabbccdd\".to_string()),\n\n }),\n\n ..ConfigToml::default()\n\n }),\n\n };\n\n let _privkey = args.signer_config().private_key();\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 39, "score": 45452.84045033913 }, { "content": "#[test]\n\nfn test_sign() {\n\n for n in 0..16 {\n\n let msg = {\n\n let m = format!(\"Very secret message {}: 11\", n);\n\n hash(m.as_bytes())\n\n };\n\n\n\n let v = get_random_shared_keys();\n\n\n\n let sign1 = {\n\n let s = LocalSig::compute(&msg[..], &v, &get_shared_keys(STR_SECRET1));\n\n Signature {\n\n sigma: s.gamma_i,\n\n v: v.y,\n\n }\n\n };\n\n assert!(sign1\n\n .verify(&msg[..], &get_shared_keys(STR_SECRET1).y)\n\n .is_ok());\n\n\n", "file_path": "src/crypto/test_multi_party_schnorr.rs", "rank": 40, "score": 44413.399343124445 }, { "content": "#[test]\n\nfn test_allow_no_exists_config_file() {\n\n let matches = get_options().get_matches_from(vec![\"node\", \"-c=hoge.toml\"]);\n\n let args = CommandArgs::load(matches);\n\n assert!(args.is_ok());\n\n assert!(args.unwrap().config.is_none());\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 41, "score": 44413.399343124445 }, { "content": "#[test]\n\n#[should_panic(expected = \"InvalidTomlFormat\")]\n\nfn test_invalid_format_config_file() {\n\n let matches =\n\n get_options().get_matches_from(vec![\"node\", \"-c=tests/resources/invalid_format.toml\"]);\n\n let _args = CommandArgs::load(matches).unwrap();\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 42, "score": 44413.399343124445 }, { "content": "#[test]\n\n#[allow(unused_doc_comments)]\n\nfn test_t2_n4() {\n\n /// this test 
assumes that in keygen we have n=4 parties and in signing we have 4 parties as well.\n\n let t = 2;\n\n let n = 4;\n\n let key_gen_parties_index_vec: [usize; 4] = [0, 1, 2, 3];\n\n let key_gen_parties_points_vec = (0..key_gen_parties_index_vec.len())\n\n .map(|i| key_gen_parties_index_vec[i].clone() + 1)\n\n .collect::<Vec<usize>>();\n\n\n\n let (_priv_keys_vec, priv_shared_keys_vec, Y, key_gen_vss_vec) =\n\n keygen_t_n_parties(t.clone(), n.clone(), &key_gen_parties_points_vec);\n\n let parties_index_vec: [usize; 4] = [0, 1, 2, 3];\n\n let parties_points_vec = (0..parties_index_vec.len())\n\n .map(|i| parties_index_vec[i].clone() + 1)\n\n .collect::<Vec<usize>>();\n\n\n\n let (_eph_keys_vec, eph_shared_keys_vec, V, eph_vss_vec) =\n\n keygen_t_n_parties(t.clone(), n.clone(), &parties_points_vec);\n\n let message: [u8; 4] = [79, 77, 69, 82];\n\n let local_sig_vec = (0..n.clone())\n", "file_path": "src/crypto/test_multi_party_schnorr.rs", "rank": 43, "score": 43444.8528033279 }, { "content": "#[test]\n\n#[should_panic(expected = \"Network should be same among with to_address and WIF of private_key\")]\n\nfn test_invalid_network_among_with_to_address_and_private_key() {\n\n let matches = get_options().get_matches_from(vec![\"node\"]);\n\n let args = CommandArgs {\n\n matches,\n\n config: Some(ConfigToml {\n\n signer: Some(SignerToml {\n\n to_address: Some(\"mkWk6dDtB6A1UtenWN3W24osVmbsyYD2oM\".to_string()),\n\n publickeys: None,\n\n threshold: Some(0),\n\n privatekey: Some(\n\n \"KzEdPiFuaQktBK8WdsbuzXARzzyJ9uZWB9dnq78UF17Pe1fra33P\".to_string(),\n\n ),\n\n }),\n\n ..ConfigToml::default()\n\n }),\n\n };\n\n let _to_address = args.signer_config().to_address();\n\n}\n", "file_path": "src/command_args.rs", "rank": 44, "score": 42540.187024122904 }, { "content": "#[test]\n\n#[allow(unused_doc_comments)]\n\nfn test_t2_n5_sign_with_4() {\n\n /// this test assumes that in keygen we have n=4 parties and in signing we have 4 parties, indices 0,1,3,4.\n\n let t = 2;\n\n let n = 
5;\n\n /// keygen:\n\n let key_gen_parties_index_vec: [usize; 5] = [0, 1, 2, 3, 4];\n\n let key_gen_parties_points_vec = (0..key_gen_parties_index_vec.len())\n\n .map(|i| key_gen_parties_index_vec[i].clone() + 1)\n\n .collect::<Vec<usize>>();\n\n let (_priv_keys_vec, priv_shared_keys_vec, Y, key_gen_vss_vec) =\n\n keygen_t_n_parties(t.clone(), n.clone(), &key_gen_parties_points_vec);\n\n /// signing:\n\n let parties_index_vec: [usize; 4] = [0, 1, 3, 4];\n\n let parties_points_vec = (0..parties_index_vec.len())\n\n .map(|i| parties_index_vec[i].clone() + 1)\n\n .collect::<Vec<usize>>();\n\n let num_parties = parties_index_vec.len();\n\n let (_eph_keys_vec, eph_shared_keys_vec, V, eph_vss_vec) =\n\n keygen_t_n_parties(t.clone(), num_parties.clone(), &parties_points_vec);\n\n let message: [u8; 4] = [79, 77, 69, 82];\n", "file_path": "src/crypto/test_multi_party_schnorr.rs", "rank": 45, "score": 42540.187024122904 }, { "content": "fn roundfailure(message: &str) -> ControlFlow<()> {\n\n println!(\"call roundfailure: {}\", message);\n\n publish(\"end\");\n\n ControlFlow::Continue\n\n}\n\n\n", "file_path": "src/mvs.rs", "rank": 46, "score": 41920.74819355407 }, { "content": "fn signature(message: &str) -> ControlFlow<()> {\n\n println!(\"call signature: {}\", message);\n\n publish(\"completed_block\");\n\n ControlFlow::Continue\n\n}\n\n\n", "file_path": "src/mvs.rs", "rank": 47, "score": 41920.74819355407 }, { "content": "fn completed_block(message: &str) -> ControlFlow<()> {\n\n println!(\"call completedBlock: {}\", message);\n\n publish(\"end\");\n\n ControlFlow::Continue\n\n}\n\n\n", "file_path": "src/mvs.rs", "rank": 48, "score": 40881.30708633939 }, { "content": "fn candidate_block(message: &str) -> ControlFlow<()> {\n\n println!(\"call candidateBlock: {}\", message);\n\n publish(\"signature\");\n\n ControlFlow::Continue\n\n}\n\n\n", "file_path": "src/mvs.rs", "rank": 49, "score": 40881.30708633939 }, { "content": "fn publish(message: &str) -> thread::JoinHandle<()> 
{\n\n let ctx = Ctx::new();\n\n let client = Arc::clone(ctx.client());\n\n let message_in_thread = message.to_string();\n\n thread::spawn(move || {\n\n let conn = client.get_connection().unwrap();\n\n thread::sleep(Duration::from_millis(500));\n\n println!(\"Publish {} to boo.\", message_in_thread);\n\n let _: () = conn.publish(\"boo\", message_in_thread).unwrap();\n\n })\n\n}\n\n\n", "file_path": "src/mvs.rs", "rank": 50, "score": 40026.508503647114 }, { "content": "fn get_random_shared_keys() -> SharedKeys {\n\n let privkey = Secp256k1Scalar::new_random();\n\n\n\n SharedKeys {\n\n y: &ECPoint::generator() * &privkey,\n\n x_i: privkey,\n\n }\n\n}\n\n\n", "file_path": "src/crypto/test_multi_party_schnorr.rs", "rank": 51, "score": 39518.42437359666 }, { "content": "fn process_blockvss_inner<T, C>(\n\n blockhash: Hash,\n\n shared_block_secrets: &BidirectionalSharedSecretMap,\n\n priv_shared_keys: &SharedKeys,\n\n prev_state: &NodeState,\n\n conman: &C,\n\n params: &NodeParameters<T>,\n\n) -> Result<Option<(bool, SharedKeys)>, Error>\n\nwhere\n\n T: TapyrusApi,\n\n C: ConnectionManager,\n\n{\n\n let sharing_params = params.sharing_params();\n\n log::trace!(\n\n \"number of shared_block_secrets: {:?}\",\n\n shared_block_secrets.len()\n\n );\n\n let block_opt: Option<Block> = match prev_state {\n\n NodeState::Master {\n\n candidate_block, ..\n", "file_path": "src/signer_node/message_processor/process_blockvss.rs", "rank": 52, "score": 38357.170907673164 }, { "content": "/// returns sha256 value from input.\n\nfn hash(data: &[u8]) -> [u8; 32] {\n\n use bitcoin_hashes::Hash;\n\n let hash = bitcoin_hashes::sha256::Hash::hash(data);\n\n hash.into_inner()\n\n}\n\n\n", "file_path": "src/crypto/test_multi_party_schnorr.rs", "rank": 53, "score": 38157.44910943446 }, { "content": "fn subscribe(state: &impl AppState) -> thread::JoinHandle<()> {\n\n let client = Arc::clone(state.client());\n\n thread::spawn(move || {\n\n let mut conn = client.get_connection().unwrap();\n\n\n\n 
conn.subscribe(&[\"boo\"], |msg| {\n\n let ch = msg.get_channel_name();\n\n let payload: String = msg.get_payload().unwrap();\n\n match payload.as_ref() {\n\n \"candidate_block\" => candidate_block( &payload),\n\n \"signature\" => signature( &payload),\n\n \"completed_block\" => completed_block( &payload),\n\n \"roundfailure\" => roundfailure( &payload),\n\n \"end\" => ControlFlow::Break(()),\n\n a => {\n\n println!(\"unknown message: {}\", a);\n\n ControlFlow::Break(())\n\n }\n\n }\n\n }).unwrap();\n\n })\n\n}\n\n\n", "file_path": "src/mvs.rs", "rank": 54, "score": 37408.08716383692 }, { "content": "fn to_thread_safe<T>(r: Receiver<T>) -> ThreadSafeReceiver<T> {\n\n Arc::new(Mutex::new(r))\n\n}\n\n\n\npub struct RoundTimeOutObserver {\n\n name: String,\n\n timelimit: Duration,\n\n sender: Sender<()>,\n\n pub receiver: Receiver<()>,\n\n command_sender: SyncSender<Command>,\n\n command_receiver: ThreadSafeReceiver<Command>,\n\n thread: Option<JoinHandle<()>>,\n\n state: Arc<RwLock<State>>,\n\n}\n\n\n\npub enum Command {\n\n Stop,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/timer.rs", "rank": 55, "score": 35905.75919547805 }, { "content": "fn get_shared_keys(str_secret_key: &str) -> SharedKeys {\n\n let privkey: FE = ECScalar::from(&BigInt::from_hex(str_secret_key));\n\n\n\n SharedKeys {\n\n y: &ECPoint::generator() * &privkey,\n\n x_i: privkey,\n\n }\n\n}\n\n\n", "file_path": "src/crypto/test_multi_party_schnorr.rs", "rank": 56, "score": 35253.09050544334 }, { "content": "fn read_config(file_path: &str) -> Result<ConfigToml, crate::errors::Error> {\n\n let contents = std::fs::read_to_string(file_path)?;\n\n let toml: ConfigToml = toml::from_str(&contents)?;\n\n Ok(toml)\n\n}\n\n\n", "file_path": "src/command_args.rs", "rank": 57, "score": 33080.19412411612 }, { "content": "fn create_block_vss<T, C>(block: Block, params: &NodeParameters<T>, conman: &C) -> Keys\n\nwhere\n\n T: TapyrusApi,\n\n C: ConnectionManager,\n\n{\n\n let sharing_params = 
params.sharing_params();\n\n let key = Sign::create_key(params.self_node_index + 1, None);\n\n\n\n let parties = (0..sharing_params.share_count)\n\n .map(|i| i + 1)\n\n .collect::<Vec<usize>>();\n\n\n\n let (vss_scheme, secret_shares) = VerifiableSS::share_at_indices(\n\n sharing_params.threshold,\n\n sharing_params.share_count,\n\n &key.u_i,\n\n &parties,\n\n );\n\n let order: BigInt = FE::q();\n\n let (vss_scheme_for_negative, secret_shares_for_negative) = VerifiableSS::share_at_indices(\n", "file_path": "src/signer_node/message_processor/process_candidateblock.rs", "rank": 58, "score": 27705.750478681904 }, { "content": " }\n\n\n\n let y_vec = (0..n.clone())\n\n .map(|i| party_keys_vec[i].y_i.clone())\n\n .collect::<Vec<GE>>();\n\n let mut y_vec_iter = y_vec.iter();\n\n let head = y_vec_iter.next().unwrap();\n\n let tail = y_vec_iter;\n\n let y_sum = tail.fold(head.clone(), |acc, x| acc + x);\n\n let mut vss_scheme_vec = Vec::new();\n\n let mut secret_shares_vec = Vec::new();\n\n let mut index_vec = Vec::new();\n\n for i in 0..n.clone() {\n\n let (vss_scheme, secret_shares, index) = party_keys_vec[i]\n\n .phase1_verify_com_phase2_distribute(&parames, &blind_vec, &y_vec, &bc1_vec, parties)\n\n .expect(\"invalid key\");\n\n vss_scheme_vec.push(vss_scheme);\n\n secret_shares_vec.push(secret_shares);\n\n index_vec.push(index);\n\n }\n", "file_path": "src/crypto/test_multi_party_schnorr.rs", "rank": 59, "score": 18.00442502914303 }, { "content": "\n\n // Vec of joint commitments:\n\n // n' = num of signers, n - num of parties in keygen\n\n // [com0_eph_0,... ,com0_eph_n', e*com0_kg_0, ..., e*com0_kg_n ;\n\n // ... ;\n\n // comt_eph_0,... 
,comt_eph_n', e*comt_kg_0, ..., e*comt_kg_n ]\n\n let comm_vec = (0..vss_private_keys[0].parameters.threshold + 1)\n\n .map(|i| {\n\n let mut key_gen_comm_i_vec = (0..vss_private_keys.len())\n\n .map(|j| vss_private_keys[j].commitments[i].clone() * &gamma_vec[i].e)\n\n .collect::<Vec<GE>>();\n\n let mut eph_comm_i_vec = (0..vss_ephemeral_keys.len())\n\n .map(|j| vss_ephemeral_keys[j].commitments[i].clone())\n\n .collect::<Vec<GE>>();\n\n key_gen_comm_i_vec.append(&mut eph_comm_i_vec);\n\n let mut comm_i_vec_iter = key_gen_comm_i_vec.iter();\n\n let comm_i_0 = comm_i_vec_iter.next().unwrap();\n\n comm_i_vec_iter.fold(comm_i_0.clone(), |acc, x| acc + x)\n\n })\n\n .collect::<Vec<GE>>();\n", "file_path": "src/crypto/multi_party_schnorr.rs", "rank": 60, "score": 12.931853283148092 }, { "content": " let y0 = y_vec_iter.next().unwrap();\n\n let y = y_vec_iter.fold(y0.clone(), |acc, x| acc + x);\n\n let x_i = secret_shares_vec.iter().fold(FE::zero(), |acc, x| acc + x);\n\n Ok(SharedKeys { y, x_i })\n\n }\n\n false => Err(InvalidSS),\n\n }\n\n }\n\n\n\n // remove secret shares from x_i for parties that are not participating in signing\n\n pub fn update_shared_key(\n\n shared_key: &SharedKeys,\n\n parties_in: &[usize],\n\n secret_shares_vec: &Vec<FE>,\n\n ) -> SharedKeys {\n\n let mut new_xi: FE = FE::zero();\n\n for i in 0..secret_shares_vec.len() {\n\n if parties_in.iter().find(|&&x| x == i).is_some() {\n\n new_xi = new_xi + &secret_shares_vec[i]\n\n }\n", "file_path": "src/crypto/multi_party_schnorr.rs", "rank": 61, "score": 12.093761254095424 }, { "content": "\n\nuse curv::elliptic::curves::traits::*;\n\n\n\nuse curv::cryptographic_primitives::commitments::hash_commitment::HashCommitment;\n\nuse curv::cryptographic_primitives::commitments::traits::Commitment;\n\nuse curv::cryptographic_primitives::secret_sharing::feldman_vss::VerifiableSS;\n\nuse curv::{BigInt, FE, GE};\n\nuse sha2::{Digest, Sha256};\n\n\n\nconst SECURITY: usize = 256;\n\n\n\npub struct Keys {\n\n pub 
u_i: FE,\n\n pub y_i: GE,\n\n pub party_index: usize,\n\n}\n\n\n\npub struct KeyGenBroadcastMessage1 {\n\n com: BigInt,\n\n}\n", "file_path": "src/crypto/multi_party_schnorr.rs", "rank": 62, "score": 11.790594120932589 }, { "content": " }\n\n}\n\n\n\nimpl Member {\n\n pub fn block_key(&mut self, block_key: Option<FE>) -> &mut Self {\n\n self.block_key = block_key;\n\n self\n\n }\n\n\n\n pub fn shared_block_secrets(\n\n &mut self,\n\n shared_block_secrets: BidirectionalSharedSecretMap,\n\n ) -> &mut Self {\n\n self.shared_block_secrets = shared_block_secrets;\n\n self\n\n }\n\n\n\n pub fn block_shared_keys(&mut self, block_shared_keys: Option<(bool, FE, GE)>) -> &mut Self {\n\n self.block_shared_keys = block_shared_keys;\n\n self\n", "file_path": "src/tests/helper/node_state_builder.rs", "rank": 63, "score": 10.488589543415321 }, { "content": " self.block_key = block_key;\n\n self\n\n }\n\n\n\n pub fn shared_block_secrets(\n\n &mut self,\n\n shared_block_secrets: BidirectionalSharedSecretMap,\n\n ) -> &mut Self {\n\n self.shared_block_secrets = shared_block_secrets;\n\n self\n\n }\n\n\n\n pub fn block_shared_keys(&mut self, block_shared_keys: Option<(bool, FE, GE)>) -> &mut Self {\n\n self.block_shared_keys = block_shared_keys;\n\n self\n\n }\n\n\n\n pub fn candidate_block(&mut self, candidate_block: Block) -> &mut Self {\n\n self.candidate_block = candidate_block;\n\n self\n", "file_path": "src/tests/helper/node_state_builder.rs", "rank": 64, "score": 10.441732437624943 }, { "content": " }\n\n\n\n pub fn signatures(&mut self, signatures: BTreeMap<SignerID, (FE, FE)>) -> &mut Self {\n\n self.signatures = signatures;\n\n self\n\n }\n\n\n\n pub fn round_is_done(&mut self, round_is_done: bool) -> &mut Self {\n\n self.round_is_done = round_is_done;\n\n self\n\n }\n\n}\n\n\n\npub struct Member {\n\n block_key: Option<FE>,\n\n shared_block_secrets: BidirectionalSharedSecretMap,\n\n block_shared_keys: Option<(bool, FE, GE)>,\n\n candidate_block: Option<Block>,\n\n 
master_index: usize,\n\n}\n", "file_path": "src/tests/helper/node_state_builder.rs", "rank": 65, "score": 10.156408082362267 }, { "content": " master_index, next_master_index, BidirectionalSharedSecretMap, NodeParameters, NodeState,\n\n SignerNode,\n\n };\n\n use crate::tests::helper::blocks::get_block;\n\n use crate::tests::helper::keys::TEST_KEYS;\n\n use crate::tests::helper::{address, enable_log};\n\n use redis::ControlFlow;\n\n use std::sync::mpsc::{channel, Receiver, Sender};\n\n use std::sync::Arc;\n\n use std::thread;\n\n use std::thread::JoinHandle;\n\n use std::time::Duration;\n\n\n\n pub type SpyMethod = Box<dyn Fn(Arc<Message>) -> () + Send + 'static>;\n\n\n\n /// ConnectionManager for testing.\n\n pub struct TestConnectionManager {\n\n /// This is count of messages. TestConnectionManager waits for receiving the number of message.\n\n pub receive_count: u32,\n\n /// sender of message\n", "file_path": "src/signer_node/mod.rs", "rank": 66, "score": 9.392636780768518 }, { "content": "use crate::signer_node::message_processor::process_completedblock;\n\nuse crate::signer_node::message_processor::process_nodevss;\n\nuse crate::timer::RoundTimeOutObserver;\n\nuse crate::util::*;\n\nuse curv::cryptographic_primitives::secret_sharing::feldman_vss::VerifiableSS;\n\nuse curv::elliptic::curves::traits::*;\n\nuse curv::{FE, GE};\n\npub use node_parameters::NodeParameters;\n\nuse redis::ControlFlow;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::BTreeMap;\n\nuse std::sync::mpsc::{channel, Receiver, Sender, TryRecvError};\n\nuse std::time::Duration;\n\n\n\n/// Round interval.\n\npub static ROUND_INTERVAL_DEFAULT_SECS: u64 = 60;\n\n/// Round time limit delta. 
Round timeout timer should be little longer than `ROUND_INTERVAL_DEFAULT_SECS`.\n\nstatic ROUND_TIMELIMIT_DELTA: u64 = 10;\n\n\n\npub struct SignerNode<T: TapyrusApi, C: ConnectionManager> {\n", "file_path": "src/signer_node/mod.rs", "rank": 67, "score": 9.1424279700244 }, { "content": "use crate::signer_node::NodeParameters;\n\nuse crate::tests::helper::address;\n\nuse crate::tests::helper::keys::TEST_KEYS;\n\nuse crate::tests::helper::rpc::MockRpc;\n\nuse bitcoin::{Address, PrivateKey, PublicKey};\n\n\n\npub struct NodeParametersBuilder {\n\n pubkey_list: Vec<PublicKey>,\n\n threshold: u8,\n\n private_key: PrivateKey,\n\n rpc: Option<MockRpc>,\n\n address: Address,\n\n round_duration: u64,\n\n skip_waiting_ibd: bool,\n\n}\n\n\n\nimpl NodeParametersBuilder {\n\n /// Returns instance with default value for test.(it not same with production default)\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "src/tests/helper/node_parameters_builder.rs", "rank": 68, "score": 9.046724384996722 }, { "content": " }\n\n\n\n pub fn pubkey_list(&mut self, pubkey_list: Vec<PublicKey>) -> &mut Self {\n\n self.pubkey_list = pubkey_list;\n\n self\n\n }\n\n\n\n pub fn private_key(&mut self, private_key: PrivateKey) -> &mut Self {\n\n self.private_key = private_key;\n\n self\n\n }\n\n\n\n pub fn threshold(&mut self, threshold: u8) -> &mut Self {\n\n self.threshold = threshold;\n\n self\n\n }\n\n\n\n pub fn rpc(&mut self, rpc: MockRpc) -> &mut Self {\n\n self.rpc = Some(rpc);\n\n self\n", "file_path": "src/tests/helper/node_parameters_builder.rs", "rank": 69, "score": 8.945492650577037 }, { "content": "use super::utils::sender_index;\n\nuse crate::crypto::multi_party_schnorr::Parameters;\n\nuse crate::net::SignerID;\n\nuse crate::rpc::TapyrusApi;\n\nuse bitcoin::{Address, PrivateKey, PublicKey};\n\nuse std::convert::TryInto;\n\nuse std::sync::Arc;\n\n\n\npub struct NodeParameters<T: TapyrusApi> {\n\n pub pubkey_list: Vec<PublicKey>,\n\n pub threshold: u8,\n\n pub private_key: 
PrivateKey,\n\n pub rpc: std::sync::Arc<T>,\n\n pub address: Address,\n\n pub signer_id: SignerID,\n\n pub self_node_index: usize,\n\n pub round_duration: u64,\n\n pub skip_waiting_ibd: bool,\n\n}\n\n\n", "file_path": "src/signer_node/node_parameters.rs", "rank": 70, "score": 8.815291761045096 }, { "content": "mod process_blocksig;\n\nmod process_blockvss;\n\nmod process_candidateblock;\n\nmod process_completedblock;\n\nmod process_nodevss;\n\npub use process_blocksig::process_blocksig;\n\npub use process_blockvss::process_blockvss;\n\npub use process_candidateblock::process_candidateblock;\n\npub use process_completedblock::process_completedblock;\n\npub use process_nodevss::process_nodevss;\n", "file_path": "src/signer_node/message_processor/mod.rs", "rank": 71, "score": 8.791177412227372 }, { "content": "// Copyright (c) 2019 Chaintope Inc.\n\n// Distributed under the MIT software license, see the accompanying\n\n// file COPYING or http://www.opensource.org/licenses/mit-license.php.\n\n\n\nuse bitcoin::Address;\n\nuse log::Level::Trace;\n\nuse log::{log_enabled, trace};\n\nuse serde::Deserialize;\n\n\n\nuse crate::blockdata::Block;\n\nuse crate::errors::Error;\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct GetBlockchainInfoResult {\n\n pub chain: String,\n\n pub blocks: u64,\n\n pub headers: u64,\n\n pub bestblockhash: String,\n\n pub mediantime: u64,\n\n pub initialblockdownload: bool,\n\n}\n\n\n\npub struct Rpc {\n\n client: jsonrpc::client::Client,\n\n}\n\n\n", "file_path": "src/rpc.rs", "rank": 72, "score": 8.645456229634458 }, { "content": " }\n\n\n\n pub fn address(&mut self, address: Address) -> &mut Self {\n\n self.address = address;\n\n self\n\n }\n\n\n\n pub fn round_duration(&mut self, round_duration: u64) -> &mut Self {\n\n self.round_duration = round_duration;\n\n self\n\n }\n\n\n\n pub fn skip_waiting_ibd(&mut self, skip_waiting_ibd: bool) -> &mut Self {\n\n self.skip_waiting_ibd = skip_waiting_ibd;\n\n self\n\n }\n\n}\n", "file_path": 
"src/tests/helper/node_parameters_builder.rs", "rank": 73, "score": 8.619983806686184 }, { "content": "\n\nuse curv::cryptographic_primitives::secret_sharing::feldman_vss::VerifiableSS;\n\nuse curv::FE;\n\n\n\n/// Signerの識別子。公開鍵を識別子にする。\n\n#[derive(Debug, Eq, Hash, Copy, Clone)]\n\npub struct SignerID {\n\n pub pubkey: PublicKey,\n\n}\n\n\n\nimpl SignerID {\n\n pub fn new(pubkey: PublicKey) -> Self {\n\n SignerID { pubkey }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for SignerID {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", self.pubkey)\n\n }\n", "file_path": "src/net.rs", "rank": 74, "score": 8.569709727442678 }, { "content": "// Copyright (c) 2019 Chaintope Inc.\n\n// Distributed under the MIT software license, see the accompanying\n\n// file COPYING or http://www.opensource.org/licenses/mit-license.php.\n\n\n\nuse crate::crypto::multi_party_schnorr::*;\n\nuse curv::arithmetic::traits::*;\n\nuse curv::cryptographic_primitives::secret_sharing::feldman_vss::VerifiableSS;\n\nuse curv::elliptic::curves::traits::*;\n\nuse curv::{BigInt, FE, GE};\n\n\n\nuse crate::blockdata::hash::Hash;\n\nuse crate::errors::Error;\n\nuse crate::signer_node::SharedSecretMap;\n\nuse crate::signer_node::ToShares;\n\nuse crate::signer_node::ToVerifiableSS;\n\nuse crate::util::*;\n\n\n\npub struct Sign;\n\n\n\nimpl Sign {\n", "file_path": "src/sign.rs", "rank": 75, "score": 8.448913258989554 }, { "content": "use crate::net::{BlockGenerationRoundMessageType, Message, MessageType, SignerID};\n\nuse crate::tests::helper::keys::TEST_KEYS;\n\nuse bitcoin::{Address, PrivateKey};\n\n\n\npub mod blocks;\n\npub mod keys;\n\npub mod net;\n\npub mod node_parameters_builder;\n\npub mod node_state_builder;\n\npub mod rpc;\n\n\n", "file_path": "src/tests/helper/mod.rs", "rank": 76, "score": 8.370203122557754 }, { "content": " }\n\n\n\n pub fn candidate_block(&mut self, candidate_block: Option<Block>) -> &mut Self {\n\n self.candidate_block = 
candidate_block;\n\n self\n\n }\n\n\n\n pub fn master_index(&mut self, master_index: usize) -> &mut Self {\n\n self.master_index = master_index;\n\n self\n\n }\n\n}\n", "file_path": "src/tests/helper/node_state_builder.rs", "rank": 77, "score": 8.337400942699958 }, { "content": "use crate::net::SignerID;\n\nuse bitcoin::{PrivateKey, PublicKey};\n\nuse std::str::FromStr;\n\n\n\npub struct TestKeys {\n\n pub key: [PrivateKey; 5],\n\n}\n\n\n\nlazy_static! {\n\n pub static ref TEST_KEYS: TestKeys = TestKeys::new();\n\n}\n\n\n\nimpl TestKeys {\n\n pub fn new() -> TestKeys {\n\n // corresponding public keys are:\n\n // 03831a69b8009833ab5b0326012eaf489bfea35a7321b1ca15b11d88131423fafc\n\n // 02ce7edc292d7b747fab2f23584bbafaffde5c8ff17cf689969614441e0527b900\n\n // 02785a891f323acd6cef0fc509bb14304410595914267c50467e51c87142acbb5e\n\n\n\n // command example for 3 of 5 signer node network, using same public and private keys at test.\n", "file_path": "src/tests/helper/keys.rs", "rank": 78, "score": 8.119213584491616 }, { "content": "// Copyright (c) 2019 Chaintope Inc.\n\n// Distributed under the MIT software license, see the accompanying\n\n// file COPYING or http://www.opensource.org/licenses/mit-license.php.\n\n\n\nuse std::str::FromStr;\n\n\n\nuse crate::signer_node::ROUND_INTERVAL_DEFAULT_SECS;\n\nuse bitcoin::{Address, PrivateKey, PublicKey};\n\nuse clap::{App, Arg};\n\nuse log;\n\nuse serde::Deserialize;\n\nuse std::path::PathBuf;\n\n\n\npub const OPTION_NAME_CONFIG: &str = \"config\";\n\n\n\n/// # Signer Config\n\npub const OPTION_NAME_TO_ADDRESS: &str = \"coinbase_pay_to_address\";\n\npub const OPTION_NAME_PUBLIC_KEY: &str = \"publickeys\";\n\npub const OPTION_NAME_PRIVATE_KEY: &str = \"privatekey\";\n\npub const OPTION_NAME_THRESHOLD: &str = \"threshold\";\n", "file_path": "src/command_args.rs", "rank": 79, "score": 8.096355568730985 }, { "content": "use crate::net::{ConnectionManager, ConnectionManagerError, Message, SignerID};\n\nuse redis::ControlFlow;\n\nuse 
std::cell::RefCell;\n\nuse std::sync::mpsc::Receiver;\n\nuse std::thread;\n\nuse std::thread::JoinHandle;\n\n\n\npub struct TestConnectionManager {\n\n should_broadcast: Vec<Message>,\n\n pub broadcasted: RefCell<Vec<Message>>,\n\n\n\n should_send: Vec<Message>,\n\n pub sent: RefCell<Vec<Message>>,\n\n}\n\n\n\nimpl TestConnectionManager {\n\n pub fn new() -> Self {\n\n Self {\n\n should_broadcast: vec![],\n\n broadcasted: RefCell::new(vec![]),\n", "file_path": "src/tests/helper/net.rs", "rank": 80, "score": 8.096022536133374 }, { "content": " }\n\n }\n\n\n\n pub fn should_call_testproposedblock_and_returns_invalid_block_error(&mut self) {\n\n let err = Error::JsonRpc(jsonrpc::error::Error::Rpc(jsonrpc::error::RpcError {\n\n code: -25,\n\n message: \"proposal was not based on our best chain\".to_string(),\n\n data: None,\n\n }));\n\n self.should_call_testproposedblock(Err(err));\n\n }\n\n}\n\n\n\nimpl TapyrusApi for MockRpc {\n\n fn getnewblock(&self, address: &Address) -> Result<Block, Error> {\n\n let mut list = self.getnewblock_results.borrow_mut();\n\n let result = list.pop_back().expect(&format!(\n\n \"Unexpected RPC call method=getnewblock, args(address={:?})\",\n\n address\n\n ));\n", "file_path": "src/tests/helper/rpc.rs", "rank": 82, "score": 7.984571921266823 }, { "content": " }\n\n\n\n pub fn private_key_from_wif(wif: &Value) -> PrivateKey {\n\n PrivateKey::from_wif(wif.as_str().unwrap()).unwrap()\n\n }\n\n\n\n pub fn to_fe(fe: &Value) -> FE {\n\n serde_json::from_value(fe.clone()).unwrap()\n\n }\n\n\n\n pub fn to_point(ge: &Value) -> GE {\n\n serde_json::from_value(ge.clone()).unwrap()\n\n }\n\n\n\n pub fn to_block(block: &Value) -> Option<Block> {\n\n if block.is_null() {\n\n None\n\n } else {\n\n let hex = hex::decode(block.as_str().unwrap()).unwrap();\n\n let block = Block::new(hex);\n", "file_path": "src/tests/helper/mod.rs", "rank": 83, "score": 7.837421146447198 }, { "content": " }\n\n }\n\n state @ _ => state.clone(),\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::process_blocksig;\n\n use crate::blockdata::hash::Hash;\n\n use crate::crypto::multi_party_schnorr::SharedKeys;\n\n use crate::signer_node::*;\n\n use crate::tests::helper::net::TestConnectionManager;\n\n use crate::tests::helper::node_state_builder::{Builder, Master, Member};\n\n use crate::tests::helper::rpc::MockRpc;\n\n use crate::tests::helper::test_vectors::*;\n\n use curv::FE;\n\n use serde_json::Value;\n\n use std::collections::BTreeMap;\n\n use std::iter::FromIterator;\n", "file_path": "src/signer_node/message_processor/process_blocksig.rs", "rank": 84, "score": 7.8177471020627 }, { "content": " let y = sum_point(&y_vec);\n\n let x_i = secret_shares\n\n .to_shares()\n\n .iter()\n\n .fold(FE::zero(), |acc, x| acc + x);\n\n Ok(SharedKeys { y, x_i })\n\n }\n\n false => Err(Error::InvalidSS),\n\n }\n\n }\n\n\n\n pub fn sign(\n\n eph_shared_keys: &SharedKeys,\n\n priv_shared_keys: &SharedKeys,\n\n message: Hash,\n\n ) -> Result<LocalSig, Error> {\n\n let message_slice = message.borrow_inner();\n\n let local_sig =\n\n LocalSig::compute(&message_slice.clone(), &eph_shared_keys, &priv_shared_keys);\n\n Ok(local_sig)\n", "file_path": "src/sign.rs", "rank": 86, "score": 7.786878807011016 }, { "content": " {\n\n let mut values = Vec::new();\n\n while let Some(value) = visitor.next_element()? 
{\n\n values.push(value);\n\n }\n\n Ok(values)\n\n }\n\n\n\n fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n Ok(v.to_vec())\n\n }\n\n\n\n fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n Ok(v)\n", "file_path": "src/serialize.rs", "rank": 87, "score": 7.581911209962888 }, { "content": "use crate::blockdata::Block;\n\nuse crate::net::SignerID;\n\nuse crate::signer_node::{BidirectionalSharedSecretMap, NodeState};\n\nuse crate::tests::helper::blocks::get_block;\n\nuse curv::{FE, GE};\n\nuse std::collections::BTreeMap;\n\n\n", "file_path": "src/tests/helper/node_state_builder.rs", "rank": 88, "score": 7.561388973780417 }, { "content": " let mut list = self.testproposedblock_results.borrow_mut();\n\n list.push_front(result);\n\n }\n\n\n\n pub fn should_call_getblockchaininfo(\n\n &mut self,\n\n result: Result<GetBlockchainInfoResult, Error>,\n\n ) {\n\n let mut list = self.getblockchaininfo_results.borrow_mut();\n\n match result {\n\n Ok(r) => list.push_front(r),\n\n Err(_) => unimplemented!(\"MockRpc not support testing Error result yet.\"),\n\n }\n\n }\n\n\n\n pub fn should_call_submitblock(&mut self, result: Result<(), Error>) {\n\n let mut list = self.submitblock_results.borrow_mut();\n\n match result {\n\n Ok(r) => list.push_front(r),\n\n Err(_) => unimplemented!(\"MockRpc not support testing Error result yet.\"),\n", "file_path": "src/tests/helper/rpc.rs", "rank": 89, "score": 7.471263180252669 }, { "content": "\n\n#[derive(Debug)]\n\npub struct Parameters {\n\n pub threshold: usize, //t\n\n pub share_count: usize, //n\n\n}\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct SharedKeys {\n\n pub y: GE,\n\n pub x_i: FE,\n\n}\n\n\n\nimpl Keys {\n\n pub fn phase1_create(index: usize) -> Keys {\n\n let u: FE = ECScalar::new_random();\n\n let y = &ECPoint::generator() * &u;\n\n\n\n Keys {\n\n u_i: u,\n\n y_i: y,\n", "file_path": "src/crypto/multi_party_schnorr.rs", 
"rank": 90, "score": 7.391874015197786 }, { "content": " );\n\n assert!(\n\n self.testproposedblock_results.borrow().is_empty(),\n\n \"testproposedblock RPC should be called once or more, but not.\"\n\n );\n\n assert!(\n\n self.submitblock_results.borrow().is_empty(),\n\n \"submitblock RPC should be called once or more, but not.\"\n\n );\n\n }\n\n\n\n pub fn should_call_getnewblock(&mut self, result: Result<Block, Error>) {\n\n let mut list = self.getnewblock_results.borrow_mut();\n\n match result {\n\n Ok(r) => list.push_front(r),\n\n Err(_) => unimplemented!(\"MockRpc not support testing Error result yet.\"),\n\n }\n\n }\n\n\n\n pub fn should_call_testproposedblock(&mut self, result: Result<bool, Error>) {\n", "file_path": "src/tests/helper/rpc.rs", "rank": 91, "score": 7.334252366510592 }, { "content": " }\n\n pub fn add_proof(&self, proof: Vec<u8>) -> Block {\n\n let (header, txs) = self.payload().split_at(Self::PROOF_POSITION);\n\n let new_payload = [header, &proof[..], &txs[1..]].concat();\n\n Block(new_payload)\n\n }\n\n}\n\n\n\nimpl Debug for Block {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let h = hex::encode(&self.0);\n\n write!(f, \"Block({})\", h)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const TEST_BLOCK: &str = \"010000000000000000000000000000000000000000000000000000000000000000000000c1457ff3e5c527e69858108edf0ff1f49eea9c58d8d37300a164b3b4f8c8c7cef1a2e72770d547feae29f2dd40123a97c580d44fd4493de072416d53331997617b96f05d00403a4c09253c7b583e5260074380c9b99b895f938e37799d326ded984fb707e91fa4df2e0524a4ccf5fe224945b4fb94784b411a760eb730d95402d3383dd7ffdc01010000000100000000000000000000000000000000000000000000000000000000000000000000000022210366262690cbdf648132ce0c088962c6361112582364ede120f3780ab73438fc4bffffffff0100f2052a010000002776a9226d70757956774d32596a454d755a4b72687463526b614a787062715447417346484688ac00000000\";\n", "file_path": "src/blockdata.rs", "rank": 92, "score": 
7.330371928456929 }, { "content": "// Copyright (c) 2019 Chaintope Inc.\n\n// Distributed under the MIT software license, see the accompanying\n\n// file COPYING or http://www.opensource.org/licenses/mit-license.php.\n\n\n\nuse bitcoin_hashes::{sha256d, Hash};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::fmt::Debug;\n\n\n\npub mod hash {\n\n use crate::errors::Error;\n\n use serde::{Deserialize, Serialize};\n\n use std::fmt::Debug;\n\n\n\n /// This is hash value container struct.\n\n /// This struct assumes porting value from sha256d::Hash.\n\n #[derive(Serialize, Deserialize, PartialEq)]\n\n pub struct Hash([u8; 32]);\n\n\n\n impl Hash {\n\n const LEN: usize = 32;\n", "file_path": "src/blockdata.rs", "rank": 93, "score": 7.3213592486130725 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct Signature {\n\n pub sigma: FE,\n\n pub v: GE,\n\n}\n\n\n\nimpl Signature {\n\n pub fn generate(\n\n vss_sum_local_sigs: &VerifiableSS,\n\n local_sig_vec: &Vec<LocalSig>,\n\n parties_index_vec: &[usize],\n\n v: GE,\n\n ) -> Signature {\n\n let gamma_vec = (0..parties_index_vec.len())\n\n .map(|i| local_sig_vec[i].gamma_i.clone())\n\n .collect::<Vec<FE>>();\n\n let reconstruct_limit = vss_sum_local_sigs.parameters.threshold.clone() + 1;\n", "file_path": "src/crypto/multi_party_schnorr.rs", "rank": 94, "score": 7.27614141934427 }, { "content": " use crate::errors::Error;\n\n use crate::rpc::{GetBlockchainInfoResult, TapyrusApi};\n\n use crate::signer_node::tests::create_node;\n\n use crate::signer_node::{BidirectionalSharedSecretMap, NodeState};\n\n use bitcoin::Address;\n\n use std::cell::Cell;\n\n\n\n struct MockRpc {\n\n pub results: [GetBlockchainInfoResult; 2],\n\n pub call_count: Cell<usize>,\n\n }\n\n\n\n impl TapyrusApi for MockRpc {\n\n fn getnewblock(&self, _address: &Address) -> Result<Block, Error> {\n\n unimplemented!()\n\n }\n\n fn testproposedblock(&self, _block: &Block) -> Result<bool, Error> {\n\n unimplemented!()\n\n 
}\n\n\n", "file_path": "src/signer_node/mod.rs", "rank": 95, "score": 7.267665477997635 }, { "content": " RedisManager {\n\n client,\n\n error_sender: s,\n\n error_receiver: Some(r),\n\n }\n\n }\n\n\n\n pub fn test_connection(&self) -> Result<(), errors::Error> {\n\n match self.client.get_connection() {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(errors::Error::from(e)),\n\n }\n\n }\n\n\n\n fn subscribe<F>(&self, message_processor: F, id: SignerID) -> thread::JoinHandle<()>\n\n where\n\n F: FnMut(Message) -> ControlFlow<()> + Send + 'static,\n\n {\n\n let client = Arc::clone(&self.client);\n\n let error_sender = self.error_sender.clone();\n", "file_path": "src/net.rs", "rank": 96, "score": 7.236157853280515 }, { "content": " }\n\n\n\n fn submitblock(&self, block: &Block) -> Result<(), Error> {\n\n self.call::<()>(\"submitblock\", &[block.hex().into()])\n\n }\n\n\n\n fn getblockchaininfo(&self) -> Result<GetBlockchainInfoResult, Error> {\n\n self.call::<GetBlockchainInfoResult>(\"getblockchaininfo\", &[])\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::*;\n\n use crate::tests::helper::keys::TEST_KEYS;\n\n use secp256k1::Secp256k1;\n\n\n\n pub fn get_rpc_client() -> Rpc {\n\n Rpc::new(\n\n \"http://127.0.0.1:12381\".to_string(),\n", "file_path": "src/rpc.rs", "rank": 97, "score": 7.215942161070087 }, { "content": "// Copyright (c) 2019 Chaintope Inc.\n\n// Distributed under the MIT software license, see the accompanying\n\n// file COPYING or http://www.opensource.org/licenses/mit-license.php.\n\n\n\nuse serde::de::{Error, SeqAccess, Visitor};\n\nuse std::fmt;\n\n\n\npub struct ByteBufVisitor;\n\n\n\n/// refer to https://github.com/baidu/rust-sgx-sdk/blob/9d4fa0f603e44bb82efae9d913c586a498b7d9da/third_party/serde-rs/serde/test_suite/tests/bytes/mod.rs\n\nimpl<'de> Visitor<'de> for ByteBufVisitor {\n\n type Value = Vec<u8>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"byte array\")\n\n }\n\n\n\n fn 
visit_seq<V>(self, mut visitor: V) -> Result<Self::Value, V::Error>\n\n where\n\n V: SeqAccess<'de>,\n", "file_path": "src/serialize.rs", "rank": 98, "score": 7.161585591985396 }, { "content": "\n\n pub fn should_send(&mut self, message: Message) {\n\n self.should_send.push(message);\n\n }\n\n}\n\n\n\nimpl ConnectionManager for TestConnectionManager {\n\n type ERROR = crate::errors::Error;\n\n\n\n fn broadcast_message(&self, message: Message) {\n\n let mut list = self.broadcasted.borrow_mut();\n\n list.push(message);\n\n }\n\n\n\n fn send_message(&self, message: Message) {\n\n let mut list = self.sent.borrow_mut();\n\n list.push(message);\n\n }\n\n\n\n fn start(\n", "file_path": "src/tests/helper/net.rs", "rank": 99, "score": 6.981050395057674 } ]
Rust
iml-gui/crate/src/page/login.rs
intel-hpdd/-intel-manager-for-lustre
f8a6f61205b42cc62f4bbcb8d81214ad4f215cd6
use crate::{ auth, components::{ddn_logo, ddn_logo_lettering, whamcloud_logo}, generated::css_classes::C, GMsg, MergeAttrs, }; use core::fmt; use iml_wire_types::Branding; use seed::{browser::service::fetch, prelude::*, *}; #[derive(Clone, Default, serde::Serialize)] struct Form { username: String, password: String, } #[derive(Clone, Debug, Default, serde::Deserialize)] pub struct Errors { __all__: Option<String>, password: Option<Vec<String>>, username: Option<Vec<String>>, } #[derive(Default)] pub struct Model { errors: Option<Errors>, form: Form, logging_in: bool, } impl Model { fn disabled(&self) -> bool { self.form.username.is_empty() || self.form.password.is_empty() || self.logging_in } } #[allow(clippy::large_enum_variant)] #[derive(Clone)] pub enum Msg { UsernameChange(String), PasswordChange(String), SubmitResp(fetch::FetchObject<Errors>), Submit, } impl fmt::Debug for Msg { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::PasswordChange(_) => f.write_str("*****"), _ => write!(f, "{:?}", self), } } } async fn login(form: Form) -> Result<Msg, Msg> { auth::fetch_session() .method(fetch::Method::Post) .send_json(&form) .fetch_json(Msg::SubmitResp) .await } pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) { match msg { Msg::UsernameChange(x) => model.form.username = x, Msg::PasswordChange(x) => model.form.password = x, Msg::Submit => { model.logging_in = true; orders.perform_cmd(login(model.form.clone())); } Msg::SubmitResp(x) => { match x.result { Err(e) => error!("Response error {:?}", e), Ok(x) => { if x.status.code < 400 { orders.skip().send_g_msg(GMsg::AuthProxy(Box::new(auth::Msg::LoggedIn))); } else { model.logging_in = false; match x.data { Ok(x) => model.errors = Some(x), Err(e) => error!("DataError {:?}", e), } } } }; } } } fn err_item<T>(x: &str) -> Node<T> { p![class![C.text_red_500, C.text_xs, C.italic,], x] } pub fn view(model: &Model, branding: Branding, exa_version: &Option<String>) -> 
impl View<Msg> { let input_cls = class![ C.appearance_none, C.focus__outline_none, C.focus__shadow_outline, C.px_3, C.py_2, C.rounded_sm, C.text_gray_800, C.bg_gray_200, ]; let errs = Errors::default(); let errs = model.errors.as_ref().unwrap_or_else(|| &errs); let (border_color, text_color, logo) = match branding { Branding::Whamcloud => ( C.border_teal_500, C.text_teal_500, whamcloud_logo().merge_attrs(class![C.h_16, C.w_16]), ), Branding::DDN(_) => ( C.border_red_700, C.text_black, div![ class![C.w_32, C.flex, C.flex_col, C.items_center], ddn_logo().merge_attrs(class![C.w_24, C.mb_4]), ddn_logo_lettering().merge_attrs(class![C.w_24]), ], ), }; let exa_version = if let Some(version) = exa_version { p![class![C.mt_3], "Version ", version] } else { empty![] }; div![ class![ C.bg_gray_100, C.fade_in, C.flex, C.items_center, C.justify_center, C.min_h_screen, ], form![ class![C.bg_white, C.shadow_md, C.px_16, C.py_8, C.border_b_8, border_color], ev(Ev::Submit, move |event| { event.prevent_default(); Msg::Submit }), div![ class![ C.flex_col, C.flex, C.items_center, C.justify_center, C.mb_6 text_color, ], logo, exa_version ], match errs.__all__.as_ref() { Some(x) => err_item(x), None => empty![], }, div![ class![C.mb_4], input![ class![C.mt_2], input_ev(Ev::Input, Msg::UsernameChange), &input_cls, attrs! { At::AutoFocus => true.as_at_value(), At::Required => true.as_at_value(), At::Placeholder => "Username", At::AutoComplete => "username" }, ], match errs.username.as_ref() { Some(errs) => { errs.iter().map(|x| err_item(x)).collect() } None => vec![], } ], div![ class![C.mb_6], input![ class![C.mt_2, C.mb_2], input_ev(Ev::Input, Msg::PasswordChange), &input_cls, attrs! 
{ At::Required => true, At::Type => "password", At::Placeholder => "Password", At::AutoComplete => "current-password" }, ], match errs.password.as_ref() { Some(errs) => { errs.iter().map(|x| err_item(x)).collect() } None => vec![], } ], div![ class![C.flex, C.items_center, C.justify_between], button![ class![ C.bg_gray_500 => model.disabled(), C.cursor_not_allowed => model.disabled(), C.bg_blue_500 => !model.disabled(), C.hover__bg_blue_700 => !model.disabled(), C.text_white, C.py_2, C.px_6, C.rounded_sm, C.focus__outline_none ], attrs! { At::Disabled => model.disabled().as_at_value() }, "Login", ], ], ] ] }
use crate::{ auth, components::{ddn_logo, ddn_logo_lettering, whamcloud_logo}, generated::css_classes::C, GMsg, MergeAttrs, }; use core::fmt; use iml_wire_types::Branding; use seed::{browser::service::fetch, prelude::*, *}; #[derive(Clone, Default, serde::Serialize)] struct Form { username: String, password: String, } #[derive(Clone, Debug, Default, serde::Deserialize)] pub struct Errors { __all__: Option<String>, password: Option<Vec<String>>, username: Option<Vec<String>>, } #[derive(Default)] pub struct Model { errors: Option<Errors>, form: Form, logging_in: bool, } impl Model { fn disabled(&self) -> bool { self.form.username.is_empty() || self.form.password.is_empty() || self.logging_in } } #[allow(clippy::large_enum_variant)] #[derive(Clone)] pub enum Msg { UsernameChange(String), PasswordChange(String), SubmitResp(fetch::FetchObject<Errors>), Submit, } impl fmt::Debug for Msg { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::PasswordChange(_) => f.write_str("*****"), _ => write!(f, "{:?}", self), } } } async fn login(form: Form) -> Result<Msg, Msg> { auth::fetch_session() .method(fetch::Method::Post) .send_json(&form) .fetch_json(Msg::SubmitResp) .await } pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) { match msg { Msg::UsernameChange(x) => model.form.username = x, Msg::PasswordChange(x) => model.form.password = x, Msg::Submit => { model.logging_in = true; orders.perform_cmd(login(model.form.clone())); } Msg::SubmitResp(x) => { match x.result { Err(e) => error!("Response error {:?}", e), Ok(x) => { if x.status.code < 400 { orders.skip().send_g_msg(GMsg::AuthProxy(Box::new(auth::Msg::LoggedIn))); } else { model.logging_in = false; match x.data { Ok(x) => model.errors = Some(x), Err(e) => error!("DataError {:?}", e), } } } }; } } } fn err_item<T>(x: &str) -> Node<T> { p![class![C.text_red_500, C.text_xs, C.italic,], x] } pub fn view(model: &Model, branding: Branding, exa_version: &Option<String>) -> 
impl View<Msg> { let input_cls = class![ C.appearance_none, C.focus__outline_none, C.focus__shadow_outline, C.px_3, C.py_2, C.rounded_sm, C.text_gray_800, C.bg_gray_200, ]; let errs = Errors::default(); let errs = model.errors.as_ref().unwrap_or_else(|| &errs); let (border_color, text_color, logo) = match branding { Branding::Whamcloud => ( C.border_teal_500, C.text_teal_500, whamcloud_logo().merge_attrs(class![C.h_16, C.w_16]), ), Branding::DDN(_) => ( C.border_red_700, C.text_black, div![ class![C.w_32, C.flex, C.flex_col, C.items_center], ddn_logo().merge_attrs(class![C.w_24, C.mb_4]), ddn_logo_lettering().merge_attrs(class![C.w_24]), ], ), }; let exa_version = if let Some(version) = exa_version { p![class![C.mt_3], "Version ", version] } else { empty![] }; div![ class![ C.bg_gray_100, C.fade_in, C.flex, C.items_center, C.justify_center, C.min_h_screen, ], form![ class![C.bg_white, C.shadow_md, C.px_16, C.py_8, C.border_b_8, border_color], ev(Ev::Submit, move |event| { event.prevent_default(); Msg::Submit }), div![ class![ C.flex_col, C.flex, C.items_center, C.justify_center, C.mb_6 text_color, ], logo, exa_version ], match errs.__all__.as_ref() { Some(x) => err_item(x), None => empty![], }, div![ class![C.mb_4], input![ class![C.mt_2], input_ev(Ev::Input, Msg::UsernameChange), &input_cls, attrs! { At::AutoFocus => true.as_at_value(), At::Required => true.as_at_value(), At::Placeholder => "Username", At::AutoComplete => "username" }, ], match errs.username.as_ref() { Some(errs) => { errs.iter().map(|x| err_item(x)).collect() } None => vec![], } ], div![ class![C.mb_6], input![ class![C.mt_2, C.mb_2], input_ev(Ev::Input, Msg::PasswordChange), &input_cls, attrs! { At::Required => true, At::Type => "passw
] ] }
ord", At::Placeholder => "Password", At::AutoComplete => "current-password" }, ], match errs.password.as_ref() { Some(errs) => { errs.iter().map(|x| err_item(x)).collect() } None => vec![], } ], div![ class![C.flex, C.items_center, C.justify_between], button![ class![ C.bg_gray_500 => model.disabled(), C.cursor_not_allowed => model.disabled(), C.bg_blue_500 => !model.disabled(), C.hover__bg_blue_700 => !model.disabled(), C.text_white, C.py_2, C.px_6, C.rounded_sm, C.focus__outline_none ], attrs! { At::Disabled => model.disabled().as_at_value() }, "Login", ], ],
random
[ { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Fetch => {\n\n model.cancel = None;\n\n\n\n let request = fetch_session().controller(|controller| model.request_controller = Some(controller));\n\n\n\n orders.skip().perform_cmd(request.fetch_json(Msg::Fetched));\n\n }\n\n Msg::Fetched(data) => {\n\n match data.response() {\n\n Err(fail_reason) => {\n\n log!(format!(\"Error during session poll: {}\", fail_reason.message()));\n\n orders.skip().send_msg(Msg::Loop);\n\n }\n\n Ok(resp) => {\n\n model.session = Some(resp.data);\n\n\n\n if model.session.as_ref().unwrap().needs_login() {\n\n orders.send_g_msg(GMsg::RouteChange(Route::Login.into()));\n", "file_path": "iml-gui/crate/src/auth.rs", "rank": 0, "score": 511317.81603872916 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::RouteChanged(url) => {\n\n model.route = Route::from(url);\n\n\n\n if model.route == Route::Dashboard {\n\n model.breadcrumbs.clear();\n\n }\n\n\n\n orders.send_msg(Msg::LoadPage);\n\n }\n\n Msg::UpdatePageTitle => {\n\n let title = model.page.title();\n\n document().set_title(&format!(\"{} - {}\", &title, TITLE_SUFFIX));\n\n model.breadcrumbs.push(BreadCrumb {\n\n href: model.route.to_href(),\n\n title,\n\n });\n\n }\n\n Msg::EventSourceConnect(_) => {\n", "file_path": "iml-gui/crate/src/lib.rs", "rank": 1, "score": 473653.5048564057 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Submit => {\n\n model.logging_in = true;\n\n\n\n orders.perform_cmd(update_user(model.edited_user.clone()));\n\n }\n\n Msg::SubmitResp(x) => {\n\n model.logging_in = false;\n\n\n\n match x.response() {\n\n Ok(_) => {\n\n model.toast = Some(toast::Model::Success(format!(\"{} updated\", model.user.username)));\n\n orders.send_g_msg(GMsg::UpdatePageTitle);\n\n }\n\n Err(e) => {\n\n error!(\"An error 
has occurred {:?}\", e);\n\n\n\n model.toast = Some(toast::Model::Error(format!(\n\n \"There was an issue updating {}. Please try later.\",\n", "file_path": "iml-gui/crate/src/page/user.rs", "rank": 2, "score": 466858.5629947126 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::KeyDown(code) => {\n\n if code == key_codes::ESC {\n\n orders.send_msg(Msg::Close);\n\n }\n\n }\n\n Msg::Close => {\n\n model.open = false;\n\n }\n\n Msg::Open => {\n\n model.open = true;\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/components/modal.rs", "rank": 3, "score": 466858.5629947126 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::SetTotal(total) => {\n\n model.total = total;\n\n }\n\n Msg::SetOffset(offset) => {\n\n model.offset = offset;\n\n }\n\n Msg::SetLimit(limit) => {\n\n model.limit = limit;\n\n orders.send_msg(Msg::Dropdown(dropdown::Msg::Close));\n\n }\n\n Msg::Next => {\n\n model.next_page();\n\n }\n\n Msg::Prev => {\n\n model.prev_page();\n\n }\n\n Msg::Dropdown(msg) => {\n\n dropdown::update(msg, &mut model.dropdown);\n", "file_path": "iml-gui/crate/src/components/paging.rs", "rank": 4, "score": 466858.5629947126 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::FetchOffset => {\n\n let builder = log::logs::Builder::new()\n\n .with_limit(model.pager.limit())\n\n .with_offset(model.pager.offset())\n\n .with_dir(SortDir::Desc);\n\n let query = builder.build();\n\n let req = fetch::Request::graphql_query(&query);\n\n\n\n orders.perform_cmd(req.fetch_json_data(|x| Msg::LogsFetched(x)));\n\n }\n\n Msg::LogsFetched(r) => {\n\n match r {\n\n Ok(Response::Data(d)) => {\n\n orders\n\n .proxy(Msg::Page)\n\n .send_msg(paging::Msg::SetTotal(d.data.logs.meta.total_count as usize));\n\n\n\n model.state = State::Loaded(d.data)\n", "file_path": 
"iml-gui/crate/src/page/logs.rs", "rank": 5, "score": 466858.56299471256 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::FsUsage(msg) => {\n\n fs_usage::update(msg, &mut model.fs_usage, &mut orders.proxy(Msg::FsUsage));\n\n }\n\n Msg::IoChart(msg) => {\n\n datepicker::update(msg, &mut model.io_date_picker, &mut orders.proxy(Msg::IoChart));\n\n }\n\n Msg::LNetChart(msg) => {\n\n datepicker::update(msg, &mut model.lnet_date_picker, &mut orders.proxy(Msg::LNetChart));\n\n }\n\n Msg::SfaOverview(msg) => {\n\n if let Some(overview) = model.sfa_overview.as_mut() {\n\n sfa_overview::update(msg, overview, &mut orders.proxy(Msg::SfaOverview));\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/dashboard.rs", "rank": 6, "score": 466858.56299471256 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::SortBy(table::SortBy(x)) => {\n\n let dir = if x == model.sort.0 {\n\n model.sort.1.next()\n\n } else {\n\n paging::Dir::default()\n\n };\n\n\n\n model.sort = (x, dir);\n\n\n\n orders.send_msg(Msg::Sort);\n\n }\n\n Msg::Sort => {\n\n let sort_fn = match model.sort {\n\n (SortField::Path, paging::Dir::Asc) => {\n\n Box::new(|a: &Row, b: &Row| natord::compare(&a.1[0].path, &b.1[0].path))\n\n as Box<dyn FnMut(&Row, &Row) -> Ordering>\n\n }\n\n (SortField::Path, paging::Dir::Desc) => {\n", "file_path": "iml-gui/crate/src/page/volumes.rs", "rank": 8, "score": 466858.56299471256 }, { "content": "pub fn update(msg: Msg, model: &mut Model, _: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::SelectDuration(duration) => {\n\n match duration {\n\n ChartDuration::Day => {\n\n model.from = \"now-1d\".into();\n\n model.to = \"now\".into();\n\n }\n\n ChartDuration::TwoDays => {\n\n model.from = \"now-2d\".into();\n\n model.to = \"now\".into();\n\n }\n\n ChartDuration::Week => {\n\n model.from = \"now-1w\".into();\n\n model.to 
= \"now\".into();\n\n }\n\n }\n\n\n\n model.duration = duration;\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/components/datepicker.rs", "rank": 9, "score": 466563.6367526666 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Submit => {\n\n model.submitting = true;\n\n\n\n let query =\n\n snapshot::create::build(&model.fs_name, &model.name, model.comment.as_ref(), Some(model.barrier));\n\n\n\n let req = fetch::Request::graphql_query(&query);\n\n\n\n orders.perform_cmd(req.fetch_json_data(|x| Msg::SnapshotCreateResp(x)));\n\n }\n\n Msg::SnapshotCreateResp(x) => match x {\n\n Ok(Response::Data(x)) => {\n\n let x = command_modal::Input::Commands(vec![Arc::new(x.data.create_snapshot)]);\n\n\n\n orders.send_g_msg(GMsg::OpenCommandModal(x));\n\n\n\n *model = Model {\n\n fs_name: model.fs_name.to_string(),\n", "file_path": "iml-gui/crate/src/page/snapshot/take.rs", "rank": 10, "score": 460317.77914927254 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Modal(msg) => {\n\n if msg == modal::Msg::Close {\n\n model.clear();\n\n }\n\n modal::update(msg, &mut model.modal, &mut orders.proxy(Msg::Modal));\n\n }\n\n Msg::FireCommands(cmds) => {\n\n model.select = Select(HashSet::new());\n\n model.modal.open = true;\n\n\n\n match cmds {\n\n Input::Commands(cmds) => {\n\n if let Some(cmd) = cmds.first() {\n\n model.select.perform_click(TypedId::Cmd(cmd.id));\n\n }\n\n // use the (little) optimization:\n\n // if we already have the commands and they all finished, we don't need to poll them anymore\n\n model.update_commands(cmds);\n", "file_path": "iml-gui/crate/src/components/command_modal.rs", "rank": 11, "score": 460317.77914927254 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Report(m) => report::update(m, &mut model.report, &mut 
orders.proxy(Msg::Report)),\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/stratagem/mod.rs", "rank": 12, "score": 460317.77914927254 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Take(msg) => {\n\n take::update(msg, &mut model.take, &mut orders.proxy(Msg::Take));\n\n }\n\n Msg::ListInterval(msg) => {\n\n list_interval::update(msg, &mut model.list_interval, &mut orders.proxy(Msg::ListInterval));\n\n }\n\n Msg::ListRetention(msg) => {\n\n list_retention::update(msg, &mut model.list_retention, &mut orders.proxy(Msg::ListRetention));\n\n }\n\n Msg::List(msg) => {\n\n list::update(msg, &mut model.list, &mut orders.proxy(Msg::List));\n\n }\n\n Msg::AddInterval(msg) => {\n\n add_interval::update(msg, &mut model.add_interval, &mut orders.proxy(Msg::AddInterval));\n\n }\n\n Msg::CreatRetention(msg) => {\n\n create_retention::update(msg, &mut model.create_retention, &mut orders.proxy(Msg::CreatRetention));\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/snapshot/mod.rs", "rank": 13, "score": 460317.77914927254 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::SortBy(table::SortBy(x)) => {\n\n let dir = if x == model.sort.0 {\n\n model.sort.1.next()\n\n } else {\n\n paging::Dir::default()\n\n };\n\n\n\n model.sort = (x, dir);\n\n\n\n orders.send_msg(Msg::Sort);\n\n }\n\n Msg::Page(msg) => {\n\n paging::update(msg, &mut model.pager, &mut orders.proxy(Msg::Page));\n\n }\n\n Msg::Sort => {\n\n let sort_fn = match model.sort {\n\n (SortField::Name, paging::Dir::Asc) => Box::new(|a: &Arc<SnapshotRecord>, b: &Arc<SnapshotRecord>| {\n\n natord::compare(&a.snapshot_name, &b.snapshot_name)\n", "file_path": "iml-gui/crate/src/page/snapshot/list.rs", "rank": 14, "score": 460317.77914927254 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n 
Msg::Page(m) => {\n\n paging::update(m, &mut model.pager, &mut orders.proxy(Msg::Page));\n\n }\n\n Msg::FetchReports => {\n\n model.cancel = None;\n\n let query = stratagem::list_reports::build();\n\n let req = fetch::Request::graphql_query(&query);\n\n\n\n orders.perform_cmd(req.fetch_json_data(Msg::Reports));\n\n }\n\n Msg::Reports(x) => {\n\n match x {\n\n Ok(Response::Data(d)) => {\n\n model.rows = d.data.stratagem.stratagem_reports;\n\n orders\n\n .proxy(Msg::Page)\n\n .send_msg(paging::Msg::SetTotal(model.rows.len()));\n\n orders.send_msg(Msg::Sort);\n", "file_path": "iml-gui/crate/src/page/stratagem/report.rs", "rank": 15, "score": 460317.77914927254 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::FsUsage(msg) => {\n\n fs_usage::update(msg, &mut model.fs_usage, &mut orders.proxy(Msg::FsUsage));\n\n }\n\n Msg::FsUsageChart(msg) => {\n\n datepicker::update(\n\n msg,\n\n &mut model.fs_usage_date_picker,\n\n &mut orders.proxy(Msg::FsUsageChart),\n\n );\n\n }\n\n Msg::MdtUsageChart(msg) => {\n\n datepicker::update(\n\n msg,\n\n &mut model.mdt_usage_date_picker,\n\n &mut orders.proxy(Msg::MdtUsageChart),\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/fs_dashboard.rs", "rank": 16, "score": 460317.77914927254 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::IoChart(msg) => {\n\n datepicker::update(msg, &mut model.io_date_picker, &mut orders.proxy(Msg::IoChart));\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/target_dashboard.rs", "rank": 17, "score": 460317.77914927254 }, { "content": "pub fn init(model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n model.sort.1 = paging::Dir::Desc;\n\n orders.send_msg(Msg::FetchReports);\n\n}\n", "file_path": "iml-gui/crate/src/page/stratagem/report.rs", "rank": 18, "score": 455373.681966323 }, { "content": "pub fn init(model: &mut Model, 
orders: &mut impl Orders<Msg, GMsg>) {\n\n report::init(&mut model.report, &mut orders.proxy(Msg::Report));\n\n}\n", "file_path": "iml-gui/crate/src/page/stratagem/mod.rs", "rank": 19, "score": 455373.681966323 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::FetchData => {\n\n let part = if let Some(fs_name) = &model.fs_name {\n\n format!(r#\"AND \"fs\" = '{}'\"#, fs_name)\n\n } else {\n\n \"\".into()\n\n };\n\n\n\n let query = format!(\n\n r#\"SELECT SUM(bytes_total) as bytes_total,\n\n SUM(bytes_free) as bytes_free,\n\n SUM(\"bytes_avail\") as bytes_avail\n\n FROM (\n\n SELECT LAST(\"bytes_total\") AS bytes_total,\n\n LAST(\"bytes_free\") as bytes_free,\n\n LAST(\"bytes_avail\") as bytes_avail\n\n FROM \"target\" WHERE \"kind\" = 'OST' {} GROUP BY target\n\n )\n\n\"#,\n", "file_path": "iml-gui/crate/src/components/chart/fs_usage.rs", "rank": 20, "score": 454016.5278864717 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Page(msg) => {\n\n paging::update(msg, &mut model.pager, &mut orders.proxy(Msg::Page));\n\n }\n\n Msg::Delete(x) => {\n\n if let Ok(true) = window().confirm_with_message(\"Are you sure you want to delete this retention policy?\") {\n\n let query = snapshot::remove_retention::build(x.id);\n\n\n\n let req = fetch::Request::graphql_query(&query);\n\n\n\n orders.perform_cmd(req.fetch_json_data(|x| Msg::DeleteRetentionResp(x)));\n\n }\n\n }\n\n Msg::DeleteRetentionResp(x) => match x {\n\n Ok(Response::Data(_)) => {}\n\n Ok(Response::Errors(e)) => {\n\n error!(\"An error has occurred during Snapshot deletion: \", e);\n\n }\n\n Err(e) => {\n", "file_path": "iml-gui/crate/src/page/snapshot/list_retention.rs", "rank": 21, "score": 454016.5278864717 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::SortBy(table::SortBy(x)) => {\n\n let 
dir = if x == model.sort.0 {\n\n model.sort.1.next()\n\n } else {\n\n paging::Dir::default()\n\n };\n\n\n\n model.sort = (x, dir);\n\n\n\n orders.send_msg(Msg::Sort);\n\n }\n\n Msg::Page(msg) => {\n\n paging::update(msg, &mut model.pager, &mut orders.proxy(Msg::Page));\n\n }\n\n Msg::Sort => {\n\n let sort_fn = match model.sort {\n\n (SortField::FilesystemName, paging::Dir::Asc) => {\n\n Box::new(|a: &Arc<SnapshotInterval>, b: &Arc<SnapshotInterval>| {\n", "file_path": "iml-gui/crate/src/page/snapshot/list_interval.rs", "rank": 22, "score": 454016.5278864717 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Modal(msg) => {\n\n modal::update(msg, &mut model.modal, &mut orders.proxy(Msg::Modal));\n\n }\n\n Msg::Open => {\n\n model.modal.open = true;\n\n }\n\n Msg::Close => {\n\n model.modal.open = false;\n\n }\n\n Msg::SetFilesystems(x) => {\n\n model.filesystems = x;\n\n }\n\n Msg::FsNameChanged(x) => {\n\n model.fs_name = x;\n\n }\n\n Msg::KeepNumChanged(x) => {\n\n model.keep_num = x.parse().ok();\n\n }\n", "file_path": "iml-gui/crate/src/page/snapshot/create_retention.rs", "rank": 23, "score": 454016.5278864717 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::ResizeObserved(xs) => {\n\n if let Some(x) = xs.get(0).and_then(|x| x.content_rect()) {\n\n let last_cramp = model.cramped;\n\n\n\n if x.width() <= 600. 
{\n\n model.cramped = true;\n\n } else {\n\n model.cramped = false;\n\n }\n\n\n\n if last_cramp == model.cramped {\n\n orders.skip();\n\n }\n\n }\n\n }\n\n Msg::StartObserving => {\n\n seed::document().get_element_by_id(\"sfa_overview\").and_then(|el| {\n\n let observer = model.resize_observer.as_ref()?;\n\n\n\n observer.observe(&el);\n\n\n\n Some(())\n\n });\n\n }\n\n };\n\n}\n\n\n", "file_path": "iml-gui/crate/src/components/sfa_overview/mod.rs", "rank": 24, "score": 454016.5278864717 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Modal(msg) => {\n\n modal::update(msg, &mut model.modal, &mut orders.proxy(Msg::Modal));\n\n }\n\n Msg::Open => {\n\n model.modal.open = true;\n\n }\n\n Msg::Close => {\n\n model.modal.open = false;\n\n }\n\n Msg::SetFilesystems(x) => {\n\n model.filesystems = x;\n\n }\n\n Msg::FsNameChanged(x) => {\n\n model.fs_name = x;\n\n }\n\n Msg::BarrierChanged(_) => {\n\n model.barrier = !model.barrier;\n\n }\n", "file_path": "iml-gui/crate/src/page/snapshot/add_interval.rs", "rank": 25, "score": 454016.5278864717 }, { "content": "pub fn init(model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n let resize_observer = resize_observer::init(orders, Msg::ResizeObserved);\n\n\n\n model.resize_observer = Some(resize_observer);\n\n\n\n orders.after_next_render(|_| Msg::StartObserving);\n\n}\n", "file_path": "iml-gui/crate/src/components/sfa_overview/mod.rs", "rank": 26, "score": 448421.66254032595 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::ScanStratagemModal(msg) => {\n\n scan_stratagem_modal::update(\n\n *msg,\n\n &mut model.scan_stratagem_modal,\n\n &mut orders.proxy(|x| Msg::ScanStratagemModal(Box::new(x))),\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/components/stratagem/scan_stratagem_button.rs", "rank": 27, "score": 447941.3208396565 }, { "content": "pub fn 
update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::ReportDurationPicker(msg) => {\n\n duration_picker::update(msg, &mut model.report_duration);\n\n validation::validate_report_and_purge(&mut model.report_duration, &mut model.purge_duration);\n\n }\n\n Msg::PurgeDurationPicker(msg) => {\n\n duration_picker::update(msg, &mut model.purge_duration);\n\n validation::validate_report_and_purge(&mut model.report_duration, &mut model.purge_duration);\n\n }\n\n Msg::SubmitScan => {\n\n model.scanning = true;\n\n\n\n let query = stratagem::fast_file_scan::build(\n\n &model.fsname,\n\n model\n\n .report_duration\n\n .value_as_ms()\n\n .map(Duration::from_millis)\n\n .map(|x| humantime::format_duration(x).to_string()),\n", "file_path": "iml-gui/crate/src/components/stratagem/scan_stratagem_modal.rs", "rank": 28, "score": 447941.3208396565 }, { "content": "pub fn init(orders: &mut impl Orders<Msg, GMsg>) {\n\n //FIXME: This should be proxied via webpack dev-server but there is an issue with buffering contents of SSE.\n\n let uri = if *crate::IS_PRODUCTION {\n\n \"/messaging\"\n\n } else {\n\n \"https://localhost:8443/messaging\"\n\n };\n\n\n\n let es = EventSource::new(uri).unwrap();\n\n\n\n register_eventsource_handle(EventSource::set_onopen, Msg::EventSourceConnect, &es, orders);\n\n\n\n register_eventsource_handle(EventSource::set_onmessage, Msg::EventSourceMessage, &es, orders);\n\n\n\n register_eventsource_handle(EventSource::set_onerror, Msg::EventSourceError, &es, orders);\n\n}\n\n\n", "file_path": "iml-gui/crate/src/event_source.rs", "rank": 29, "score": 443402.54657304345 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::SendJob(message, action) => {\n\n let x = SendCmd {\n\n jobs: vec![SendJob {\n\n class_name: &action.class_name,\n\n args: &action.args,\n\n }],\n\n message,\n\n };\n\n\n\n let req = 
fetch::Request::api_call(Command::endpoint_name())\n\n .with_auth()\n\n .method(fetch::Method::Post)\n\n .send_json(&x);\n\n\n\n orders\n\n .perform_cmd(req.fetch_json_data(|x| Msg::JobSent(Box::new(x))))\n\n .send_msg(Msg::Modal(modal::Msg::Close));\n\n }\n", "file_path": "iml-gui/crate/src/components/action_dropdown/confirm_action_modal.rs", "rank": 30, "score": 442079.69573634124 }, { "content": "pub fn update(msg: Msg, cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::FetchMountCommand => {\n\n model.mount_cancel = None;\n\n let query = client_mount::list_mount_command::build(model.fs.name.to_string());\n\n let req = seed::fetch::Request::graphql_query(&query);\n\n\n\n orders.perform_cmd(req.fetch_json_data(|x| Msg::MountCommandFetched(x)));\n\n }\n\n Msg::MountCommandFetched(x) => {\n\n match x {\n\n Ok(Response::Data(x)) => {\n\n model.mount_command = Some(x.data.client_mount_command);\n\n }\n\n Ok(Response::Errors(e)) => {\n\n error!(\n\n \"An error occurred while retrieving the mount command for filesytem\",\n\n model.fs.name, e\n\n );\n\n }\n", "file_path": "iml-gui/crate/src/page/filesystem.rs", "rank": 31, "score": 438097.62297924596 }, { "content": "pub fn update(cache: &ArcCache, msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Reset => {\n\n model.reset();\n\n\n\n // Add hosts\n\n model.insert(\n\n vec![Step::HostCollection].into(),\n\n TreeNode::from_items(sorted_cache(&cache.host)),\n\n );\n\n\n\n // Add fs\n\n model.insert(\n\n vec![Step::FsCollection].into(),\n\n TreeNode::from_items(sorted_cache(&cache.filesystem)),\n\n );\n\n }\n\n Msg::Add(id) => {\n\n add_item(id, cache, model, orders);\n\n }\n", "file_path": "iml-gui/crate/src/components/tree.rs", "rank": 32, "score": 438097.62297924596 }, { "content": "pub fn update(msg: Msg, cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::ServerActionDropdown(x) 
=> {\n\n let action_dropdown::IdMsg(y, msg) = x;\n\n\n\n action_dropdown::update(\n\n action_dropdown::IdMsg(y, msg),\n\n cache,\n\n &mut model.server_dropdown,\n\n &mut orders.proxy(Msg::ServerActionDropdown),\n\n );\n\n }\n\n Msg::LnetActionDropdown(msg) => {\n\n if let Some((_, d)) = model.lnet_config.as_mut() {\n\n action_dropdown::update(msg, cache, d, &mut orders.proxy(Msg::LnetActionDropdown));\n\n }\n\n }\n\n Msg::PacemakerActionDropdown(msg) => {\n\n if let Some((_, d)) = model.pacemaker_config.as_mut() {\n\n action_dropdown::update(msg, cache, d, &mut orders.proxy(Msg::PacemakerActionDropdown));\n", "file_path": "iml-gui/crate/src/page/server.rs", "rank": 33, "score": 438097.62297924596 }, { "content": "pub fn update(msg: Msg, cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::ActionDropdown(x) => {\n\n let action_dropdown::IdMsg(y, msg) = x;\n\n\n\n action_dropdown::update(\n\n action_dropdown::IdMsg(y, msg),\n\n cache,\n\n &mut model.dropdown,\n\n &mut orders.proxy(Msg::ActionDropdown),\n\n );\n\n }\n\n Msg::UpdateTarget(x) => {\n\n if x.id == model.target.id {\n\n model.target = x;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/target.rs", "rank": 34, "score": 438097.62297924596 }, { "content": "pub fn update(msg: Msg, cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::ActionDropdown(x) => {\n\n let action_dropdown::IdMsg(id, msg) = *x;\n\n\n\n if let Some(x) = model.rows.get_mut(&id) {\n\n action_dropdown::update(\n\n action_dropdown::IdMsg(id, msg),\n\n cache,\n\n &mut x.dropdown,\n\n &mut orders.proxy(|x| Msg::ActionDropdown(Box::new(x))),\n\n );\n\n }\n\n }\n\n Msg::SetTargets(xs) => {\n\n model.rows = xs\n\n .iter()\n\n .map(|x| {\n\n (\n\n x.id,\n", "file_path": "iml-gui/crate/src/page/mgts.rs", "rank": 35, "score": 438097.62297924596 }, { "content": "pub fn update(msg: Msg, cache: &ArcCache, model: &mut Model, orders: &mut 
impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::SortBy(table::SortBy(x)) => {\n\n let dir = if x == model.sort.0 {\n\n model.sort.1.next()\n\n } else {\n\n paging::Dir::default()\n\n };\n\n\n\n model.sort = (x, dir);\n\n\n\n orders.send_msg(Msg::Sort);\n\n }\n\n Msg::Sort => {\n\n let sort_fn = match model.sort {\n\n (SortField::Label, paging::Dir::Asc) => {\n\n Box::new(|a: &Arc<Host>, b: &Arc<Host>| natord::compare(a.label(), b.label()))\n\n as Box<dyn FnMut(&Arc<Host>, &Arc<Host>) -> Ordering>\n\n }\n\n (SortField::Label, paging::Dir::Desc) => {\n", "file_path": "iml-gui/crate/src/page/servers.rs", "rank": 36, "score": 438097.62297924596 }, { "content": "pub fn update(msg: Msg, records: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::Open(section) => {\n\n if model.section.as_ref().map(|x| x.into()).as_ref() == Some(&section) {\n\n orders.send_msg(Msg::Close);\n\n return;\n\n }\n\n\n\n let section = section.into();\n\n\n\n match &section {\n\n Section::Activity(_) => activity::init(&mut orders.proxy(Msg::ActivitySection)),\n\n Section::Logs(_) => logs::init(&mut orders.proxy(Msg::LogsSection)),\n\n }\n\n\n\n model.section = Some(section);\n\n }\n\n Msg::Close => {\n\n model.section = None;\n\n }\n", "file_path": "iml-gui/crate/src/status_section.rs", "rank": 37, "score": 438097.62297924596 }, { "content": "pub fn update(msg: Msg, cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::FetchOffset => {\n\n if let Ok(cmd) = fetch::Request::api_query(\n\n Alert::endpoint_name(),\n\n &[(\"limit\", model.pager.limit()), (\"offset\", model.pager.offset())],\n\n )\n\n .map(|req| req.fetch_json_data(|x| Msg::ActionsFetched(Box::new(x))))\n\n {\n\n orders.skip().perform_cmd(cmd);\n\n } else {\n\n error!(\"Could not fetch alerts.\");\n\n };\n\n }\n\n Msg::ActionsFetched(r) => {\n\n let state = mem::replace(&mut model.state, State::Loading);\n\n\n\n model.state = match (*r, state) 
{\n\n (Ok(mut resp), State::Loaded(_, mut rows)) => {\n\n update_rows(&mut resp, &mut rows);\n", "file_path": "iml-gui/crate/src/page/activity.rs", "rank": 38, "score": 438097.62297924596 }, { "content": "pub fn update(msg: Msg, cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match msg {\n\n Msg::FetchStats => {\n\n model.stats_cancel = None;\n\n let request =\n\n seed::fetch::Request::new(format!(\"/influx?db=iml_stats&q={}\", iml_influx::filesystems::query()));\n\n orders\n\n .skip()\n\n .perform_cmd(request.fetch_json_data(|x| Msg::StatsFetched(Box::new(x))));\n\n }\n\n Msg::StatsFetched(res) => {\n\n match *res {\n\n Ok(response) => {\n\n model.stats = response.into();\n\n }\n\n Err(e) => {\n\n error!(e);\n\n orders.skip();\n\n }\n\n }\n", "file_path": "iml-gui/crate/src/page/filesystems.rs", "rank": 39, "score": 438097.62297924596 }, { "content": "pub fn init(cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n model.set_records(cache, orders);\n\n\n\n if let Some(overview) = model.sfa_overview.as_mut() {\n\n sfa_overview::init(overview, &mut orders.proxy(Msg::SfaOverview));\n\n }\n\n\n\n orders.proxy(Msg::FsUsage).send_msg(fs_usage::Msg::FetchData);\n\n}\n", "file_path": "iml-gui/crate/src/page/dashboard.rs", "rank": 41, "score": 430701.8144407035 }, { "content": "pub fn init(cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n model.set_records(cache, orders);\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum Msg {\n\n Page(paging::Msg),\n\n Sort,\n\n SortBy(table::SortBy<SortField>),\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/volumes.rs", "rank": 42, "score": 430701.8144407035 }, { "content": "pub fn init(cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n orders.send_msg(Msg::SetTargets(cache.target.values().cloned().collect()));\n\n\n\n stratagem::init(cache, &model.stratagem, &mut orders.proxy(Msg::Stratagem));\n\n\n\n 
orders.send_msg(Msg::FetchStats);\n\n\n\n orders.send_msg(Msg::FetchMountCommand);\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/filesystem.rs", "rank": 43, "score": 430701.8144407035 }, { "content": "fn sink(g_msg: GMsg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n match g_msg {\n\n GMsg::UpdatePageTitle => {\n\n orders.send_msg(Msg::UpdatePageTitle);\n\n }\n\n GMsg::RouteChange(url) => {\n\n seed::push_route(url.clone());\n\n orders.send_msg(Msg::RouteChanged(url));\n\n }\n\n GMsg::AuthProxy(msg) => {\n\n orders.proxy(Msg::Auth).send_msg(msg);\n\n }\n\n GMsg::ServerDate(d) => model.server_date.basedate = Some(d),\n\n GMsg::OpenCommandModal(x) => {\n\n orders\n\n .proxy(Msg::CommandModal)\n\n .send_msg(command_modal::Msg::FireCommands(x));\n\n }\n\n }\n\n}\n", "file_path": "iml-gui/crate/src/lib.rs", "rank": 44, "score": 425228.9942768772 }, { "content": "pub fn init(cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n model.set_records(cache, orders);\n\n\n\n take::init(cache, &mut model.take);\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/snapshot/mod.rs", "rank": 45, "score": 424269.8183356221 }, { "content": "pub fn window_events(model: &Model) -> Vec<EventHandler<Msg>> {\n\n let mut xs = vec![\n\n simple_ev(Ev::Click, Msg::WindowClick),\n\n simple_ev(Ev::Resize, Msg::WindowResize),\n\n ];\n\n\n\n if model.track_slider {\n\n xs.push(simple_ev(Ev::MouseUp, Msg::StopSliderTracking));\n\n }\n\n\n\n xs\n\n}\n\n\n\n// ------ ------\n\n// Start\n\n// ------ ------\n\n\n", "file_path": "iml-gui/crate/src/lib.rs", "rank": 46, "score": 412802.2138290588 }, { "content": "pub fn init(cache: &ArcCache, model: &Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n if !model.use_stratagem {\n\n return;\n\n }\n\n\n\n orders.send_msg(Msg::SetStratagemConfig(\n\n cache.stratagem_config.values().cloned().collect(),\n\n ));\n\n\n\n orders.proxy(Msg::InodeTable).send_msg(inode_table::Msg::FetchInodes);\n\n}\n", "file_path": 
"iml-gui/crate/src/components/stratagem/mod.rs", "rank": 47, "score": 411337.80302243866 }, { "content": "fn schedule_fetch_tree(model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {\n\n let (cmd_ids, job_ids, _) = &model.select.split();\n\n // grab all the dependencies for the chosen items, except those that already loaded and completed\n\n let load_cmd_ids = extract_sorted_keys(&model.commands)\n\n .into_iter()\n\n .filter(|c| to_load_cmd(model, *c))\n\n .collect::<Vec<i32>>();\n\n let load_job_ids = cmd_ids\n\n .iter()\n\n .filter(|c| model.commands.contains_key(c))\n\n .flat_map(|c| model.commands[c].deps())\n\n .filter(|j| to_load_job(model, **j))\n\n .copied()\n\n .collect::<Vec<i32>>();\n\n let load_step_ids = job_ids\n\n .iter()\n\n .filter(|j| model.jobs.contains_key(j))\n\n .flat_map(|j| model.jobs[j].deps())\n\n .filter(|s| to_load_step(model, **s))\n\n .copied()\n", "file_path": "iml-gui/crate/src/components/command_modal.rs", "rank": 48, "score": 406905.5383806013 }, { "content": "pub fn update(msg: IdMsg, cache: &ArcCache, model: &mut Model, orders: &mut impl Orders<IdMsg, GMsg>) {\n\n let IdMsg(id, msg) = msg;\n\n\n\n match msg {\n\n Msg::StartFetch => {\n\n if model.state.is_inactive() {\n\n model.state = State::Activating;\n\n orders.send_msg(IdMsg(id, Msg::SendFetch));\n\n }\n\n }\n\n Msg::SendFetch => {\n\n model.cancel = None;\n\n\n\n let request = fetch::Request::new(format!(\n\n \"/api/action/?limit=0&{}\",\n\n composite_ids_to_query_string(&model.composite_ids)\n\n ))\n\n .controller(|controller| model.request_controller = Some(controller));\n\n\n\n orders\n", "file_path": "iml-gui/crate/src/components/action_dropdown/mod.rs", "rank": 49, "score": 402867.64422392973 }, { "content": "pub fn init<F, T>(orders: &mut impl Orders<T, GMsg>, msg: F) -> ResizeObserverWrapper\n\nwhere\n\n T: 'static,\n\n F: Fn(Vec<ResizeObserverEntry>) -> T + 'static,\n\n{\n\n let (app, msg_mapper) = (orders.clone_app(), orders.msg_mapper());\n\n\n\n let closure = 
Closure::new(move |entries: Array| {\n\n let xs = entries.iter().map(JsCast::unchecked_into).collect();\n\n\n\n app.update(msg_mapper(msg(xs)));\n\n });\n\n\n\n let inner = ResizeObserver::new(closure.as_ref().unchecked_ref()).unwrap();\n\n\n\n ResizeObserverWrapper { inner, closure }\n\n}\n", "file_path": "iml-gui/crate/src/resize_observer.rs", "rank": 50, "score": 400800.1373477606 }, { "content": "pub fn init(orders: &mut impl Orders<Msg, GMsg>) {\n\n orders.proxy(Msg::FsUsage).send_msg(fs_usage::Msg::FetchData);\n\n}\n", "file_path": "iml-gui/crate/src/page/fs_dashboard.rs", "rank": 51, "score": 394644.29614213074 }, { "content": "pub fn view(model: &Model, mut input: Node<Msg>) -> Node<Msg> {\n\n let button_cls = class![\n\n C.text_white,\n\n C.font_bold,\n\n C.p_2,\n\n C.rounded,\n\n C.rounded_l_none,\n\n C.w_full,\n\n C.h_full,\n\n C.text_sm\n\n ];\n\n\n\n if let Some(x) = model.value {\n\n input.add_attr(At::Value.as_str(), x);\n\n } else {\n\n input.add_attr(At::Value.as_str(), \"\");\n\n }\n\n\n\n if model.disabled {\n\n input = input.merge_attrs(attrs! 
{At::Disabled => true});\n", "file_path": "iml-gui/crate/src/components/duration_picker.rs", "rank": 52, "score": 392771.32948623685 }, { "content": "pub fn update(msg: Msg, model: &mut Model) {\n\n match msg {\n\n Msg::Open => {\n\n let _ = mem::replace(model, Model::Open);\n\n }\n\n Msg::Close => {\n\n let _ = mem::replace(model, Model::Close);\n\n }\n\n Msg::Toggle => {\n\n let next_state = match model {\n\n Model::Open => Model::Close,\n\n Model::Close => Model::Open,\n\n };\n\n\n\n let _ = mem::replace(model, next_state);\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/components/dropdown.rs", "rank": 53, "score": 387768.80318258685 }, { "content": "pub fn update(msg: Msg, model: &mut Model) {\n\n match msg {\n\n Msg::SetUnit(unit) => {\n\n if let Some(ms) = model.value_as_ms() {\n\n model.value = Some(convert_ms_to_unit(unit, ms));\n\n }\n\n\n\n model.unit = unit;\n\n }\n\n Msg::InputChange(ev) => {\n\n let target = ev.target().expect(\"Couldn't get input element\");\n\n let input_el = seed::to_input(&target);\n\n\n\n let value = input_el.value_as_number();\n\n\n\n model.value = if value.is_nan() { None } else { Some(value as u64) };\n\n model.validation_message = input_el.validation_message().ok().filter(|x| x != \"\");\n\n }\n\n Msg::Dropdown(msg) => {\n\n dropdown::update(msg, &mut model.dropdown);\n\n }\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/components/duration_picker.rs", "rank": 54, "score": 381014.84738667234 }, { "content": "fn after_mount(url: Url, orders: &mut impl Orders<Msg, GMsg>) -> AfterMount<Model> {\n\n event_source::init(orders);\n\n\n\n orders.send_msg(Msg::UpdatePageTitle);\n\n\n\n orders.send_msg(Msg::FetchConf);\n\n\n\n orders.proxy(Msg::Notification).perform_cmd(notification::init());\n\n\n\n orders.proxy(Msg::Auth).send_msg(Box::new(auth::Msg::Fetch));\n\n\n\n let (session_tx, session_rx) = oneshot::channel();\n\n let (messages_tx, messages_rx) = oneshot::channel();\n\n let (locks_tx, locks_rx) = 
oneshot::channel();\n\n let (conf_tx, conf_rx) = oneshot::channel();\n\n\n\n let fut = async {\n\n let (r1, r2, r3, r4) = futures::join!(session_rx, messages_rx, locks_rx, conf_rx);\n\n\n\n if let Err(e) = r1.or(r2).or(r3).or(r4) {\n", "file_path": "iml-gui/crate/src/lib.rs", "rank": 55, "score": 370014.7577080659 }, { "content": "pub fn init(cache: &ArcCache, orders: &mut impl Orders<Msg, GMsg>) {\n\n orders.send_msg(Msg::SetFilesystems(cache.filesystem.values().cloned().collect()));\n\n orders.send_msg(Msg::FetchStats);\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/filesystems.rs", "rank": 56, "score": 368680.1415595177 }, { "content": "pub fn init(cache: &ArcCache, orders: &mut impl Orders<Msg, GMsg>) {\n\n orders.send_msg(Msg::SetHosts(\n\n cache.host.values().cloned().collect(),\n\n cache.lnet_configuration.clone(),\n\n cache.pacemaker_configuration.clone(),\n\n cache.corosync_configuration.clone(),\n\n ));\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/servers.rs", "rank": 57, "score": 368680.1415595177 }, { "content": "pub fn init(cache: &ArcCache, orders: &mut impl Orders<Msg, GMsg>) {\n\n orders.send_msg(Msg::SetTargets(cache.target.values().cloned().collect()));\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/mgts.rs", "rank": 58, "score": 368680.1415595177 }, { "content": "pub fn view(model: &Model) -> impl View<Msg> {\n\n let input_cls = class![\n\n C.appearance_none,\n\n C.focus__outline_none,\n\n C.focus__shadow_outline,\n\n C.px_3,\n\n C.py_2,\n\n C.rounded_sm,\n\n C.text_gray_800,\n\n C.bg_gray_200,\n\n ];\n\n\n\n panel::view(\n\n h3![class![C.py_4, C.font_normal, C.text_lg], \"User: \", &model.user.username],\n\n div![\n\n div![\n\n class![C.text_center, C.p_4, C.h_20],\n\n if let Some(x) = model.toast.as_ref() {\n\n toast::view(x).map_msg(Msg::Toast)\n\n } else {\n", "file_path": "iml-gui/crate/src/page/user.rs", "rank": 59, "score": 360418.7518779688 }, { "content": "pub fn view(model: &Model) -> impl View<Msg> {\n\n panel::view(\n\n 
h3![class![C.py_4, C.font_normal, C.text_lg], \"Volumes\"],\n\n div![\n\n table::wrapper_view(vec![\n\n table::thead_view(vec![\n\n table::sort_header(\"Path\", SortField::Path, model.sort.0, model.sort.1).map_msg(Msg::SortBy),\n\n table::sort_header(\"Size\", SortField::Size, model.sort.0, model.sort.1).map_msg(Msg::SortBy),\n\n table::th_view(plain![\"Hosts\"]),\n\n ]),\n\n tbody![model.rows[model.pager.range()].iter().map(|(v, vns, hs)| {\n\n tr![\n\n table::td_center(resource_links::label_view(\n\n &vns[0].path,\n\n Route::Volume(RouteId::from(v.id))\n\n )),\n\n table::td_center(plain![match v.size {\n\n Some(x) => number_formatter::format_bytes(x as f64, None),\n\n None => \"---\".into(),\n\n }]),\n", "file_path": "iml-gui/crate/src/page/volumes.rs", "rank": 60, "score": 360418.7518779688 }, { "content": "pub fn view(_model: &Model) -> impl View<Msg> {\n\n seed::empty()\n\n}\n", "file_path": "iml-gui/crate/src/page/about.rs", "rank": 61, "score": 356694.9026603133 }, { "content": "/// Show the logged in user if available.\n\n/// Also show the Login / Logout link\n\npub fn auth_view(auth: &auth::Model) -> Node<Msg> {\n\n let x = match auth.get_session() {\n\n Some(session) => session,\n\n None => return empty![],\n\n };\n\n\n\n let cls = class![\n\n C.block,\n\n C.border_b_2,\n\n C.border_transparent,\n\n C.cursor_pointer\n\n C.hover__text_white,\n\n C.lg__flex_auto,\n\n C.lg__flex_col,\n\n C.lg__flex_grow_0,\n\n C.lg__flex,\n\n C.lg__h_16,\n\n C.lg__inline_block,\n\n C.lg__justify_center,\n\n C.lg__py_0,\n", "file_path": "iml-gui/crate/src/page/partial/header.rs", "rank": 62, "score": 356271.4347767917 }, { "content": "pub fn view(model: &Model) -> impl View<Msg> {\n\n vec![\n\n header![nav(model)],\n\n div![\n\n class![C.bg_menu_active, C.text_gray_300, C.text_center, C.py_2],\n\n breadcrumbs::view(&model.breadcrumbs).els()\n\n ],\n\n ]\n\n}\n", "file_path": "iml-gui/crate/src/page/partial/header.rs", "rank": 63, "score": 353611.16911627055 }, { "content": 
"fn invalid_input_err(msg: &str) -> io::Error {\n\n io::Error::new(io::ErrorKind::InvalidInput, msg)\n\n}\n\n\n", "file_path": "iml-agent/src/cli.rs", "rank": 64, "score": 352631.00451154733 }, { "content": "pub fn main_panels(model: &Model, children: impl View<page::Msg>) -> impl View<Msg> {\n\n div![\n\n class![\n\n C.fade_in,\n\n C.min_h_screen,\n\n C.flex,\n\n C.flex_col,\n\n C.select_none => model.track_slider\n\n ],\n\n // slider overlay\n\n if model.track_slider {\n\n div![\n\n class![C.w_full, C.h_full, C.fixed, C.top_0, C.cursor_ew_resize,],\n\n style! { St::ZIndex => 9999 },\n\n mouse_ev(Ev::MouseMove, |ev| {\n\n let target = ev.target().unwrap();\n\n let el = seed::to_html_el(&target);\n\n\n\n let rect = el.get_bounding_client_rect();\n\n\n", "file_path": "iml-gui/crate/src/lib.rs", "rank": 65, "score": 351118.2371800674 }, { "content": "pub fn none_error<E>(error: E) -> Error\n\nwhere\n\n E: Into<Box<dyn error::Error + Send + Sync>>,\n\n{\n\n Error::NoneError(error.into())\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Io(io::Error),\n\n TrySendError(Box<dyn error::Error + Send>),\n\n SerdeJson(serde_json::Error),\n\n LinesCodecError(LinesCodecError),\n\n LibZfsError(libzfs_types::LibZfsError),\n\n ParseIntError(num::ParseIntError),\n\n NoneError(Box<dyn error::Error + Send + Sync>),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "device-scanner/device-scanner-daemon/src/error.rs", "rank": 66, "score": 350034.4892853399 }, { "content": "pub fn view(_model: &Model) -> impl View<Msg> {\n\n div![\n\n class![\n\n C.mt_16,\n\n C.flex_grow,\n\n C.flex,\n\n C.flex_col,\n\n C.items_center,\n\n C.justify_center,\n\n // sm__\n\n C.sm__mt_24,\n\n ],\n\n h1![\n\n class![\n\n C.font_display,\n\n C.font_thin,\n\n C.text_29,\n\n C.text_gray_900,\n\n C.sm__text_45,\n\n C.lg__text_55,\n", "file_path": "iml-gui/crate/src/page/not_found.rs", "rank": 67, "score": 349525.06555777264 }, { "content": 
"pub fn view(_model: &Model) -> impl View<Msg> {\n\n seed::empty()\n\n}\n", "file_path": "iml-gui/crate/src/page/volume.rs", "rank": 68, "score": 349525.06555777264 }, { "content": "pub fn view(_model: &Model) -> impl View<Msg> {\n\n seed::empty()\n\n}\n", "file_path": "iml-gui/crate/src/page/ostpools.rs", "rank": 69, "score": 349525.06555777264 }, { "content": "pub fn view(_model: &Model) -> impl View<Msg> {\n\n div![\"welcome to jobstats\"]\n\n}\n", "file_path": "iml-gui/crate/src/page/jobstats.rs", "rank": 70, "score": 349525.06555777264 }, { "content": "pub fn view(_model: &Model) -> impl View<Msg> {\n\n seed::empty()\n\n}\n", "file_path": "iml-gui/crate/src/page/ostpool.rs", "rank": 71, "score": 349525.06555777264 }, { "content": "pub fn view(_model: &Model) -> impl View<Msg> {\n\n seed::empty()\n\n}\n", "file_path": "iml-gui/crate/src/page/targets.rs", "rank": 72, "score": 349525.06555777264 }, { "content": "pub fn view(_model: &Model) -> impl View<Msg> {\n\n seed::empty()\n\n}\n", "file_path": "iml-gui/crate/src/page/power_control.rs", "rank": 73, "score": 342717.48279607436 }, { "content": "pub fn view(model: &Model) -> Vec<Node<Msg>> {\n\n let scan_stratagem_button = button![\n\n class![\n\n C.bg_blue_500,\n\n C.hover__bg_blue_700,\n\n C.text_white,\n\n C.mt_12,\n\n C.font_bold,\n\n C.py_2,\n\n C.px_2,\n\n C.rounded,\n\n C.w_full,\n\n C.text_sm,\n\n ],\n\n \"Scan Filesystem Now\",\n\n font_awesome_outline(class![C.inline, C.h_4, C.w_4, C.ml_2], \"clock\")\n\n ];\n\n\n\n let scan_stratagem_button = if !model.disabled && !model.locked && !model.scan_stratagem_modal.scanning {\n\n scan_stratagem_button.with_listener(ev(Ev::Click, |_| scan_stratagem_modal::Msg::Modal(modal::Msg::Open)))\n", "file_path": "iml-gui/crate/src/components/stratagem/scan_stratagem_button.rs", "rank": 74, "score": 341017.9228852321 }, { "content": "pub fn view(_: &ArcCache, model: &Model) -> impl View<Msg> {\n\n div![\n\n class![C.grid, C.lg__grid_cols_2, C.gap_6],\n\n vec![\n\n 
dashboard_container::view(\n\n \"Read/Write Bandwidth\",\n\n div![\n\n class![C.h_full, C.min_h_80, C.p_2],\n\n grafana_chart::view(\n\n IML_METRICS_DASHBOARD_ID,\n\n IML_METRICS_DASHBOARD_NAME,\n\n create_chart_params(6, \"10s\", vec![(\"host_name\", &model.host_name)]),\n\n \"90%\",\n\n ),\n\n ],\n\n ),\n\n dashboard_container::view(\n\n \"CPU Usage\",\n\n div![\n\n class![C.h_full, C.min_h_80, C.p_2],\n", "file_path": "iml-gui/crate/src/page/server_dashboard.rs", "rank": 75, "score": 339806.11933729035 }, { "content": "pub fn config_view(model: &Model, locked: bool) -> Node<Msg> {\n\n let input_cls = class![\n\n C.appearance_none,\n\n C.focus__outline_none,\n\n C.focus__shadow_outline,\n\n C.px_3,\n\n C.py_2,\n\n C.rounded_sm,\n\n C.text_gray_800,\n\n C.bg_gray_200,\n\n C.col_span_5\n\n ];\n\n\n\n let mut configuration_component = vec![\n\n label![attrs! {At::For => \"scan_duration\"}, \"Scan filesystem every\"],\n\n duration_picker::view(\n\n &model.scan_duration_picker,\n\n input![\n\n &input_cls,\n\n attrs! 
{\n", "file_path": "iml-gui/crate/src/components/stratagem/mod.rs", "rank": 76, "score": 333567.1420419514 }, { "content": "pub fn view(model: &Model, cache: &ArcCache) -> impl View<Msg> {\n\n div![match &model.state {\n\n State::Loading => loading::view(),\n\n State::Fetching => div![\n\n class![C.bg_menu_active],\n\n div![\n\n class![C.px_6, C.py_4, C.bg_blue_1000],\n\n div![class![C.font_medium, C.text_lg, C.text_gray_500], \"Logs\"],\n\n div![\n\n class![C.grid, C.grid_cols_2, C.items_center, C.text_white],\n\n div![\n\n class![C.col_span_1],\n\n paging::page_count_view(&model.pager).map_msg(Msg::Page)\n\n ],\n\n div![\n\n class![C.grid, C.grid_cols_2, C.justify_end],\n\n paging::next_prev_view(&model.pager).map_msg(Msg::Page)\n\n ],\n\n ],\n\n ],\n", "file_path": "iml-gui/crate/src/page/logs.rs", "rank": 77, "score": 327273.87031834817 }, { "content": "pub fn no_vars() -> Vec<(String, String)> {\n\n vec![]\n\n}\n\n\n\n/// Create an iframe that loads the specified stratagem chart\n\npub(crate) fn view<'a, T>(\n\n dashboard_id: &str,\n\n dashboard_name: &str,\n\n chart_data: GrafanaChartData<'a>,\n\n height: &str,\n\n) -> Node<T> {\n\n iframe![attrs! 
{\n\n At::Src => format!(\"/grafana/d-solo/{}/{}?kiosk&{}\", dashboard_id, dashboard_name, serde_urlencoded::to_string(chart_data).unwrap()),\n\n At::Width => \"100%\",\n\n At::Height => height,\n\n \"frameborder\" => 0\n\n }]\n\n}\n", "file_path": "iml-gui/crate/src/components/grafana_chart.rs", "rank": 78, "score": 324696.95536240604 }, { "content": "pub fn view(cache: &ArcCache, model: &Model) -> impl View<Msg> {\n\n let enc = cache.sfa_enclosure.get(&model.id).unwrap();\n\n\n\n let expansion = build_expansion(cache, Arc::clone(&enc)).unwrap();\n\n\n\n match expansion {\n\n Expansion::SS9012(x) => ss9012::view(&x),\n\n }\n\n}\n", "file_path": "iml-gui/crate/src/page/sfa_enclosure.rs", "rank": 79, "score": 321400.6029383662 }, { "content": "fn view(model: &Model) -> Vec<Node<Msg>> {\n\n let nodes = match &model.page {\n\n Page::AppLoading => loading::view().els(),\n\n Page::About => main_panels(model, page::about::view(model).els().map_msg(page::Msg::About)).els(),\n\n Page::Dashboard(page) => main_panels(model, page::dashboard::view(page).map_msg(page::Msg::Dashboard)).els(),\n\n Page::Filesystems(page) => main_panels(\n\n model,\n\n page::filesystems::view(&model.records, page, &model.locks, model.auth.get_session())\n\n .els()\n\n .map_msg(page::Msg::Filesystems),\n\n )\n\n .els(),\n\n Page::Filesystem(page) => main_panels(\n\n model,\n\n page::filesystem::view(\n\n &model.records,\n\n page,\n\n &model.locks,\n\n model.auth.get_session(),\n\n model.conf.use_stratagem,\n", "file_path": "iml-gui/crate/src/lib.rs", "rank": 80, "score": 321358.87835557427 }, { "content": "pub fn get_var_else(name: &str, default: &str) -> String {\n\n env::var(name).unwrap_or_else(|_| default.to_string())\n\n}\n\n\n\nlazy_static! 
{\n\n // Gets the manager url or panics\n\n pub static ref MANAGER_URL: Url =\n\n Url::parse(&get_var(\"IML_MANAGER_URL\")).expect(\"Could not parse manager url\");\n\n}\n\n\n", "file_path": "iml-agent/src/env.rs", "rank": 81, "score": 320995.6859117916 }, { "content": "/// Given a paging `Model`, renders left and right chevrons if there are pages.\n\npub fn next_prev_view(paging: &Model) -> Vec<Node<Msg>> {\n\n if !paging.has_pages() {\n\n return vec![];\n\n }\n\n\n\n let cls = class![\n\n C.hover__underline,\n\n C.select_none,\n\n C.hover__text_gray_300,\n\n C.cursor_pointer\n\n ];\n\n\n\n vec![\n\n a![\n\n &cls,\n\n class![\n\n C.px_5,\n\n C.pointer_events_none => !paging.has_less(),\n\n C.text_gray_700 => !paging.has_less(),\n\n ],\n", "file_path": "iml-gui/crate/src/components/paging.rs", "rank": 82, "score": 319951.1999685335 }, { "content": "fn empty_to_dash(x: &str) -> String {\n\n if x.is_empty() {\n\n \"---\".into()\n\n } else {\n\n x.into()\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/users.rs", "rank": 83, "score": 319670.71765893034 }, { "content": "fn empty_str_to_none(x: String) -> Option<String> {\n\n match x.as_ref() {\n\n \"\" => None,\n\n _ => Some(x),\n\n }\n\n}\n\n\n", "file_path": "iml-manager-env/src/lib.rs", "rank": 84, "score": 316333.4089453706 }, { "content": "pub fn view(model: &Model, session: Option<&Session>) -> impl View<Msg> {\n\n report::view(&model.report, session).map_msg(Msg::Report)\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/stratagem/mod.rs", "rank": 85, "score": 316073.1714579489 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n div![\n\n class![C.grid, C.lg__grid_cols_2, C.gap_6, C.h_full],\n\n vec![\n\n dashboard_fs_usage::view(&model.fs_usage),\n\n dashboard_container::view(\n\n \"I/O Performance\",\n\n performance_container(\n\n &model.io_date_picker,\n\n 18,\n\n 20,\n\n vec![(\"from\", &model.io_date_picker.from), (\"to\", &model.io_date_picker.to)]\n\n )\n\n .map_msg(Msg::IoChart)\n\n 
),\n\n if let Some(overview) = model.sfa_overview.as_ref() {\n\n sfa_overview::view(overview)\n\n } else {\n\n dashboard_container::view(\n\n \"OST Balance\",\n", "file_path": "iml-gui/crate/src/page/dashboard.rs", "rank": 86, "score": 314941.7319875971 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n div![\n\n class![C.text_center],\n\n div![\n\n class![C.inline_block, C.rounded_full, C.px_2, C.text_gray_400, C.text_xs,],\n\n button![\n\n class![\n\n C.inline_block,\n\n C.bg_blue_500 => model.duration == ChartDuration::Day,\n\n C.bg_gray_200 => model.duration != ChartDuration::Day,\n\n C.px_6,\n\n C.rounded_l_full,\n\n C.text_white => model.duration == ChartDuration::Day,\n\n ],\n\n \"Day\",\n\n simple_ev(Ev::Click, Msg::SelectDuration(ChartDuration::Day))\n\n ],\n\n button![\n\n class![\n\n C.inline_block,\n", "file_path": "iml-gui/crate/src/components/datepicker.rs", "rank": 87, "score": 314941.7319875971 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n let (toast_bg, toast_status_bg, icon, status_txt, x) = match model {\n\n Model::Success(x) => (C.bg_green_600, C.bg_green_500, \"check-circle\", \"Success\", x),\n\n Model::Warn(x) => (C.bg_yellow_600, C.bg_yellow_500, \"bell\", \"Warning\", x),\n\n Model::Error(x) => (C.bg_red_600, C.bg_red_500, \"bell\", \"Error\", x),\n\n };\n\n\n\n div![\n\n class![\n\n C.text_white,\n\n C.fade_in,\n\n C.p_2,\n\n toast_bg,\n\n C.items_center,\n\n C.leading_none,\n\n C.rounded_full,\n\n C.flex,\n\n C.inline_flex,\n\n ],\n\n span![\n", "file_path": "iml-gui/crate/src/components/toast.rs", "rank": 88, "score": 314941.731987597 }, { "content": "fn empty_str_to_none(x: String) -> Option<String> {\n\n match x.as_str() {\n\n \"\" => None,\n\n y => Some(y.to_string()),\n\n }\n\n}\n\n\n", "file_path": "device-scanner/uevent-listener/src/main.rs", "rank": 89, "score": 309943.8538258183 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n let input_cls = class![\n\n C.appearance_none,\n\n 
C.focus__outline_none,\n\n C.focus__shadow_outline,\n\n C.px_3,\n\n C.py_2,\n\n C.rounded_sm,\n\n C.text_gray_800\n\n ];\n\n\n\n panel::view(\n\n h3![\n\n class![C.py_4, C.font_normal, C.text_lg],\n\n \"Take Manual Snapshot\",\n\n help_indicator(\"Take an ad-hoc filesystem snapshot\", Placement::Right),\n\n ],\n\n div![\n\n class![C.items_center],\n\n form![\n", "file_path": "iml-gui/crate/src/page/snapshot/take.rs", "rank": 90, "score": 308999.9968758512 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n div![\n\n class![C.grid, C.lg__grid_cols_2, C.gap_6],\n\n vec![\n\n dashboard_fs_usage::view(&model.fs_usage),\n\n dashboard_container::view(\n\n \"Filesystem Usage\",\n\n div![\n\n class![C.h_full, C.min_h_80, C.p_2],\n\n grafana_chart::view(\n\n IML_METRICS_DASHBOARD_ID,\n\n IML_METRICS_DASHBOARD_NAME,\n\n create_chart_params(\n\n 31,\n\n \"10s\",\n\n vec![\n\n (\"fs_name\", &model.fs_name),\n\n (\"from\", &model.fs_usage_date_picker.from),\n\n (\"to\", &model.fs_usage_date_picker.to)\n\n ]\n", "file_path": "iml-gui/crate/src/page/fs_dashboard.rs", "rank": 91, "score": 308999.9968758512 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n let input_cls = class![\n\n C.appearance_none,\n\n C.focus__outline_none,\n\n C.focus__shadow_outline,\n\n C.px_3,\n\n C.py_2,\n\n C.rounded_sm\n\n ];\n\n\n\n modal::bg_view(\n\n model.modal.open,\n\n Msg::Modal,\n\n modal::content_view(\n\n Msg::Modal,\n\n div![\n\n modal::title_view(Msg::Modal, span![\"Create Snapshot Retention Policy\"]),\n\n form![\n\n ev(Ev::Submit, move |event| {\n\n event.prevent_default();\n", "file_path": "iml-gui/crate/src/page/snapshot/create_retention.rs", "rank": 92, "score": 303355.28822201537 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n let input_cls = class![\n\n C.appearance_none,\n\n C.focus__outline_none,\n\n C.focus__shadow_outline,\n\n C.px_3,\n\n C.py_2,\n\n C.rounded_sm\n\n ];\n\n\n\n modal::bg_view(\n\n model.modal.open,\n\n Msg::Modal,\n\n 
modal::content_view(\n\n Msg::Modal,\n\n div![\n\n modal::title_view(Msg::Modal, span![\"Add Automated Snapshot Rule\"]),\n\n form![\n\n ev(Ev::Submit, move |event| {\n\n event.prevent_default();\n", "file_path": "iml-gui/crate/src/page/snapshot/add_interval.rs", "rank": 93, "score": 303355.28822201537 }, { "content": "pub fn asset_path(asset: &str) -> String {\n\n format!(\"{}/{}\", STATIC_PATH, asset)\n\n}\n\n\n\n// ------ ------\n\n// Window Events\n\n// ------ ------\n\n\n", "file_path": "iml-gui/crate/src/lib.rs", "rank": 94, "score": 302170.9082501905 }, { "content": "pub fn view(_: &ArcCache, model: &Model) -> Node<Msg> {\n\n let dashboard_type: TargetDashboard = (model.target_name.as_str()).into();\n\n\n\n div![\n\n class![C.grid, C.lg__grid_cols_2, C.gap_6],\n\n match dashboard_type {\n\n TargetDashboard::MdtDashboard => vec![\n\n dashboard_container::view(\n\n \"Metadata Operations\",\n\n div![\n\n class![C.h_full, C.min_h_80, C.p_2],\n\n grafana_chart::view(\n\n IML_METRICS_DASHBOARD_ID,\n\n IML_METRICS_DASHBOARD_NAME,\n\n create_chart_params(37, \"10s\", vec![(\"target_name\", &model.target_name)]),\n\n \"90%\",\n\n ),\n\n ],\n\n ),\n\n dashboard_container::view(\n", "file_path": "iml-gui/crate/src/page/target_dashboard.rs", "rank": 95, "score": 298184.6418141447 }, { "content": "fn required_bool(arg: &str, x: &HashMap<&str, String>) -> Result<bool, ImlAgentError> {\n\n let x = required_arg(arg, x)?;\n\n let x = x.parse()?;\n\n\n\n Ok(x)\n\n}\n\n\n", "file_path": "iml-agent/src/high_availability.rs", "rank": 96, "score": 292410.8360848049 }, { "content": "pub fn view(model: &Model, cache: &ArcCache, session: Option<&Session>) -> impl View<Msg> {\n\n let fs_names = get_fs_names(cache);\n\n\n\n if fs_names.is_empty() {\n\n //TODO Replace with a messasge saying a filesystem must be created first.\n\n return div![\"No filesystems found\"];\n\n }\n\n\n\n div![\n\n take::view(&model.take).map_msg(Msg::Take).merge_attrs(class![C.my_6]),\n\n if 
cache.snapshot_interval.is_empty() {\n\n vec![add_interval_btn(false, session)]\n\n } else {\n\n vec![\n\n list_interval::view(&model.list_interval, cache, session)\n\n .map_msg(Msg::ListInterval)\n\n .merge_attrs(class![C.my_6]),\n\n add_interval_btn(true, session),\n\n ]\n\n },\n", "file_path": "iml-gui/crate/src/page/snapshot/mod.rs", "rank": 97, "score": 291400.34206981247 }, { "content": "fn stratagem_config(model: &Model, locked: bool) -> Node<Msg> {\n\n div![\n\n class![\n\n C.bg_white,\n\n C.border,\n\n C.border_b,\n\n C.border_t,\n\n C.mt_24,\n\n C.rounded_lg,\n\n C.shadow,\n\n ],\n\n div![\n\n class![C.flex, C.justify_between, C.px_6, C._mb_px, C.bg_gray_200],\n\n h3![class![C.py_4, C.font_normal, C.text_lg], \"Configure Scanning Interval\"]\n\n ],\n\n config_view(model, locked),\n\n ]\n\n}\n\n\n", "file_path": "iml-gui/crate/src/components/stratagem/mod.rs", "rank": 98, "score": 290752.72026480373 }, { "content": "pub fn init(cache: &ArcCache, model: &mut Model) {\n\n let fs_name = get_fs_names(cache).into_iter().next();\n\n\n\n if let Some(fs_name) = fs_name {\n\n model.fs_name = fs_name.to_string();\n\n }\n\n}\n\n\n", "file_path": "iml-gui/crate/src/page/snapshot/take.rs", "rank": 99, "score": 290644.686473097 } ]
Rust
src/gossip/config.rs
devillove084/HierarchicalCache
0e6f95b758dbb9df274075d550e2b0dc8fd66699
#![allow(dead_code)] #[derive(Clone)] pub struct PeerSamplingConfig { push: bool, pull: bool, sampling_period: u64, sampling_deviation: u64, view_size: usize, healing_factor: usize, swapping_factor: usize, } impl PeerSamplingConfig { pub fn new(push: bool, pull: bool, sampling_period: u64, view_size: usize, healing_factor: usize, swapping_factor: usize) -> Self { PeerSamplingConfig { push, pull, sampling_period, sampling_deviation: 0, view_size, healing_factor, swapping_factor, } } pub fn new_with_deviation(push: bool, pull: bool, sampling_period: u64, sampling_deviation: u64, view_size: usize, healing_factor: usize, swapping_factor: usize) -> Self { PeerSamplingConfig { push, pull, sampling_period, sampling_deviation, view_size, healing_factor, swapping_factor, } } pub fn sampling_period(&self) -> u64 { self.sampling_period } pub fn sampling_deviation(&self) -> u64 { self.sampling_deviation } pub fn healing_factor(&self) -> usize { self.healing_factor } pub fn swapping_factor(&self) -> usize { self.swapping_factor } pub fn view_size(&self) -> usize { self.view_size } pub fn is_pull(&self) -> bool { self.pull } pub fn is_push(&self) -> bool { self.push } } impl Default for PeerSamplingConfig { fn default() -> Self { PeerSamplingConfig { push: true, pull: true, sampling_period: 60000, sampling_deviation: 0, view_size: 30, healing_factor: 3, swapping_factor: 12 } } } pub struct GossipConfig { push: bool, pull: bool, gossip_period: u64, gossip_deviation: u64, update_expiration: UpdateExpirationMode, } impl GossipConfig { pub fn new(push: bool, pull: bool, gossip_period: u64, update_expiration: UpdateExpirationMode) -> Self { GossipConfig { push, pull, gossip_period, gossip_deviation: 0, update_expiration, } } pub fn new_with_deviation(push: bool, pull: bool, gossip_period: u64, gossip_deviation: u64, update_expiration: UpdateExpirationMode) -> Self { GossipConfig { push, pull, gossip_period, gossip_deviation, update_expiration, } } pub fn is_push(&self) -> bool { 
self.push } pub fn is_pull(&self) -> bool { self.pull } pub fn gossip_period(&self) -> u64 { self.gossip_period } pub fn gossip_deviation(&self) -> u64 { self.gossip_deviation } pub fn update_expiration(&self) -> &UpdateExpirationMode { &self.update_expiration } } impl Default for GossipConfig { fn default() -> Self { GossipConfig { push: true, pull: true, gossip_period: 1000, gossip_deviation: 0, update_expiration: UpdateExpirationMode::None } } } #[derive(Debug, Clone)] pub enum UpdateExpirationMode { None, DurationMillis(u128), PushCount(u64), MostRecent(usize, f64), } pub enum UpdateExpirationValue { None, DurationMillis(std::time::Instant, u128), PushCount(u64), MostRecent(std::time::Instant), } impl UpdateExpirationValue { pub fn new(expiration_mode: UpdateExpirationMode) -> Self { match expiration_mode { UpdateExpirationMode::None => UpdateExpirationValue::None, UpdateExpirationMode::PushCount(count) => UpdateExpirationValue::PushCount(count), UpdateExpirationMode::DurationMillis(ms) => UpdateExpirationValue::DurationMillis(std::time::Instant::now(), ms), UpdateExpirationMode::MostRecent(_, _) => UpdateExpirationValue::MostRecent(std::time::Instant::now()), } } pub fn increase_push_count(&mut self) { match self { UpdateExpirationValue::PushCount(ref mut count) => { if *count > 0 { *count -= 1 } }, _ => (), } } pub fn has_expired(&self) -> bool { match self { UpdateExpirationValue::None => false, UpdateExpirationValue::PushCount(count) => *count == 0, UpdateExpirationValue::DurationMillis(start, ttl) => start.elapsed().as_millis() >= *ttl, UpdateExpirationValue::MostRecent(_) => false, } } }
#![allow(dead_code)] #[derive(Clone)] pub struct PeerSamplingConfig { push: bool, pull: bool, sampling_period: u64, sampling_deviation: u64, view_size: usize, healing_factor: usize, swapping_factor: usize, } impl PeerSamplingConfig { pub fn new(push: bool, pull: bool, sampling_period: u64, view_size: usize, healing_factor: usize, swapping_factor: usize) -> Self { PeerSamplingConfig { push, pull, sampling_period, sampling_deviation: 0, view_size, healing_factor, swapping_factor, } } pub fn new_with_deviation(push: bool, pull: bool, sampling_period: u64, sampling_deviation: u64, view_size: usize, healing_factor: usize, swapping_factor: usize) -> Self { PeerSamplingConfig { push, pull, sampling_period, sampling_deviation, view_size, healing_factor, swapping_factor, } } pub fn sampling_period(&self) -> u64 { self.sampling_period } pub fn sampling_deviation(&self) -> u64 { self.sampling_deviation } pub fn healing_factor(&self) -> usize { self.healing_factor } pub fn swapping_factor(&self) -> usize { self.swapping_factor } pub fn view_size(&self) -> usize { self.view_size } pub fn is_pull(&self) -> bool { self.pull } pub fn is_push(&self) -> bool { self.push } } impl Default for PeerSamplingConfig {
} pub struct GossipConfig { push: bool, pull: bool, gossip_period: u64, gossip_deviation: u64, update_expiration: UpdateExpirationMode, } impl GossipConfig { pub fn new(push: bool, pull: bool, gossip_period: u64, update_expiration: UpdateExpirationMode) -> Self { GossipConfig { push, pull, gossip_period, gossip_deviation: 0, update_expiration, } } pub fn new_with_deviation(push: bool, pull: bool, gossip_period: u64, gossip_deviation: u64, update_expiration: UpdateExpirationMode) -> Self { GossipConfig { push, pull, gossip_period, gossip_deviation, update_expiration, } } pub fn is_push(&self) -> bool { self.push } pub fn is_pull(&self) -> bool { self.pull } pub fn gossip_period(&self) -> u64 { self.gossip_period } pub fn gossip_deviation(&self) -> u64 { self.gossip_deviation } pub fn update_expiration(&self) -> &UpdateExpirationMode { &self.update_expiration } } impl Default for GossipConfig { fn default() -> Self { GossipConfig { push: true, pull: true, gossip_period: 1000, gossip_deviation: 0, update_expiration: UpdateExpirationMode::None } } } #[derive(Debug, Clone)] pub enum UpdateExpirationMode { None, DurationMillis(u128), PushCount(u64), MostRecent(usize, f64), } pub enum UpdateExpirationValue { None, DurationMillis(std::time::Instant, u128), PushCount(u64), MostRecent(std::time::Instant), } impl UpdateExpirationValue { pub fn new(expiration_mode: UpdateExpirationMode) -> Self { match expiration_mode { UpdateExpirationMode::None => UpdateExpirationValue::None, UpdateExpirationMode::PushCount(count) => UpdateExpirationValue::PushCount(count), UpdateExpirationMode::DurationMillis(ms) => UpdateExpirationValue::DurationMillis(std::time::Instant::now(), ms), UpdateExpirationMode::MostRecent(_, _) => UpdateExpirationValue::MostRecent(std::time::Instant::now()), } } pub fn increase_push_count(&mut self) { match self { UpdateExpirationValue::PushCount(ref mut count) => { if *count > 0 { *count -= 1 } }, _ => (), } } pub fn has_expired(&self) -> bool { match self { 
UpdateExpirationValue::None => false, UpdateExpirationValue::PushCount(count) => *count == 0, UpdateExpirationValue::DurationMillis(start, ttl) => start.elapsed().as_millis() >= *ttl, UpdateExpirationValue::MostRecent(_) => false, } } }
fn default() -> Self { PeerSamplingConfig { push: true, pull: true, sampling_period: 60000, sampling_deviation: 0, view_size: 30, healing_factor: 3, swapping_factor: 12 } }
function_block-full_function
[ { "content": "pub fn string_object_hash(object: &RobjPtr, seed: u64) -> usize {\n\n match object.borrow().encoding() {\n\n RobjEncoding::Raw =>\n\n murmur_hash64a(object.borrow().string(), seed) as usize,\n\n RobjEncoding::Int =>\n\n murmur_hash64a(object.borrow().integer().to_string().as_bytes(), seed) as usize,\n\n _ => unreachable!()\n\n }\n\n}\n\n\n", "file_path": "src/svalue/hash.rs", "rank": 0, "score": 171145.11598165066 }, { "content": "pub fn murmur_hash64a(key: &[u8], seed: u64) -> u64 {\n\n let m : u64 = 0xc6a4a7935bd1e995;\n\n let r : u8 = 47;\n\n\n\n let len = key.len();\n\n let mut h : u64 = seed ^ ((len as u64).wrapping_mul(m));\n\n\n\n let endpos = len-(len&7);\n\n let mut i = 0;\n\n while i != endpos {\n\n let mut k : u64;\n\n\n\n k = key[i+0] as u64;\n\n k |= (key[i+1] as u64) << 8;\n\n k |= (key[i+2] as u64) << 16;\n\n k |= (key[i+3] as u64) << 24;\n\n k |= (key[i+4] as u64) << 32;\n\n k |= (key[i+5] as u64) << 40;\n\n k |= (key[i+6] as u64) << 48;\n\n k |= (key[i+7] as u64) << 56;\n", "file_path": "src/svalue/hash.rs", "rank": 1, "score": 164554.9079938982 }, { "content": "pub fn unix_timestamp(t: &SystemTime) -> u64 {\n\n t.duration_since(SystemTime::UNIX_EPOCH).unwrap().as_millis() as u64\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 2, "score": 158384.80854543013 }, { "content": "pub fn yes_or_no(s: &str) -> Option<bool> {\n\n if s.eq_ignore_ascii_case(\"yes\") {\n\n return Some(true);\n\n } else if s.eq_ignore_ascii_case(\"no\") {\n\n return Some(false);\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 3, "score": 158024.18421732128 }, { "content": "pub fn to_system_time(timestamp: u64) -> SystemTime {\n\n SystemTime::UNIX_EPOCH + Duration::from_millis(timestamp)\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 4, "score": 155379.89351966532 }, { "content": "pub fn is_prefix_of(p: &str, haystack: &str) -> bool {\n\n let p = p.as_bytes();\n\n let hay = haystack.as_bytes();\n\n if p.len() <= hay.len() 
{\n\n for i in 0..p.len() {\n\n if p[i] != hay[i] {\n\n break;\n\n }\n\n if i == p.len() - 1 {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 5, "score": 151587.13168321014 }, { "content": "pub fn case_eq(lhs: &[u8], rhs: &[u8]) -> bool {\n\n if lhs.len() != rhs.len() {\n\n return false;\n\n }\n\n for p in rhs\n\n .iter()\n\n .map(|x| x.to_ascii_lowercase())\n\n .zip(lhs.iter()) {\n\n if p.0 != *p.1 {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 6, "score": 145746.78011637257 }, { "content": "#[inline]\n\npub fn parse_usize(s: &str) -> Result<usize, ParseIntError> {\n\n s.parse::<usize>()\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 7, "score": 136949.38706237535 }, { "content": "pub fn bytes_to_usize(b: &[u8]) -> Result<usize, Box<dyn Error>> {\n\n let s = std::str::from_utf8(b)?;\n\n let i = s.parse::<usize>()?;\n\n Ok(i)\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 8, "score": 133480.4405512837 }, { "content": "#[inline]\n\npub fn parse_usize_pair(s1: &str, s2: &str) -> Result<(usize, usize), ParseIntError> {\n\n let a: usize = parse_usize(s1)?;\n\n let b: usize = parse_usize(s2)?;\n\n Ok((a, b))\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 9, "score": 127000.02121216088 }, { "content": "pub fn human_size(s: &str) -> Result<usize, ParseIntError> {\n\n if let Ok(n) = parse_usize(s) {\n\n return Ok(n);\n\n }\n\n let err = s.parse::<usize>();\n\n\n\n let human = s.to_ascii_lowercase();\n\n if let Some(i) = human.find(|ch: char| { ch.is_ascii_alphabetic() }) {\n\n let (num, suffix) = human.split_at(i);\n\n let num: usize = match num.parse::<usize>() {\n\n Ok(n) => n,\n\n Err(_) => return err,\n\n };\n\n let power: usize = match suffix {\n\n \"b\" => 1,\n\n \"kb\" => 1024,\n\n \"mb\" => 1024 * 1024,\n\n \"gb\" => 1024 * 1024 * 1024,\n\n \"tb\" => 1024 * 1024 * 1024 * 1024,\n\n \"pb\" => 1024 * 1024 * 1024 * 1024 * 1024,\n\n _ => 
return err,\n\n };\n\n return Ok(num * power);\n\n }\n\n\n\n err\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 10, "score": 125633.09917175816 }, { "content": "fn storage_bucket(expiration_time: &SystemTime) -> u64 {\n\n expiration_time\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Unix Epoch\")\n\n .as_secs()\n\n}\n\n\n", "file_path": "src/lcache/ttl.rs", "rank": 11, "score": 107107.80738535858 }, { "content": "/// Sends a message to the specified address\n\n///\n\n/// # Arguments\n\n///\n\n/// * `address` - Address of the recipient\n\n/// * `message` - Message implementing the [Message] trait\n\npub fn send<M>(address: &SocketAddr, message: Box<M>) -> Result<usize, Box<dyn Error>>\n\nwhere M: Message + Serialize\n\n{\n\n match message.as_bytes() {\n\n Ok(mut bytes) => {\n\n // insert protocol byte for deserialization\n\n bytes.insert(0, message.protocol());\n\n let written = TcpStream::connect(address)?.write(&bytes)?;\n\n Ok(written)\n\n }\n\n Err(e) => {\n\n log::error!(\"Could not serialize message\");\n\n Err(e)?\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gossip/network.rs", "rank": 12, "score": 105682.78817306245 }, { "content": "pub trait DictPartialEq<RHS: ?Sized = Self> {\n\n fn eq(&self, other: &RHS) -> bool;\n\n}\n\n\n", "file_path": "src/svalue/dict.rs", "rank": 13, "score": 104659.41900205208 }, { "content": "struct LengthIter(usize, usize);\n\n\n\nimpl Iterator for LengthIter {\n\n type Item = u8;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.0 += 1;\n\n encode_prev_length(self.1, self.0 - 1)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/svalue/zip_list.rs", "rank": 14, "score": 104513.47383695291 }, { "content": "pub fn int_reply_to_int(bytes: &[u8]) -> i64 {\n\n assert!(bytes.len() > 3);\n\n assert_eq!(bytes[0], b':');\n\n reply_preceding_to_int(bytes)\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 15, "score": 102166.50506437547 }, { "content": "pub fn bulk_reply_to_int(bytes: &[u8]) -> i64 {\n\n assert!(bytes.len() > 
3);\n\n assert_eq!(bytes[0], b'$');\n\n reply_preceding_to_int(bytes)\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 16, "score": 102166.50506437547 }, { "content": "pub fn bytes_vec(b: &[u8]) -> Vec<u8> {\n\n b.iter().cloned().collect()\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 17, "score": 101784.00768761509 }, { "content": "fn next_power(size: usize) -> usize {\n\n let mut i = DICT_HT_INITIAL_SIZE;\n\n\n\n if size >= std::usize::MAX {\n\n return std::usize::MAX;\n\n }\n\n\n\n loop {\n\n if i >= size {\n\n return i;\n\n }\n\n i *= 2;\n\n }\n\n}\n\n\n", "file_path": "src/svalue/dict.rs", "rank": 18, "score": 100816.7952582485 }, { "content": "pub fn multi_bulk_reply_to_int(bytes: &[u8]) -> i64 {\n\n assert!(bytes.len() > 3);\n\n assert_eq!(bytes[0], b'*');\n\n reply_preceding_to_int(bytes)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_human_size() {\n\n assert_eq!(human_size(\"15\").unwrap(), 15);\n\n assert_eq!(human_size(\"0\").unwrap(), 0);\n\n assert_eq!(human_size(\"17B\").unwrap(), 17);\n\n assert_eq!(human_size(\"17kb\").unwrap(), 17 * 1024);\n\n assert_eq!(human_size(\"5gb\").unwrap(), 5 * (1 << 30));\n\n assert!(human_size(\"kb\").is_err());\n\n assert!(human_size(\"2mib\").is_err());\n\n }\n", "file_path": "src/svalue/util.rs", "rank": 19, "score": 100280.51609355713 }, { "content": "#[cfg(not(miri))]\n\n#[inline]\n\nfn num_cpus() -> usize {\n\n NCPU_INITIALIZER.call_once(|| NCPU.store(num_cpus::get_physical(), Ordering::Relaxed));\n\n NCPU.load(Ordering::Relaxed)\n\n}", "file_path": "src/chashmap/map.rs", "rank": 20, "score": 98108.0018725933 }, { "content": "fn prev_length_size(len: usize) -> usize {\n\n if len < 254 {\n\n 1\n\n } else {\n\n 5\n\n }\n\n}\n\n\n", "file_path": "src/svalue/zip_list.rs", "rank": 21, "score": 96953.58988322417 }, { "content": "#[inline]\n\npub fn parse_port(s: &str) -> Result<u16, ParseIntError> {\n\n s.parse::<u16>()\n\n}\n\n\n", "file_path": 
"src/svalue/util.rs", "rank": 22, "score": 94033.84915496196 }, { "content": "/// Val Trait alias to reduce redundancy in type decl.\n\npub trait Val<A: Ord>: Clone + Default + ResetRemove<A> + CmRDT {}\n\n\n\nimpl<A, T> Val<A> for T\n\nwhere\n\n A: Ord,\n\n T: Clone + Default + ResetRemove<A> + CmRDT,\n\n{\n\n}\n\n#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct Map<K: Ord, V: Val<A>, A: Ord + Hash> {\n\n // This clock stores the current version of the Map, it should\n\n // be greator or equal to all Entry.clock's in the Map.\n\n clock: VClock<A>,\n\n entries: BTreeMap<K, Entry<V, A>>,\n\n deferred: HashMap<VClock<A>, BTreeSet<K>>,\n\n}\n\n\n", "file_path": "src/crdts/map.rs", "rank": 23, "score": 92542.31822329383 }, { "content": "pub fn generate_key_from_pattern(pat: &[u8], s: &[u8]) -> Vec<u8> {\n\n let k = match pat.iter()\n\n .enumerate()\n\n .filter(|c| *((*c).1) == b'*')\n\n .next() {\n\n Some(i) => i.0,\n\n None => return pat.to_vec(),\n\n };\n\n let mut ret: Vec<u8> = Vec::with_capacity(pat.len() + s.len() - 1);\n\n ret.extend_from_slice(&pat[0..k]);\n\n ret.extend_from_slice(&s[..]);\n\n ret.extend_from_slice(&pat[k + 1..]);\n\n ret\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 24, "score": 92196.96759657163 }, { "content": "struct HashSetVisitor<T, S> {\n\n type_marker: PhantomData<T>,\n\n hash_builder_marker: PhantomData<S>,\n\n}\n\n\n\nimpl<T, S> HashSetVisitor<T, S> {\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n type_marker: PhantomData,\n\n hash_builder_marker: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<'de, T, S> Visitor<'de> for HashSetVisitor<T, S>\n\nwhere\n\n T: 'static + Deserialize<'de> + Send + Sync + Hash + Clone + Ord,\n\n S: Default + BuildHasher,\n\n{\n\n type Value = HashSet<T, S>;\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 25, "score": 91428.72514686709 }, { "content": "pub fn bytes_to_f64(b: &[u8]) -> Result<f64, Box<dyn Error>> {\n\n let s = std::str::from_utf8(b)?;\n\n let n = 
s.parse::<f64>()?;\n\n Ok(n)\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 26, "score": 90564.90264387036 }, { "content": "pub fn bytes_to_i64(b: &[u8]) -> Result<i64, Box<dyn Error>> {\n\n let s = std::str::from_utf8(b)?;\n\n let i = s.parse::<i64>()?;\n\n Ok(i)\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 27, "score": 90564.90264387036 }, { "content": "pub fn parse_port_from_bytes(b: &[u8]) -> Result<u16, Box<dyn Error>> {\n\n let s = std::str::from_utf8(b)?;\n\n let port = parse_port(s)?;\n\n Ok(port)\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 28, "score": 88878.33521184044 }, { "content": "fn encode_prev_length(len: usize, idx: usize) -> Option<u8> {\n\n if len < 254 {\n\n if idx != 0 {\n\n return None;\n\n }\n\n return Some(len as u8);\n\n }\n\n if len < std::u32::MAX as usize {\n\n if idx == 0 {\n\n return Some(0xfe);\n\n }\n\n if idx < 5 {\n\n return Some(((len >> (4 - idx) * 8) & 0xff) as u8);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/svalue/zip_list.rs", "rank": 29, "score": 88229.30324947053 }, { "content": "fn decode_prev_length(x: &[u8]) -> usize {\n\n if x[0] != 0xfe {\n\n return x[0] as usize;\n\n }\n\n let mut v = 0;\n\n for i in 1..5 {\n\n v <<= 8;\n\n v |= x[i] as usize;\n\n }\n\n v\n\n}\n\n\n", "file_path": "src/svalue/zip_list.rs", "rank": 30, "score": 87815.1551908836 }, { "content": "struct HashMapVisitor<K, V, S> {\n\n key_marker: PhantomData<K>,\n\n value_marker: PhantomData<V>,\n\n hash_builder_marker: PhantomData<S>,\n\n}\n\n\n\nimpl<K, V, S> Serialize for HashMapRef<'_, K, V, S>\n\nwhere\n\n K: Serialize,\n\n V: Serialize,\n\n{\n\n fn serialize<Sr>(&self, serializer: Sr) -> Result<Sr::Ok, Sr::Error>\n\n where\n\n Sr: Serializer,\n\n {\n\n serializer.collect_map(self.iter())\n\n }\n\n}\n\n\n\nimpl<K, V, S> Serialize for HashMap<K, V, S>\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 31, "score": 87675.44341743717 }, { "content": "fn prev_length_iter(len: usize) -> LengthIter {\n\n 
LengthIter(0, len)\n\n}\n\n\n", "file_path": "src/svalue/zip_list.rs", "rank": 32, "score": 83921.79860230707 }, { "content": "fn write_prev_length(len: usize, x: &mut [u8]) {\n\n assert_eq!(prev_length_size(len), x.len());\n\n for p in x.iter_mut().zip(prev_length_iter(len)) {\n\n *p.0 = p.1;\n\n }\n\n}\n\n\n", "file_path": "src/svalue/zip_list.rs", "rank": 33, "score": 81366.86030660813 }, { "content": "fn force_write_large_prev_length(len: usize, x: &mut [u8]) {\n\n assert_eq!(x.len(), 5);\n\n assert!(len < 254);\n\n x[4] = len as u8;\n\n}\n\n\n", "file_path": "src/svalue/zip_list.rs", "rank": 34, "score": 77890.92954574537 }, { "content": "/// Starts listening to TCP connections\n\n///\n\n/// # Arguments\n\n///\n\n/// * `address` - Bind address\n\n/// * `shutdown` - Flag used to check for a shutdown request\n\n/// * `peer_sampling_sender` - Used to dispatch peer sampling messages\n\n/// * `header_sender` - Used to dispatch gossip header messages\n\n/// * `content_sender` - Used to dispatch gossip content messages\n\npub fn listen(address: &SocketAddr, shutdown: Arc<std::sync::atomic::AtomicBool>, peer_sampling_sender: Sender<PeerSamplingMessage>, header_sender: Sender<HeaderMessage>, content_sender: Sender<ContentMessage>) -> std::io::Result<JoinHandle<()>> {\n\n\n\n let listener = std::net::TcpListener::bind(address)?;\n\n log::info!(\"Listener started at {}\", address);\n\n Ok(std::thread::Builder::new().name(format!(\"{} - gossip listener\", address)).spawn(move || {\n\n log::info!(\"Started listener thread\");\n\n // TODO: handle hanging connections where peer connect but does not write\n\n for incoming_stream in listener.incoming() {\n\n\n\n // check for shutdown request\n\n if shutdown.load(std::sync::atomic::Ordering::SeqCst) {\n\n log::info!(\"Shutdown requested\");\n\n break;\n\n }\n\n\n\n // TODO: handle in new thread or worker\n\n // handle request\n\n match incoming_stream {\n\n Ok(mut stream) => {\n\n let mut buf = Vec::new();\n", "file_path": 
"src/gossip/network.rs", "rank": 35, "score": 71541.49239705059 }, { "content": "/// The view at each node\n\nstruct View {\n\n /// The address of the node\n\n host_address: String,\n\n /// The list of peers in the node view\n\n peers: Vec<Peer>,\n\n /// The queue from which peer are retrieved for the application layer\n\n queue: VecDeque<Peer>,\n\n}\n\nimpl View {\n\n /// Creates a new view with the node's address\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `address` - Addres of peer\n\n fn new(host_address: String) -> View {\n\n View {\n\n host_address,\n\n peers: vec![],\n\n queue: VecDeque::new(),\n\n }\n", "file_path": "src/gossip/sampling.rs", "rank": 36, "score": 67696.8472809486 }, { "content": "fn main() {\n\n \n\n // let mut test = Cache::with_on_evict(100000, DB_Cache::default()).with_metrics();\n\n // let mut db_with_test = db::db::DB::new(0);\n\n \n\n // for i in 0..100 {\n\n // db_with_test.dict.add(Robj::create_string_object_from_long(i), Robj::create_string_object_from_long(i));\n\n // }\n\n\n\n let mut key = vec![1u8];\n\n let mut value = vec![1u8];\n\n let mut r = rand::thread_rng();\n\n for i in 0..999 {\n\n let n: u8 = r.gen();\n\n key.push(n);\n\n value.push(n);\n\n }\n\n\n\n println!(\"This is len of key {}\", key.len());\n\n println!(\"This is len of value {}\", value.len());\n", "file_path": "src/main.rs", "rank": 37, "score": 66664.27389800026 }, { "content": "struct EncodingIter {\n\n enc: Encoding,\n\n curr: usize,\n\n}\n\n\n\nimpl Iterator for EncodingIter {\n\n type Item = u8;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.curr < self.enc.blob_len() {\n\n self.curr += 1;\n\n Some(self.enc.index(self.curr - 1))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ZipListValue<'a> {\n", "file_path": "src/svalue/zip_list.rs", "rank": 38, "score": 65021.374268815634 }, { "content": "struct Node<'a> {\n\n prev_raw_len: usize,\n\n prev_raw_len_size: usize,\n\n encoding: Encoding,\n\n content: &'a 
[u8],\n\n}\n\n\n\nimpl<'a> Node<'a> {\n\n fn new(x: &'a [u8]) -> Node<'a> {\n\n let prev_raw_len = decode_prev_length(x);\n\n let prev_raw_len_size = prev_length_size(prev_raw_len);\n\n let encoding = Encoding::parse(&x[prev_raw_len_size..]);\n\n Node {\n\n prev_raw_len,\n\n prev_raw_len_size,\n\n encoding,\n\n content: x,\n\n }\n\n }\n\n\n", "file_path": "src/svalue/zip_list.rs", "rank": 39, "score": 63830.812856605764 }, { "content": "type LogTime = u64;\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, Default)]\n\npub struct VectorEntry {\n\n // The version of the next message we'd like to see\n\n next_version: LogTime,\n\n exceptions: HashSet<LogTime>,\n\n}\n\n\n\nimpl VectorEntry {\n\n pub fn new() -> Self {\n\n VectorEntry::default()\n\n }\n\n\n\n pub fn increment(&mut self, clk: LogTime) {\n\n match clk.cmp(&self.next_version) {\n\n // We've resolved an exception\n\n Ordering::Less => {\n\n self.exceptions.remove(&clk);\n\n }\n", "file_path": "src/crdts/vvwe.rs", "rank": 40, "score": 63042.41067886033 }, { "content": "struct Node<T> {\n\n next: Option<*mut Node<T>>,\n\n prev: Option<*mut Node<T>>,\n\n element: T,\n\n}\n\n\n\npub struct Iter<'a, T: 'a> {\n\n head: Option<*mut Node<T>>,\n\n tail: Option<*mut Node<T>>,\n\n len: usize,\n\n marker: PhantomData<&'a Node<T>>,\n\n}\n\n\n\nimpl<T> Node<T> {\n\n fn new(element: T) -> Self {\n\n Node {\n\n next: None,\n\n prev: None,\n\n element,\n\n }\n", "file_path": "src/svalue/linked_list.rs", "rank": 41, "score": 62538.72003761131 }, { "content": "pub trait Expiration {\n\n fn insert(&mut self, k: u64, expiration: Duration) -> Option<SystemTime>;\n\n\n\n fn update(\n\n &mut self,\n\n k: u64,\n\n expiration_time: &SystemTime,\n\n new_expiration: Duration,\n\n ) -> Option<SystemTime>;\n\n\n\n fn remove(&mut self, k: &u64, expiration_time: &SystemTime) -> bool;\n\n\n\n fn cleanup(&mut self, now: &SystemTime) -> HashSet<u64>;\n\n\n\n fn clear(&mut self);\n\n\n\n fn is_empty(&self) -> 
bool;\n\n}\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "src/lcache/ttl.rs", "rank": 42, "score": 61746.496154773915 }, { "content": "/// Message trait with generic implementation for serialization and deserialization\n\npub trait Message {\n\n\n\n /// The message protocol, used for serialization/deserialization\n\n fn protocol(&self) -> u8;\n\n\n\n /// Serializes message for sending over the wire\n\n fn as_bytes(&self) -> Result<Vec<u8>, Box<dyn Error>>\n\n where Self: Serialize\n\n {\n\n match serde_cbor::to_vec(&self) {\n\n Ok(bytes) => Ok(bytes),\n\n Err(e) => Err(e)?,\n\n }\n\n }\n\n\n\n /// Deserializes a message\n\n fn from_bytes<'a>(bytes: &'a [u8]) -> Result<Self, Box<dyn Error>>\n\n where Self: Sized + Deserialize<'a>\n\n {\n\n match serde_cbor::from_slice::<Self>(bytes) {\n", "file_path": "src/gossip/message.rs", "rank": 43, "score": 61746.496154773915 }, { "content": "pub trait SetWrapper {\n\n fn sw_len(&self) -> usize;\n\n fn sw_delete(&mut self, o: &RobjPtr) -> Result<(), ()>;\n\n fn sw_iter<'a>(&'a self) -> Box<dyn Iterator<Item=RobjPtr> + 'a>;\n\n fn sw_exists(&self, o: &RobjPtr) -> bool;\n\n fn sw_pop_random(&mut self) -> RobjPtr;\n\n}\n\n\n\npub struct SWInterIter<'a> {\n\n main: Box<dyn Iterator<Item=RobjPtr> + 'a>,\n\n others: &'a [RobjPtr],\n\n}\n\n\n\nimpl Robj {\n\n pub fn string(&self) -> &[u8] {\n\n self.ptr.bytes_ref()\n\n }\n\n\n\n pub fn string_len(&self) -> usize {\n\n match self.encoding() {\n", "file_path": "src/svalue/object.rs", "rank": 44, "score": 60460.270854132126 }, { "content": "/// State based CRDT's replicate by transmitting the entire CRDT state.\n\npub trait CvRDT {\n\n /// The validation error returned by `validate_merge`.\n\n type Validation: Error;\n\n\n\n /// Some CRDT's have stricter requirements on how they must be used.\n\n /// To avoid violating these requirements, CRDT's provide an interface\n\n /// to optionally validate merge compatibility before attempting to merge.\n\n ///\n\n /// An `Ok(())` response 
signals that the merge is safe to proceed.\n\n /// Otherwise a structured error is returned to help you determine what\n\n /// is wrong with the merge.\n\n fn validate_merge(&self, other: &Self) -> Result<(), Self::Validation>;\n\n\n\n /// Merge the given CRDT into the current CRDT.\n\n fn merge(&mut self, other: Self);\n\n}\n\n\n", "file_path": "src/crdts/traits.rs", "rank": 45, "score": 60460.270854132126 }, { "content": "/// Trait for receiving updates from the gossip protocol.\n\n///\n\n/// See: [Update]\n\npub trait UpdateHandler {\n\n /// Method called every time a new update is available for the application layer\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `update` - The update that has been received\n\n fn on_update(&self, update: Update);\n\n}\n\n\n\n/// A decorator for handling operations around updates\n\npub struct UpdateDecorator {\n\n /// Active updates\n\n active_updates: HashMap<String, (Update, UpdateExpirationValue)>,\n\n /// Removed/expired updates\n\n removed_updates: Vec<String>,\n\n /// Strategy for expiring updates\n\n expiration_mode: UpdateExpirationMode,\n\n /// Number of digests of expired updates that are kept\n\n max_expired_size: usize,\n\n /// Margin for cleanup of expired updates\n", "file_path": "src/gossip/update.rs", "rank": 46, "score": 60460.270854132126 }, { "content": "/// Operation based CRDT's replicate by transmitting each operation.\n\npub trait CmRDT {\n\n /// Op defines a mutation to the CRDT.\n\n /// As long as Op's from one actor are replayed in exactly the same order they\n\n /// were generated by that actor, the CRDT will converge. 
In other words, we must\n\n /// have a total ordering on each actors operations, while requiring only a partial\n\n /// order over all ops.\n\n /// E.g.\n\n ///\n\n /// * Actor A produces ops A1, A2\n\n /// * Actor B produces ops B1, B2\n\n ///\n\n /// the only valid orderings are:\n\n /// * A1 < A2 < B1 < B2\n\n /// * A1 < B1 < A2 < B2\n\n /// * B1 < A1 < A2 < B2\n\n /// * A1 < B1 < B2 < A2\n\n /// * B1 < A1 < B2 < A2\n\n /// * B1 < B2 < A1 < A2\n\n ///\n\n /// Applying ops in any of the valid orders will converge to the same CRDT state\n", "file_path": "src/crdts/traits.rs", "rank": 47, "score": 60460.270854132126 }, { "content": "pub trait ObjectData {\n\n fn bytes_ref(&self) -> &[u8] { panic!(\"This is not a byte slice\") }\n\n fn sds_ref(&self) -> &str { panic!(\"This is not an Sds string\") }\n\n fn raw_bytes(&self) -> &[u8] { panic!(\"This type has no raw bytes\") }\n\n fn integer(&self) -> i64 { panic!(\"This is not an integer\") }\n\n fn linked_list_ref(&self) -> &List { panic!(\"This is not a List\") }\n\n fn linked_list_mut(&mut self) -> &mut List { panic!(\"This is not a List\") }\n\n fn set_ref(&self) -> &Set { panic!(\"This is not a Set\") }\n\n fn set_mut(&mut self) -> &mut Set { panic!(\"This is not a Set\") }\n\n fn zip_list_ref(&self) -> &ZipList { panic!(\"This is not a ZipList\") }\n\n fn zip_list_mut(&mut self) -> &mut ZipList { panic!(\"This is not a ZipList\") }\n\n fn hash_table_ref(&self) -> &Dict<RobjPtr, RobjPtr> { panic!(\"This is not a hash table\") }\n\n fn int_set_ref(&self) -> &IntSet { panic!(\"This is not an IntSet\") }\n\n fn int_set_mut(&mut self) -> &mut IntSet { panic!(\"This is not an IntSet\") }\n\n fn set_wrapper_ref(&self) -> &dyn SetWrapper { panic!(\"This is not as SetWrapper\") }\n\n fn set_wrapper_mut(&mut self) -> &mut dyn SetWrapper { panic!(\"This is not as SetWrapper\") }\n\n fn zset_ref(&self) -> &Zset { panic!(\"This is not a Zset\") }\n\n fn encoding(&self) -> RobjEncoding;\n\n}\n\n\n", "file_path": 
"src/svalue/object.rs", "rank": 48, "score": 60460.270854132126 }, { "content": "pub trait TinyLFU {\n\n fn estimate(&self, k: &u64) -> i64;\n\n\n\n fn increment(&mut self, k: &u64);\n\n\n\n fn reset(&mut self);\n\n\n\n fn clear(&mut self);\n\n}\n\n\n\npub struct TinyLFUCache {\n\n sketcher: CountMinSketch<CountMinStrategy, u64>,\n\n filter: CuckooFilter<u64>,\n\n increments: usize,\n\n window_size: usize,\n\n actual_window: HashSet<u64>,\n\n previous_window: HashSet<u64>,\n\n}\n\n\n\nimpl TinyLFUCache {\n", "file_path": "src/lcache/tiny_lfu.rs", "rank": 49, "score": 59256.210821600456 }, { "content": "pub trait CausalOp<A> {\n\n /// TODO: result should be a VClock<A> since an op could be dependant on a few different msgs\n\n /// If the result is Some(dot) then this operation cannot occur until the operation that\n\n /// occured at dot has.\n\n fn happens_after(&self) -> Option<Dot<A>>;\n\n\n\n /// The time that the current operation occured at\n\n fn dot(&self) -> Dot<A>;\n\n}\n\n\n\nimpl<A: Hash + Eq, T: CausalOp<A>> Default for CausalityBarrier<A, T> {\n\n fn default() -> Self {\n\n CausalityBarrier {\n\n peers: HashMap::new(),\n\n buffer: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl<A: Hash + Clone + Eq, T: CausalOp<A>> CausalityBarrier<A, T> {\n", "file_path": "src/crdts/vvwe.rs", "rank": 50, "score": 58553.2957599667 }, { "content": "#[derive(Debug)]\n\nstruct TableStack<'g, K, V> {\n\n length: usize,\n\n index: usize,\n\n table: &'g Table<K, V>,\n\n next: Option<Box<TableStack<'g, K, V>>>,\n\n}", "file_path": "src/chashmap/iter/traverser.rs", "rank": 51, "score": 56380.61633181217 }, { "content": "pub trait OnEvict<K, V> {\n\n fn evict(&self, k: &K, v: &V);\n\n}\n\n\n\n#[derive(Default)]\n\npub struct VoidEvict<K, V> {\n\n _k: PhantomData<K>,\n\n _v: PhantomData<V>,\n\n}\n\n\n\nimpl<K, V> OnEvict<K, V> for VoidEvict<K, V> {\n\n fn evict(&self, _k: &K, _v: &V) {}\n\n}\n\n\n\npub struct Cache<\n\n K,\n\n V,\n\n E = VoidEvict<K, V>,\n\n S = Storage_plus<K, 
V>,\n\n A = TinyLFUCache,\n", "file_path": "src/lcache/cache.rs", "rank": 52, "score": 55737.07370722521 }, { "content": "/// CRDT's are causal if they are built on top of vector clocks.\n\npub trait ResetRemove<A: Ord> {\n\n /// Remove data that is strictly smaller than this clock\n\n fn reset_remove(&mut self, clock: &VClock<A>);\n\n}\n", "file_path": "src/crdts/traits.rs", "rank": 53, "score": 55737.07370722521 }, { "content": "#[cfg(target_endian = \"big\")]\n\nfn _i64_from(b: &[u8]) -> i64 {\n\n let (int_bytes, _) = b.split_at(std::mem::size_of::<i64>());\n\n i64::from_be_bytes(int_bytes.try_into().unwrap())\n\n}\n\n\n\n\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::{i16, i32, i64};\n\n\n\n #[test]\n\n fn new_int_set() {\n\n let set = IntSet::new();\n\n assert_eq!(set.len(), 0);\n\n assert_eq!(set.encoding(), INT_SET_ENC_INT16);\n\n }\n\n\n\n #[test]\n", "file_path": "src/svalue/int_set.rs", "rank": 54, "score": 55560.21056187154 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]\n\nstruct Entry<V: Val<A>, A: Ord> {\n\n // The entry clock tells us which actors edited this entry.\n\n clock: VClock<A>,\n\n\n\n // The nested CRDT\n\n val: V,\n\n}\n\n\n\n/// Operations which can be applied to the Map CRDT\n\n#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum Op<K: Ord, V: Val<A>, A: Ord> {\n\n /// Remove a key from the map\n\n Rm {\n\n /// The clock under which we will perform this remove\n\n clock: VClock<A>,\n\n /// Key to remove\n\n keyset: BTreeSet<K>,\n\n },\n\n /// Update an entry in the map\n\n Up {\n", "file_path": "src/crdts/map.rs", "rank": 55, "score": 54722.75266723402 }, { "content": "struct DictTable<K: DictPartialEq, V> {\n\n pub table: Vec<Option<Box<DictEntry<K, V>>>>,\n\n pub size: usize,\n\n pub size_mask: usize,\n\n pub used: usize,\n\n}\n\n\n\nimpl<K, V> DictTable<K, V>\n\n where K: DictPartialEq\n\n{\n\n fn new() -> DictTable<K, V> {\n\n DictTable {\n\n table: vec![],\n\n 
size: 0,\n\n size_mask: 0,\n\n used: 0,\n\n }\n\n }\n\n\n\n fn iter(&self, index: usize) -> DictEntryIterator<K, V> {\n", "file_path": "src/svalue/dict.rs", "rank": 56, "score": 54309.700490286836 }, { "content": "struct DictEntry<K: DictPartialEq, V> {\n\n pub key: K,\n\n pub value: V,\n\n next: Option<Box<DictEntry<K, V>>>,\n\n}\n\n\n\nimpl<K, V> DictEntry<K, V>\n\n where K: DictPartialEq\n\n{\n\n fn new(key: K, value: V) -> Self {\n\n DictEntry {\n\n key,\n\n value,\n\n next: None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/svalue/dict.rs", "rank": 57, "score": 54309.700490286836 }, { "content": "fn reply_preceding_to_int(bytes: &[u8]) -> i64 {\n\n let content = &bytes[1..bytes.len() - 2];\n\n bytes_to_i64(content).unwrap()\n\n}\n\n\n", "file_path": "src/svalue/util.rs", "rank": 58, "score": 53369.83084158522 }, { "content": "/// Common Actor type. Actors are unique identifier for every `thing` mutating a VClock.\n\n/// VClock based CRDT's will need to expose this Actor type to the user.\n\npub trait Actor: Ord + Clone + Hash {}\n\nimpl<A: Ord + Clone + Hash> Actor for A {}\n\n\n", "file_path": "src/crdts/traits.rs", "rank": 59, "score": 53226.73997758236 }, { "content": "pub trait Store<K, V>: Iterator {\n\n fn capacity(&self) -> usize;\n\n\n\n fn len(&self) -> usize;\n\n\n\n fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n\n\n fn room_left(&self) -> usize;\n\n\n\n fn contains(&self, k: &u64) -> bool;\n\n\n\n fn keys(&self) -> Keys<u64, Item<K, V>>;\n\n\n\n fn get(&self, k: &u64) -> Option<&Item<K, V>>;\n\n\n\n fn get_mut(&mut self, k: &u64) -> Option<&Item<K, V>>;\n\n\n\n fn insert(&mut self, k: u64, item: Item<K, V>) -> Option<Item<K, V>> {\n", "file_path": "src/lcache/store.rs", "rank": 60, "score": 53226.73997758236 }, { "content": "struct DictEntryIterator<'a, K: DictPartialEq, V> {\n\n next: Option<&'a DictEntry<K, V>>,\n\n}\n\n\n\nimpl<'a, K, V> Iterator for DictEntryIterator<'a, K, V>\n\n where K: DictPartialEq\n\n{\n\n type Item = (&'a K, &'a 
V);\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.next.take().map(|entry| {\n\n self.next = entry.next\n\n .as_ref()\n\n .map(|entry| &**entry);\n\n (&entry.key, &entry.value)\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/svalue/dict.rs", "rank": 61, "score": 51865.7654075638 }, { "content": "struct DictEntryIteratorMut<'a, K: DictPartialEq, V> {\n\n next: Option<&'a mut DictEntry<K, V>>,\n\n}\n\n\n\nimpl<'a, K, V> Iterator for DictEntryIteratorMut<'a, K, V>\n\n where K: DictPartialEq {\n\n type Item = (&'a K, &'a mut V);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.next.take().map(|entry| {\n\n self.next = entry.next\n\n .as_mut()\n\n .map(|entry| &mut **entry);\n\n (&entry.key, &mut entry.value)\n\n })\n\n }\n\n}\n\n\n\n\n", "file_path": "src/svalue/dict.rs", "rank": 62, "score": 50970.4770396375 }, { "content": "#[cfg(target_endian = \"big\")]\n\nfn _serialize_i64(i: i64) -> [u8; mem::size_of::<i64>()] {\n\n i.to_be_bytes()\n\n}\n\n\n", "file_path": "src/svalue/int_set.rs", "rank": 63, "score": 47835.66207251599 }, { "content": "fn rational_between(low: Option<&BigRational>, high: Option<&BigRational>) -> BigRational {\n\n match (low, high) {\n\n (None, None) => BigRational::zero(),\n\n (Some(low), None) => low + BigRational::one(),\n\n (None, Some(high)) => high - BigRational::one(),\n\n (Some(low), Some(high)) => (low + high) / BigRational::from_integer(2.into()),\n\n }\n\n}\n\n\n\n/// A dense Identifier, if you have two identifiers that are different, we can\n\n/// always construct an identifier between them.\n\n#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct Identifier<T>(Vec<(BigRational, T)>);\n\n\n\nimpl<T> From<(BigRational, T)> for Identifier<T> {\n\n fn from((rational, value): (BigRational, T)) -> Self {\n\n Self(vec![(rational, value)])\n\n }\n\n}\n\n\n", "file_path": "src/crdts/identifier.rs", "rank": 64, "score": 44388.01940455649 }, { "content": " D: Deserializer<'de>,\n\n 
{\n\n deserializer.deserialize_map(HashMapVisitor::new())\n\n }\n\n}\n\n\n\nimpl<K, V, S> HashMapVisitor<K, V, S> {\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n key_marker: PhantomData,\n\n value_marker: PhantomData,\n\n hash_builder_marker: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<'de, K, V, S> Visitor<'de> for HashMapVisitor<K, V, S>\n\nwhere\n\n K: 'static + Deserialize<'de> + Send + Sync + Hash + Clone + Ord,\n\n V: 'static + Deserialize<'de> + Send + Sync + Ord,\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 65, "score": 40448.01183844071 }, { "content": "impl<T, S> Serialize for HashSet<T, S>\n\nwhere\n\n T: Serialize,\n\n{\n\n fn serialize<Sr>(&self, serializer: Sr) -> Result<Sr::Ok, Sr::Error>\n\n where\n\n Sr: Serializer,\n\n {\n\n self.pin().serialize(serializer)\n\n }\n\n}\n\n\n\nimpl<'de, T, S> Deserialize<'de> for HashSet<T, S>\n\nwhere\n\n T: 'static + Deserialize<'de> + Send + Sync + Hash + Clone + Ord,\n\n S: Default + BuildHasher,\n\n{\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_seq(HashSetVisitor::new())\n\n }\n\n}\n\n\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 66, "score": 40447.223500295826 }, { "content": " where\n\n I: IntoParallelIterator<Item = (K, V)>,\n\n {\n\n self.map.par_extend(par_iter);\n\n }\n\n}\n\n\n\nimpl<K, S> FromParallelIterator<K> for HashSet<K, S>\n\nwhere\n\n K: Clone + Hash + Ord + Send + Sync + 'static,\n\n S: BuildHasher + Default + Sync,\n\n{\n\n fn from_par_iter<I>(par_iter: I) -> Self\n\n where\n\n I: IntoParallelIterator<Item = K>,\n\n {\n\n let mut created_set = HashSet::with_hasher(S::default());\n\n created_set.par_extend(par_iter);\n\n created_set\n\n }\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 67, "score": 40446.61482913979 }, { "content": "where\n\n K: Serialize,\n\n V: Serialize,\n\n{\n\n fn serialize<Sr>(&self, serializer: Sr) -> Result<Sr::Ok, Sr::Error>\n\n where\n\n Sr: 
Serializer,\n\n {\n\n self.pin().serialize(serializer)\n\n }\n\n}\n\n\n\nimpl<'de, K, V, S> Deserialize<'de> for HashMap<K, V, S>\n\nwhere\n\n K: 'static + Deserialize<'de> + Send + Sync + Hash + Clone + Ord,\n\n V: 'static + Deserialize<'de> + Send + Sync + Ord,\n\n S: Default + BuildHasher,\n\n{\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 68, "score": 40446.55312675145 }, { "content": "use super::{HashMap, HashMapRef, HashSet, HashSetRef};\n\nuse rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, ParallelIterator};\n\nuse std::hash::{BuildHasher, Hash};\n\n\n\nimpl<K, V, S> FromParallelIterator<(K, V)> for HashMap<K, V, S>\n\nwhere\n\n K: Clone + Hash + Ord + Send + Sync + 'static,\n\n V: Send + Sync + 'static,\n\n S: BuildHasher + Default + Sync,\n\n{\n\n fn from_par_iter<I>(par_iter: I) -> Self\n\n where\n\n I: IntoParallelIterator<Item = (K, V)>,\n\n {\n\n let mut created_map = HashMap::with_hasher(S::default());\n\n created_map.par_extend(par_iter);\n\n created_map\n\n }\n\n}\n\n\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 69, "score": 40445.20272024921 }, { "content": " S: Default + BuildHasher,\n\n{\n\n type Value = HashMap<K, V, S>;\n\n\n\n fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"a map\")\n\n }\n\n\n\n fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>\n\n where\n\n M: MapAccess<'de>,\n\n {\n\n let map = match access.size_hint() {\n\n Some(n) => HashMap::with_capacity_and_hasher(n, S::default()),\n\n None => HashMap::with_hasher(S::default()),\n\n };\n\n let guard = map.guard();\n\n\n\n while let Some((key, value)) = access.next_entry()? 
{\n\n if let Some(_old_value) = map.insert(key, value, &guard) {\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 70, "score": 40443.40943920093 }, { "content": "}\n\n\n\nimpl<K, S> ParallelExtend<K> for HashSet<K, S>\n\nwhere\n\n K: Clone + Hash + Ord + Send + Sync + 'static,\n\n S: BuildHasher + Sync,\n\n{\n\n fn par_extend<I>(&mut self, par_iter: I)\n\n where\n\n I: IntoParallelIterator<Item = K>,\n\n {\n\n (&*self).par_extend(par_iter);\n\n }\n\n}\n\n\n\nimpl<K, S> ParallelExtend<K> for &HashSet<K, S>\n\nwhere\n\n K: Clone + Hash + Ord + Send + Sync + 'static,\n\n S: BuildHasher + Sync,\n\n{\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 71, "score": 40441.72760641542 }, { "content": "impl<K, V, S> ParallelExtend<(K, V)> for HashMap<K, V, S>\n\nwhere\n\n K: Clone + Hash + Ord + Send + Sync + 'static,\n\n V: Send + Sync + 'static,\n\n S: BuildHasher + Sync,\n\n{\n\n fn par_extend<I>(&mut self, par_iter: I)\n\n where\n\n I: IntoParallelIterator<Item = (K, V)>,\n\n {\n\n (&*self).par_extend(par_iter);\n\n }\n\n}\n\n\n\nimpl<K, V, S> ParallelExtend<(K, V)> for &HashMap<K, V, S>\n\nwhere\n\n K: Clone + Hash + Ord + Send + Sync + 'static,\n\n V: Send + Sync + 'static,\n\n S: BuildHasher + Sync,\n\n{\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 72, "score": 40441.45640208936 }, { "content": "\n\n fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"a set\")\n\n }\n\n\n\n fn visit_seq<A>(self, mut access: A) -> Result<Self::Value, A::Error>\n\n where\n\n A: SeqAccess<'de>,\n\n {\n\n let set = HashSet::default();\n\n let guard = set.guard();\n\n\n\n while let Some(value) = access.next_element()? 
{\n\n let _ = set.insert(value, &guard);\n\n }\n\n\n\n Ok(set)\n\n }\n\n}\n\n\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 73, "score": 40441.17134420944 }, { "content": " unreachable!(\"Serialized map held two values with the same key\");\n\n }\n\n }\n\n\n\n Ok(map)\n\n }\n\n}\n\n\n\nimpl<T, S> Serialize for HashSetRef<'_, T, S>\n\nwhere\n\n T: Serialize,\n\n{\n\n fn serialize<Sr>(&self, serilizer: Sr) -> Result<Sr::Ok, Sr::Error>\n\n where\n\n Sr: Serializer,\n\n {\n\n serilizer.collect_seq(self.iter())\n\n }\n\n}\n\n\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 74, "score": 40441.146946563975 }, { "content": "\n\n assert_eq!(map.len(), 2);\n\n\n\n assert_eq!(map.get(&1, &guard), Some(&2));\n\n assert_eq!(map.get(&3, &guard), Some(&4));\n\n }\n\n\n\n #[test]\n\n fn hm_parallel_extend_by_a_bunch() {\n\n let mut to_extend_with = Vec::new();\n\n for i in 0..100 {\n\n to_extend_with.push((i + 100, i * 10));\n\n }\n\n\n\n let mut map = HashMap::new();\n\n let guard = map.guard();\n\n map.insert(1, 2, &guard);\n\n map.insert(3, 4, &guard);\n\n\n\n map.par_extend(to_extend_with.into_par_iter());\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 75, "score": 40441.00884392374 }, { "content": "\n\n assert_eq!(map.get(&1, &guard), Some(&2));\n\n assert_eq!(map.get(&3, &guard), Some(&4));\n\n }\n\n\n\n #[test]\n\n fn hm_ref_parallel_extend_by_a_bunch() {\n\n let mut to_extend_with = Vec::new();\n\n for i in 0..100 {\n\n to_extend_with.push((i + 100, i * 10));\n\n }\n\n\n\n let map = HashMap::new();\n\n let guard = map.guard();\n\n map.insert(1, 2, &guard);\n\n map.insert(3, 4, &guard);\n\n\n\n map.pin().par_extend(to_extend_with.into_par_iter());\n\n assert_eq!(map.len(), 102);\n\n\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 76, "score": 40440.983827054624 }, { "content": " assert!(!set.contains(&17, &guard));\n\n }\n\n\n\n #[test]\n\n fn hs_parallel_extend_by_a_bunch() {\n\n let mut to_extend_with = Vec::new();\n\n for i in 0..100 {\n\n 
to_extend_with.push((i + 100, i * 10));\n\n }\n\n\n\n let mut set = HashSet::new();\n\n let guard = set.guard();\n\n set.insert((1, 2), &guard);\n\n set.insert((3, 4), &guard);\n\n\n\n set.par_extend(to_extend_with.into_par_iter());\n\n assert_eq!(set.len(), 102);\n\n\n\n assert!(set.contains(&(1, 2), &guard));\n\n assert!(set.contains(&(199, 990), &guard));\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 77, "score": 40440.95918460882 }, { "content": " #[test]\n\n fn hs_ref_parallel_extend_by_a_bunch() {\n\n let mut to_extend_with = Vec::new();\n\n for i in 0..100 {\n\n to_extend_with.push((i + 100, i * 10));\n\n }\n\n\n\n let set = HashSet::new();\n\n let mut set_ref = set.pin();\n\n set_ref.insert((1, 2));\n\n set_ref.insert((3, 4));\n\n\n\n set_ref.par_extend(to_extend_with.into_par_iter());\n\n assert_eq!(set.len(), 102);\n\n\n\n assert!(set_ref.contains(&(1, 2)));\n\n assert!(set_ref.contains(&(199, 990)));\n\n assert!(!set_ref.contains(&(199, 167)));\n\n }\n\n}", "file_path": "src/chashmap/rayon_impl.rs", "rank": 78, "score": 40440.88742159796 }, { "content": " fn par_extend<I>(&mut self, par_iter: I)\n\n where\n\n I: IntoParallelIterator<Item = (K, V)>,\n\n {\n\n par_iter.into_par_iter().for_each_init(\n\n || self.guard(),\n\n |guard, (k, v)| {\n\n self.insert(k, v, &guard);\n\n },\n\n );\n\n }\n\n}\n\n\n\nimpl<'map, K, V, S> ParallelExtend<(K, V)> for HashMapRef<'map, K, V, S>\n\nwhere\n\n K: Clone + Hash + Ord + Send + Sync + 'static,\n\n V: Send + Sync + 'static,\n\n S: BuildHasher + Sync,\n\n{\n\n fn par_extend<I>(&mut self, par_iter: I)\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 79, "score": 40440.65890176342 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::{HashMap, HashSet};\n\n use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend};\n\n\n\n #[test]\n\n fn hm_from_empty_parallel_iter() {\n\n let to_create_from: Vec<(i32, i32)> = Vec::new();\n\n let created_map: HashMap<i32, i32> = 
HashMap::from_par_iter(to_create_from.into_par_iter());\n\n assert_eq!(created_map.len(), 0);\n\n }\n\n\n\n #[test]\n\n fn hm_from_large_parallel_iter() {\n\n let mut to_create_from: Vec<(i32, i32)> = Vec::new();\n\n for i in 0..100 {\n\n to_create_from.push((i + 100, i * 10));\n\n }\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 80, "score": 40440.62933958269 }, { "content": " fn par_extend<I>(&mut self, par_iter: I)\n\n where\n\n I: IntoParallelIterator<Item = K>,\n\n {\n\n let tuple_iter = par_iter.into_par_iter().map(|k| (k, ()));\n\n (&self.map).par_extend(tuple_iter);\n\n }\n\n}\n\n\n\nimpl<'set, K, S> ParallelExtend<K> for HashSetRef<'set, K, S>\n\nwhere\n\n K: Clone + Hash + Ord + Send + Sync + 'static,\n\n S: BuildHasher + Sync,\n\n{\n\n fn par_extend<I>(&mut self, par_iter: I)\n\n where\n\n I: IntoParallelIterator<Item = K>,\n\n {\n\n self.set.par_extend(par_iter);\n\n }\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 81, "score": 40440.577379009956 }, { "content": " let created_map: HashMap<i32, i32> = HashMap::from_par_iter(to_create_from.into_par_iter());\n\n assert_eq!(created_map.len(), 100);\n\n\n\n let guard = created_map.guard();\n\n assert_eq!(created_map.get(&100, &guard), Some(&0));\n\n assert_eq!(created_map.get(&199, &guard), Some(&990));\n\n }\n\n\n\n #[test]\n\n fn hs_from_empty_parallel_iter() {\n\n let to_create_from: Vec<i32> = Vec::new();\n\n let created_set: HashSet<i32> = HashSet::from_par_iter(to_create_from.into_par_iter());\n\n assert_eq!(created_set.len(), 0);\n\n }\n\n\n\n #[test]\n\n fn hs_from_large_parallel_iter() {\n\n let mut to_create_from: Vec<(i32, i32)> = Vec::new();\n\n for i in 0..100 {\n\n to_create_from.push((i + 100, i * 10));\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 82, "score": 40440.39299625004 }, { "content": "use super::{HashMap, HashMapRef, HashSet, HashSetRef};\n\nuse serde::{\n\n de::{MapAccess, SeqAccess, Visitor},\n\n Deserialize, Deserializer, Serialize, 
Serializer,\n\n};\n\nuse std::fmt::{self, Formatter};\n\nuse std::hash::{BuildHasher, Hash};\n\nuse std::marker::PhantomData;\n\n\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 83, "score": 40437.69088648347 }, { "content": " assert!(!set.contains(&(199, 167), &guard));\n\n }\n\n\n\n #[test]\n\n fn hs_ref_parallel_extend_by_nothing() {\n\n let to_extend_with = Vec::new();\n\n\n\n let mut set = HashSet::new();\n\n let guard = set.guard();\n\n set.insert((1, 2), &guard);\n\n set.insert((3, 4), &guard);\n\n\n\n set.par_extend(to_extend_with.into_par_iter());\n\n assert_eq!(set.len(), 2);\n\n\n\n assert!(set.contains(&(1, 2), &guard));\n\n assert!(!set.contains(&(199, 990), &guard));\n\n assert!(!set.contains(&(199, 167), &guard));\n\n }\n\n\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 84, "score": 40437.69088648347 }, { "content": " assert_eq!(map.get(&1, &guard), Some(&2));\n\n assert_eq!(map.get(&3, &guard), Some(&4));\n\n assert_eq!(map.get(&100, &guard), Some(&0));\n\n assert_eq!(map.get(&199, &guard), Some(&990));\n\n }\n\n\n\n #[test]\n\n fn hs_parallel_extend_by_nothing() {\n\n let to_extend_with = Vec::new();\n\n\n\n let mut set = HashSet::new();\n\n let guard = set.guard();\n\n set.insert(1, &guard);\n\n set.insert(3, &guard);\n\n\n\n set.par_extend(to_extend_with.into_par_iter());\n\n\n\n assert_eq!(set.len(), 2);\n\n\n\n assert!(set.contains(&1, &guard));\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 85, "score": 40437.69088648347 }, { "content": " }\n\n let created_map: HashSet<(i32, i32)> =\n\n HashSet::from_par_iter(to_create_from.into_par_iter());\n\n assert_eq!(created_map.len(), 100);\n\n\n\n let guard = created_map.guard();\n\n assert!(created_map.contains(&(100, 0), &guard));\n\n assert!(!created_map.contains(&(100, 10000), &guard));\n\n }\n\n\n\n #[test]\n\n fn hm_parallel_extend_by_nothing() {\n\n let to_extend_with = Vec::new();\n\n\n\n let mut map = HashMap::new();\n\n let guard = map.guard();\n\n map.insert(1, 2, 
&guard);\n\n map.insert(3, 4, &guard);\n\n\n\n map.par_extend(to_extend_with.into_par_iter());\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 86, "score": 40437.69088648347 }, { "content": " assert_eq!(map, deserialized);\n\n }\n\n\n\n #[test]\n\n fn test_set() {\n\n let set: HashSet<u8> = HashSet::with_capacity(5);\n\n let guard = set.guard();\n\n\n\n let _ = set.insert(0, &guard);\n\n let _ = set.insert(1, &guard);\n\n let _ = set.insert(2, &guard);\n\n let _ = set.insert(3, &guard);\n\n let _ = set.insert(4, &guard);\n\n\n\n let serialized = serde_json::to_string(&set).expect(\"Couldn't serialize map\");\n\n\n\n let deserialized: HashSet<u8> =\n\n serde_json::from_str(&serialized).expect(\"Couldn't deserialize map\");\n\n\n\n assert_eq!(set, deserialized);\n\n }\n\n}", "file_path": "src/chashmap/serde_impl.rs", "rank": 87, "score": 40437.69088648347 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use crate::{HashMap, HashSet};\n\n\n\n #[test]\n\n fn test_map() {\n\n let map: HashMap<u8, u8> = HashMap::with_capacity(5);\n\n let guard = map.guard();\n\n\n\n let _ = map.insert(0, 4, &guard);\n\n let _ = map.insert(1, 3, &guard);\n\n let _ = map.insert(2, 2, &guard);\n\n let _ = map.insert(3, 1, &guard);\n\n let _ = map.insert(4, 0, &guard);\n\n\n\n let serialized = serde_json::to_string(&map).expect(\"Couldn't serialize map\");\n\n\n\n let deserialized: HashMap<u8, u8> =\n\n serde_json::from_str(&serialized).expect(\"Couldn't deserialize map\");\n\n\n", "file_path": "src/chashmap/serde_impl.rs", "rank": 88, "score": 40437.69088648347 }, { "content": " assert_eq!(map.len(), 102);\n\n\n\n assert_eq!(map.get(&1, &guard), Some(&2));\n\n assert_eq!(map.get(&3, &guard), Some(&4));\n\n assert_eq!(map.get(&100, &guard), Some(&0));\n\n assert_eq!(map.get(&199, &guard), Some(&990));\n\n }\n\n\n\n #[test]\n\n fn hm_ref_parallel_extend_by_nothing() {\n\n let to_extend_with = Vec::new();\n\n\n\n let map = HashMap::new();\n\n let guard = map.guard();\n\n map.insert(1, 2, 
&guard);\n\n map.insert(3, 4, &guard);\n\n\n\n map.pin().par_extend(to_extend_with.into_par_iter());\n\n\n\n assert_eq!(map.len(), 2);\n", "file_path": "src/chashmap/rayon_impl.rs", "rank": 89, "score": 40437.69088648347 }, { "content": "fn handle_message(buffer: Vec<u8>, peer_sampling_sender: &Sender<PeerSamplingMessage>, header_sender: &Sender<HeaderMessage>, content_sender: &Sender<ContentMessage>) -> Result<(), Box<dyn Error>> {\n\n let protocol = buffer[0] & MASK_MESSAGE_PROTOCOL;\n\n match protocol {\n\n MESSAGE_PROTOCOL_NOOP_MESSAGE => Ok(()),\n\n MESSAGE_PROTOCOL_SAMPLING_MESSAGE => {\n\n let message = PeerSamplingMessage::from_bytes(&buffer[1..])?;\n\n peer_sampling_sender.send(message)?;\n\n Ok(())\n\n }\n\n MESSAGE_PROTOCOL_CONTENT_MESSAGE => {\n\n let message = ContentMessage::from_bytes(&buffer[1..])?;\n\n content_sender.send(message)?;\n\n Ok(())\n\n }\n\n MESSAGE_PROTOCOL_HEADER_MESSAGE => {\n\n let message = HeaderMessage::from_bytes(&buffer[1..])?;\n\n header_sender.send(message)?;\n\n Ok(())\n\n }\n\n _ => Err(format!(\"Unknown protocol: {}\", protocol))?\n\n }\n\n}\n", "file_path": "src/gossip/network.rs", "rank": 90, "score": 30941.692077424093 }, { "content": "}\n\n\n\nimpl<K, V> HashMap<K, V, super::DefaultHashBuilder> {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn with_capacity(capacity: usize) -> Self {\n\n Self::with_capacity_and_hasher(capacity, super::DefaultHashBuilder::default())\n\n }\n\n}\n\n\n\nimpl<K, V, S> Default for HashMap<K, V, S>\n\nwhere\n\n S: Default,\n\n{\n\n fn default() -> Self {\n\n Self::with_hasher(S::default())\n\n }\n\n}\n", "file_path": "src/chashmap/map.rs", "rank": 99, "score": 22.07271234165245 } ]
Rust
src/lib.rs
alec-deason/bevy_contrib_schedules
7029bf35f5c784ff4e8eba11152b072b54afc6d2
use bevy::{ app::stage, core::Time, ecs::{Schedule, ParallelExecutor, World, Resources, System, Entity}, utils::HashMap, }; #[derive(Debug)] pub enum ScheduleType { Always, Fixed(f64, f64), } pub struct PackedSchedule(pub ScheduleType, pub Schedule, ParallelExecutor); impl Default for PackedSchedule { fn default() -> Self { PackedSchedule( ScheduleType::Always, Default::default(), ParallelExecutor::without_tracker_clears(), ) } } impl PackedSchedule { fn run(&mut self, mut world: &mut World, mut resources: &mut Resources) { self.1.initialize(world, resources); match &mut self.0 { ScheduleType::Always => { self.2.run(&mut self.1, &mut world, &mut resources); }, ScheduleType::Fixed(rate, accumulator) => { match resources.get::<Time>() { Some(time) => { *accumulator += time.delta_seconds_f64; }, None => log::debug!("Time does not exist, Fixed Schedule cannot run!"), }; while accumulator >= rate { self.2.run(&mut self.1, &mut world, &mut resources); *accumulator -= *rate; } }, }; } } pub struct ScheduleRunner(pub PackedSchedule); impl Default for ScheduleRunner { fn default() -> Self { ScheduleRunner(PackedSchedule { 0: ScheduleType::Always , .. Default::default() }) .add_default_stages() } } impl ScheduleRunner { pub fn from_rate(rate: f64) -> Self { ScheduleRunner(PackedSchedule { 0: ScheduleType::Fixed(rate, 0.0) , .. 
Default::default() }) .add_default_stages() } pub fn from_rate_inv(rate: f64) -> Self { Self::from_rate(1.0 / rate) } pub fn add_default_stages(self) -> Self { self.add_stage(stage::FIRST) .add_stage(stage::PRE_UPDATE) .add_stage(stage::UPDATE) .add_stage(stage::POST_UPDATE) .add_stage(stage::LAST) } pub fn add_stage(mut self, stage_name: &'static str) -> Self { self.0.1.add_stage(stage_name); self } pub fn add_stage_after(mut self, target: &'static str, stage_name: &'static str) -> Self { self.0.1.add_stage_after(target, stage_name); self } pub fn add_stage_before( mut self, target: &'static str, stage_name: &'static str, ) -> Self { self.0.1.add_stage_before(target, stage_name); self } pub fn add_system(self, system: Box<dyn System>) -> Self { self.add_system_to_stage(stage::UPDATE, system) } pub fn add_systems(self, systems: Vec<Box<dyn System>>) -> Self { self.add_systems_to_stage(stage::UPDATE, systems) } pub fn add_system_to_stage( mut self, stage_name: &'static str, system: Box<dyn System>, ) -> Self { self.0.1.add_system_to_stage(stage_name, system); self } pub fn add_system_to_stage_front( mut self, stage_name: &'static str, system: Box<dyn System>, ) -> Self { self.0.1.add_system_to_stage_front(stage_name, system); self } pub fn add_systems_to_stage( mut self, stage_name: &'static str, systems: Vec<Box<dyn System>>, ) -> Self { for system in systems { self.0.1.add_system_to_stage(stage_name, system); } self } } pub fn schedule_runner_system(mut world: &mut World, mut resources: &mut Resources) { if resources.contains::<ScheduleRunner>() { let mut schedule = std::mem::take(&mut resources.get_mut::<ScheduleRunner>().unwrap().0); schedule.run(&mut world, &mut resources); resources.get_mut::<ScheduleRunner>().unwrap().0 = schedule; } let mut entity_map: HashMap<Entity, PackedSchedule> = world.query_mut::<(Entity, &mut ScheduleRunner)>() .iter() .map(|(entity, mut runner)| (entity, std::mem::take(&mut runner.0))) .collect(); for (_, schedule) in 
entity_map.iter_mut() { schedule.run(&mut world, &mut resources); } for (entity, mut runner) in &mut world.query_mut::<(Entity, &mut ScheduleRunner)>().iter() { runner.0 = entity_map.remove(&entity).unwrap(); } }
use bevy::{ app::stage, core::Time, ecs::{Schedule, ParallelExecutor, World, Resources, System, Entity}, utils::HashMap, }; #[derive(Debug)] pub enum ScheduleType { Always, Fixed(f64, f64), } pub struct PackedSchedule(pub ScheduleType, pub Schedule, ParallelExecutor); impl Default for PackedSchedule { fn default() -> Self { PackedSchedule( ScheduleType::Always, Default::default(), ParallelExecutor::without_tracker_clears(), ) } } impl PackedSchedule { fn run(&mut self, mut world: &mut World, mut resources: &mut Resources) { self.1.initialize(world, resources); match &mut self.0 { ScheduleType::Always => { self.2.run(&mut self.1, &mut world, &mut resources); }, ScheduleType::Fixed(rate, accumulator) => { match resources.get::<Time>() { Some(time) => { *accumulator += time.delta_seconds_f64; }, None => log::debug!("Time does not exist, Fixed Schedule cannot run!"), }; while accumulator >= rate { self.2.run(&mut self.1, &mut world, &mut resources); *accumulator -= *rate; } }, }; } } pub struct ScheduleRunner(pub PackedSchedule); impl Default for ScheduleRunner { fn default() -> Self { ScheduleRunner(PackedSchedule { 0: ScheduleType::Always , .. Default::default() }) .add_default_stages() } } impl ScheduleRunner { pub fn from_rate(rate: f64) -> Self { ScheduleRunner(PackedSchedule { 0: ScheduleType::Fixed(rate, 0.0) , .. Default::default() }) .add_default_stages() } pub fn from_rate_inv(rate: f64) -> Self { Self::from_rate(1.0 / rate) } pub fn add_default_stages(se
tage::POST_UPDATE) .add_stage(stage::LAST) } pub fn add_stage(mut self, stage_name: &'static str) -> Self { self.0.1.add_stage(stage_name); self } pub fn add_stage_after(mut self, target: &'static str, stage_name: &'static str) -> Self { self.0.1.add_stage_after(target, stage_name); self } pub fn add_stage_before( mut self, target: &'static str, stage_name: &'static str, ) -> Self { self.0.1.add_stage_before(target, stage_name); self } pub fn add_system(self, system: Box<dyn System>) -> Self { self.add_system_to_stage(stage::UPDATE, system) } pub fn add_systems(self, systems: Vec<Box<dyn System>>) -> Self { self.add_systems_to_stage(stage::UPDATE, systems) } pub fn add_system_to_stage( mut self, stage_name: &'static str, system: Box<dyn System>, ) -> Self { self.0.1.add_system_to_stage(stage_name, system); self } pub fn add_system_to_stage_front( mut self, stage_name: &'static str, system: Box<dyn System>, ) -> Self { self.0.1.add_system_to_stage_front(stage_name, system); self } pub fn add_systems_to_stage( mut self, stage_name: &'static str, systems: Vec<Box<dyn System>>, ) -> Self { for system in systems { self.0.1.add_system_to_stage(stage_name, system); } self } } pub fn schedule_runner_system(mut world: &mut World, mut resources: &mut Resources) { if resources.contains::<ScheduleRunner>() { let mut schedule = std::mem::take(&mut resources.get_mut::<ScheduleRunner>().unwrap().0); schedule.run(&mut world, &mut resources); resources.get_mut::<ScheduleRunner>().unwrap().0 = schedule; } let mut entity_map: HashMap<Entity, PackedSchedule> = world.query_mut::<(Entity, &mut ScheduleRunner)>() .iter() .map(|(entity, mut runner)| (entity, std::mem::take(&mut runner.0))) .collect(); for (_, schedule) in entity_map.iter_mut() { schedule.run(&mut world, &mut resources); } for (entity, mut runner) in &mut world.query_mut::<(Entity, &mut ScheduleRunner)>().iter() { runner.0 = entity_map.remove(&entity).unwrap(); } }
lf) -> Self { self.add_stage(stage::FIRST) .add_stage(stage::PRE_UPDATE) .add_stage(stage::UPDATE) .add_stage(s
function_block-random_span
[ { "content": "fn fixed_sys() {\n\n println!(\"game tick!\");\n\n}", "file_path": "examples/fixed_tick.rs", "rank": 1, "score": 37938.93565220559 }, { "content": "fn build(mut commands: Commands) {\n\n // TODO: Demonstrate how to later remove schedules conditionally\n\n // Spoiler: Just `.despawn` the node when you're done!\n\n commands\n\n // Always ticks\n\n .spawn((\n\n Foo, ScheduleRunner::default()\n\n .add_system(foo_sys.system())\n\n ))\n\n // Ticks 10 times per second\n\n .spawn((\n\n Bar, ScheduleRunner::from_rate_inv(10.0)\n\n .add_system(bar_sys.system())\n\n ))\n\n ;\n\n}\n\n\n", "file_path": "examples/multiple.rs", "rank": 2, "score": 36755.96223722401 }, { "content": "fn main() {\n\n if let Err(e) = simple_logger::SimpleLogger::new().with_level(log::LevelFilter::Error).init() {\n\n println!(\"Failed to setup logger!\\n{}\", e);\n\n }\n\n\n\n App::build()\n\n // Ticks every 2 seconds\n\n .add_resource(ScheduleRunner::from_rate(2.0)\n\n .add_system(fixed_sys.system())\n\n )\n\n .add_resource(Time::default())\n\n .add_plugin(TypeRegistryPlugin::default())\n\n .add_plugin(CorePlugin::default())\n\n .add_plugin(ScheduleRunnerPlugin::default())\n\n .add_system(schedule_runner_system.thread_local_system())\n\n .run();\n\n}\n\n\n", "file_path": "examples/fixed_tick.rs", "rank": 3, "score": 34514.17831954433 }, { "content": "struct Foo;\n", "file_path": "examples/multiple.rs", "rank": 4, "score": 26758.729183087707 }, { "content": "struct Bar;\n\n\n", "file_path": "examples/multiple.rs", "rank": 5, "score": 26758.729183087707 }, { "content": "fn main() {\n\n if let Err(e) = simple_logger::SimpleLogger::new().with_level(log::LevelFilter::Error).init() {\n\n println!(\"Failed to setup logger!\\n{}\", e);\n\n }\n\n\n\n App::build()\n\n .add_resource(Time::default())\n\n .add_plugin(TypeRegistryPlugin::default())\n\n .add_plugin(CorePlugin::default())\n\n .add_plugin(ScheduleRunnerPlugin::default())\n\n .add_startup_system(build.system())\n\n 
.add_system(schedule_runner_system.thread_local_system())\n\n .run();\n\n}\n\n\n", "file_path": "examples/multiple.rs", "rank": 6, "score": 21734.388068001455 }, { "content": "fn foo_sys() {\n\n println!(\"foo\");\n\n}\n\n\n", "file_path": "examples/multiple.rs", "rank": 7, "score": 20842.083094343656 }, { "content": "fn bar_sys() {\n\n println!(\"bar\");\n\n}", "file_path": "examples/multiple.rs", "rank": 8, "score": 20842.083094343656 }, { "content": "# Bevy Schedule Runner\n\n\n\nA component for running systems at a different rate from the main schedule.\n\n\n\n- Run systems at a fixed timestep\n\n- Component schedules\n\n - Add and remove schedules as necessary\n\n - Run multiple schedules\n\n\n", "file_path": "README.md", "rank": 9, "score": 16251.747637453573 }, { "content": "use bevy::{prelude::*, app::ScheduleRunnerPlugin, core::CorePlugin, type_registry::TypeRegistryPlugin};\n\nuse bevy_contrib_schedules::*;\n\n\n", "file_path": "examples/fixed_tick.rs", "rank": 10, "score": 14647.46201975579 }, { "content": "use bevy::{prelude::*, app::ScheduleRunnerPlugin, core::CorePlugin, type_registry::TypeRegistryPlugin};\n\nuse bevy_contrib_schedules::*;\n\n\n", "file_path": "examples/multiple.rs", "rank": 19, "score": 4.2935147494406625 } ]
Rust
crates/model/src/score/snapshots.rs
katandps/beatoraja_play_recommend
c7adf974cdab1b249c86c896aa1ba0a8fdd20819
use crate::score::score::ParamSnap; use crate::*; use chrono::Duration; use std::collections::BTreeSet; #[derive(Deserialize, Serialize, Debug, Clone, Default)] pub struct SnapShots(pub BTreeSet<SnapShot>); impl SnapShots { pub fn create_by_snaps(snapshots: Vec<SnapShot>) -> SnapShots { SnapShots(snapshots.iter().cloned().collect()) } pub fn add(&mut self, snapshot: SnapShot) { self.0.insert(snapshot); } pub fn snap(&self, date: &UpdatedAt) -> Option<&SnapShot> { self.0.iter().rev().find(|&s| s.updated_at.le(date)) } pub fn param_snap<T: ParamSnap>(&self, date: &UpdatedAt) -> Option<T> { match self.snap(date) { Some(last) => { let mut last_date = &last.updated_at; let mut one_day_before = None; for snap in self.0.iter().rev() { if T::cmp(snap, last) { last_date = &snap.updated_at; } else { one_day_before = self.snap(&(last_date - Duration::days(1))); break; } } Some(T::make(last, last_date.clone(), one_day_before)) } None => None, } } } #[cfg(test)] mod test { use super::*; use crate::score::score::ClearTypeSnap; #[test] pub fn test() { let shot1 = SnapShot::from_data(1, 2, 3, 4, 11); let shot2 = SnapShot::from_data(1, 2, 3, 4, 22); let shot3 = SnapShot::from_data(1, 2, 3, 4, 33); let shot4 = SnapShot::from_data(1, 2, 3, 4, 44); let shots = SnapShots::create_by_snaps(vec![ shot1.clone(), shot2.clone(), shot3.clone(), shot4.clone(), ]); assert_eq!(Some(&shot1), shots.snap(&UpdatedAt::from_timestamp(21))); assert_eq!(Some(&shot2), shots.snap(&UpdatedAt::from_timestamp(22))); assert_eq!(Some(&shot2), shots.snap(&UpdatedAt::from_timestamp(23))); assert_eq!(Some(&shot2), shots.snap(&UpdatedAt::from_timestamp(32))); assert_eq!(Some(&shot3), shots.snap(&UpdatedAt::from_timestamp(33))); } #[test] pub fn clear() { const DAY: i64 = 86400; fn asrt(snapshots: &SnapShots, current: ClearType, before: ClearType, timestamp: i64) { let snap = snapshots .param_snap::<ClearTypeSnap>(&UpdatedAt::from_timestamp(timestamp)) .unwrap_or_default(); assert_eq!(current.to_integer(), 
snap.current); assert_eq!(before.to_integer(), snap.before); } let shot_failed = SnapShot::from_data(1, 2, 3, 4, DAY * 10); let shot_failed2 = SnapShot::from_data(1, 2, 3, 4, DAY * 15); let shot_assist = SnapShot::from_data(2, 2, 3, 4, DAY * 17); let shot_la = SnapShot::from_data(3, 2, 3, 4, DAY * 17 + 1); let shot_la2 = SnapShot::from_data(3, 2, 3, 4, DAY * 20); let shot_easy = SnapShot::from_data(4, 2, 3, 4, DAY * 22); let shot_normal = SnapShot::from_data(5, 2, 3, 4, DAY * 25); let shot_hard = SnapShot::from_data(6, 2, 3, 4, DAY * 25 + DAY - 1); let shot_exhard = SnapShot::from_data(7, 2, 3, 4, DAY * 30); let shots = SnapShots::create_by_snaps(vec![ shot_failed.clone(), shot_failed2.clone(), shot_assist.clone(), shot_la.clone(), shot_la2.clone(), shot_easy.clone(), shot_normal.clone(), shot_hard.clone(), shot_exhard.clone(), ]); use ClearType::*; asrt(&shots, NoPlay, NoPlay, 0); asrt(&shots, NoPlay, NoPlay, DAY * 9); asrt(&shots, Failed, NoPlay, DAY * 10); asrt(&shots, Failed, NoPlay, DAY * 15); asrt(&shots, AssistEasy, Failed, DAY * 17); asrt(&shots, LightAssistEasy, Failed, DAY * 17 + 1); asrt(&shots, LightAssistEasy, Failed, DAY * 20); asrt(&shots, Easy, LightAssistEasy, DAY * 22); asrt(&shots, Normal, Easy, DAY * 25); asrt(&shots, Hard, Easy, DAY * 25 + DAY - 1); asrt(&shots, Hard, Easy, DAY * 26); asrt(&shots, ExHard, Hard, DAY * 30); } }
use crate::score::score::ParamSnap; use crate::*; use chrono::Duration; use std::collections::BTreeSet; #[derive(Deserialize, Serialize, Debug, Clone, Default)] pub struct SnapShots(pub BTreeSet<SnapShot>); impl SnapShots { pub fn create_by_snaps(snapshots: Vec<SnapShot>) -> SnapShots { SnapShots(snapshots.iter().cloned().collect()) } pub fn add(&mut self, snapshot: SnapShot) { self.0.insert(snapshot); } pub fn snap(&self, date: &UpdatedAt) -> Option<&SnapShot> { self.0.iter().rev().find(|&s| s.updated_at.le(date)) } pub fn param_snap<T: ParamSnap>(&self, date: &UpdatedAt) -> Option<T> { match self.snap(date) { Some(last) => { le
Some(T::make(last, last_date.clone(), one_day_before)) } None => None, } } } #[cfg(test)] mod test { use super::*; use crate::score::score::ClearTypeSnap; #[test] pub fn test() { let shot1 = SnapShot::from_data(1, 2, 3, 4, 11); let shot2 = SnapShot::from_data(1, 2, 3, 4, 22); let shot3 = SnapShot::from_data(1, 2, 3, 4, 33); let shot4 = SnapShot::from_data(1, 2, 3, 4, 44); let shots = SnapShots::create_by_snaps(vec![ shot1.clone(), shot2.clone(), shot3.clone(), shot4.clone(), ]); assert_eq!(Some(&shot1), shots.snap(&UpdatedAt::from_timestamp(21))); assert_eq!(Some(&shot2), shots.snap(&UpdatedAt::from_timestamp(22))); assert_eq!(Some(&shot2), shots.snap(&UpdatedAt::from_timestamp(23))); assert_eq!(Some(&shot2), shots.snap(&UpdatedAt::from_timestamp(32))); assert_eq!(Some(&shot3), shots.snap(&UpdatedAt::from_timestamp(33))); } #[test] pub fn clear() { const DAY: i64 = 86400; fn asrt(snapshots: &SnapShots, current: ClearType, before: ClearType, timestamp: i64) { let snap = snapshots .param_snap::<ClearTypeSnap>(&UpdatedAt::from_timestamp(timestamp)) .unwrap_or_default(); assert_eq!(current.to_integer(), snap.current); assert_eq!(before.to_integer(), snap.before); } let shot_failed = SnapShot::from_data(1, 2, 3, 4, DAY * 10); let shot_failed2 = SnapShot::from_data(1, 2, 3, 4, DAY * 15); let shot_assist = SnapShot::from_data(2, 2, 3, 4, DAY * 17); let shot_la = SnapShot::from_data(3, 2, 3, 4, DAY * 17 + 1); let shot_la2 = SnapShot::from_data(3, 2, 3, 4, DAY * 20); let shot_easy = SnapShot::from_data(4, 2, 3, 4, DAY * 22); let shot_normal = SnapShot::from_data(5, 2, 3, 4, DAY * 25); let shot_hard = SnapShot::from_data(6, 2, 3, 4, DAY * 25 + DAY - 1); let shot_exhard = SnapShot::from_data(7, 2, 3, 4, DAY * 30); let shots = SnapShots::create_by_snaps(vec![ shot_failed.clone(), shot_failed2.clone(), shot_assist.clone(), shot_la.clone(), shot_la2.clone(), shot_easy.clone(), shot_normal.clone(), shot_hard.clone(), shot_exhard.clone(), ]); use ClearType::*; asrt(&shots, NoPlay, 
NoPlay, 0); asrt(&shots, NoPlay, NoPlay, DAY * 9); asrt(&shots, Failed, NoPlay, DAY * 10); asrt(&shots, Failed, NoPlay, DAY * 15); asrt(&shots, AssistEasy, Failed, DAY * 17); asrt(&shots, LightAssistEasy, Failed, DAY * 17 + 1); asrt(&shots, LightAssistEasy, Failed, DAY * 20); asrt(&shots, Easy, LightAssistEasy, DAY * 22); asrt(&shots, Normal, Easy, DAY * 25); asrt(&shots, Hard, Easy, DAY * 25 + DAY - 1); asrt(&shots, Hard, Easy, DAY * 26); asrt(&shots, ExHard, Hard, DAY * 30); } }
t mut last_date = &last.updated_at; let mut one_day_before = None; for snap in self.0.iter().rev() { if T::cmp(snap, last) { last_date = &snap.updated_at; } else { one_day_before = self.snap(&(last_date - Duration::days(1))); break; } }
random
[ { "content": "pub fn changed_visibility_by_query() -> impl Filter<Extract = (bool,), Error = Rejection> + Clone {\n\n warp::body::json().and_then(get_changed_visibility_query)\n\n}\n\n\n\nasync fn get_changed_name_query(body: HashMap<String, String>) -> Result<String, Rejection> {\n\n let changed_name = body\n\n .get(&\"changed_name\".to_string())\n\n .ok_or(HandleError::ChangedNameNotFound)?;\n\n Ok(changed_name.clone())\n\n}\n\n\n\nasync fn get_changed_visibility_query(body: HashMap<String, String>) -> Result<bool, Rejection> {\n\n let changed_visibility = body\n\n .get(&\"visibility\".to_string())\n\n .ok_or(HandleError::ChangedVisibilityNotFound)?;\n\n Ok(changed_visibility == &\"true\".to_string())\n\n}\n\n\n\npub struct DetailQuery {\n\n pub date: UpdatedAt,\n\n pub play_mode: PlayMode,\n\n}\n\n\n\npub struct RankingQuery {\n\n pub date: UpdatedAt,\n\n pub play_mode: PlayMode,\n\n pub sha256: HashSha256,\n\n}\n", "file_path": "crates/server/src/filter.rs", "rank": 0, "score": 227873.0472422898 }, { "content": "pub fn changed_name_by_query() -> impl Filter<Extract = (String,), Error = Rejection> + Clone {\n\n warp::body::json().and_then(get_changed_name_query)\n\n}\n\n\n", "file_path": "crates/server/src/filter.rs", "rank": 1, "score": 227873.0472422898 }, { "content": "pub fn receive_session_key() -> impl Filter<Extract = (String,), Error = Rejection> + Clone {\n\n warp::header::<String>(crate::session::SESSION_KEY)\n\n}\n\n\n", "file_path": "crates/server/src/filter.rs", "rank": 2, "score": 227873.0472422898 }, { "content": "pub fn receive_sqlite_file() -> impl Filter<Extract = (FormData,), Error = Rejection> + Clone {\n\n warp::multipart::form().max_length(100 * 1024 * 1024)\n\n}\n\n\n", "file_path": "crates/server/src/filter.rs", "rank": 3, "score": 225639.00177956646 }, { "content": "pub fn with_table(tables: &Tables) -> impl Filter<Extract = (Tables,), Error = Infallible> + Clone {\n\n let tables = tables.clone();\n\n warp::any().map(move || 
tables.clone())\n\n}\n\n\n", "file_path": "crates/server/src/filter.rs", "rank": 4, "score": 222414.3853853951 }, { "content": "pub fn logout() -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path(\"logout\"))\n\n .and(receive_session_key())\n\n .and_then(logout_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn logout_handler(session_key: String) -> Result<impl Reply, Rejection> {\n\n crate::session::remove_session(&session_key)?;\n\n Ok(StatusCode::OK)\n\n}\n", "file_path": "crates/server/src/handler/logout.rs", "rank": 5, "score": 189818.5108233766 }, { "content": "pub fn custom_table_route() -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path!(\"recommend_table\" / i32 / usize / \"table.html\"))\n\n .and_then(table_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn table_handler(_user_id: i32, _table_index: usize) -> Result<impl Reply, Rejection> {\n\n let body = r#\"\n\n <html>\n\n <head>\n\n <meta name=\"bmstable\" content=\"header.json\">\n\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=rtf-8\">\n\n </head>\n\n <body>\n\n おすすめ譜面表\n\n </body>\n\n </html>\"#;\n\n Ok(warp::reply::html(body))\n\n}\n\n\n", "file_path": "crates/server/src/handler/custom_table.rs", "rank": 6, "score": 184109.05004349374 }, { "content": "pub fn tables_route(tables: &Tables) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path(\"tables\"))\n\n .and(with_table(tables))\n\n .and_then(table_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn table_handler(tables: Tables) -> std::result::Result<impl Reply, Rejection> {\n\n Ok(serde_json::to_string(&TablesFormat::from(tables)).unwrap())\n\n}\n", "file_path": "crates/server/src/handler/tables.rs", "rank": 7, "score": 178761.91814030503 }, { "content": "pub fn users_route(db_pool: &MySqlPool) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path(\"users\"))\n\n .and(with_db(db_pool))\n\n .and_then(users_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn users_handler<C: PublishedUsers>(repos: C) -> 
std::result::Result<impl Reply, Rejection> {\n\n let users = repos.fetch_users().map_err(HandleError::from)?;\n\n Ok(serde_json::to_string(&users).unwrap())\n\n}\n", "file_path": "crates/server/src/handler/users.rs", "rank": 8, "score": 175242.87111411075 }, { "content": "pub fn custom_table_header(tables: &Tables) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path!(\"recommend_table\" / i32 / usize / \"header.json\"))\n\n .and(with_table(tables))\n\n .and_then(header_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn header_handler(\n\n _user_id: i32,\n\n table_index: usize,\n\n tables: Tables,\n\n) -> Result<impl Reply, Rejection> {\n\n let table = tables.get(table_index).unwrap();\n\n let header =\n\n &CustomTableHeader::from(table).set_name(format!(\"おすすめ譜面表: {}\", table.title()));\n\n Ok(serde_json::to_string(&header).unwrap())\n\n}\n\n\n", "file_path": "crates/server/src/handler/custom_table.rs", "rank": 9, "score": 175242.87111411075 }, { "content": "pub fn account_route(db_pool: &MySqlPool) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path(\"account\"))\n\n .and(account_by_session(db_pool))\n\n .and_then(account_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn account_handler(account: Account) -> Result<impl Reply, Rejection> {\n\n Ok(serde_json::to_string(&account).unwrap())\n\n}\n", "file_path": "crates/server/src/handler/account.rs", "rank": 10, "score": 175242.87111411075 }, { "content": "pub fn health_route(db_pool: &MySqlPool) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path(\"health\"))\n\n .and(with_db(db_pool))\n\n .and_then(health_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn health_handler<C: HealthCheck>(client: C) -> std::result::Result<impl Reply, Rejection> {\n\n match client.health() {\n\n Ok(_) => Ok(StatusCode::OK),\n\n Err(_) => Ok(StatusCode::INTERNAL_SERVER_ERROR),\n\n }\n\n}\n", "file_path": "crates/server/src/handler/health.rs", "rank": 11, "score": 175242.87111411075 }, { "content": "pub fn change_name(db_pool: 
&MySqlPool) -> BoxedFilter<(impl Reply,)> {\n\n warp::post()\n\n .and(path(\"user\"))\n\n .and(path(\"name\"))\n\n .and(with_db(db_pool))\n\n .and(account_by_session(db_pool))\n\n .and(changed_name_by_query())\n\n .and_then(change_name_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn change_name_handler<C: RenameAccount>(\n\n repos: C,\n\n mut account: Account,\n\n changed_name: String,\n\n) -> Result<impl Reply, Rejection> {\n\n account.set_name(&changed_name);\n\n rename_account(&repos, &account)?;\n\n Ok(serde_json::to_string(&account).unwrap())\n\n}\n\n\n", "file_path": "crates/server/src/handler/change_name.rs", "rank": 12, "score": 173561.83712036273 }, { "content": "pub fn change_visibility_route(db_pool: &MySqlPool) -> BoxedFilter<(impl Reply,)> {\n\n warp::post()\n\n .and(path(\"user\"))\n\n .and(path(\"visibility\"))\n\n .and(with_db(db_pool))\n\n .and(account_by_session(db_pool))\n\n .and(changed_visibility_by_query())\n\n .and_then(change_visibility_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn change_visibility_handler<C: ChangeAccountVisibility>(\n\n repos: C,\n\n mut account: Account,\n\n changed_visibility: bool,\n\n) -> Result<impl Reply, Rejection> {\n\n account.set_visibility(changed_visibility);\n\n change_visibility(&repos, &account)?;\n\n Ok(serde_json::to_string(&account).unwrap())\n\n}\n\n\n", "file_path": "crates/server/src/handler/change_visibility.rs", "rank": 13, "score": 171930.06281335873 }, { "content": "pub fn oauth_redirect_route(db_pool: &MySqlPool) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path(\"oauth\"))\n\n .and(with_db(db_pool))\n\n .and(warp::query::<HashMap<String, String>>().and_then(verify))\n\n .and_then(oauth_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn oauth_handler<C: RegisterUser + AccountByGoogleId>(\n\n repos: C,\n\n profile: GoogleProfile,\n\n) -> Result<impl Reply, Rejection> {\n\n repos.register(&profile).map_err(HandleError::OtherError)?;\n\n let account = repos\n\n .user(&GoogleId::new(profile.user_id))\n\n 
.map_err(HandleError::OtherError)?;\n\n let key = crate::session::save_user_id(account.google_id).map_err(HandleError::OtherError)?;\n\n let header = format!(\n\n \"session-token={};domain={};max-age=300\",\n", "file_path": "crates/server/src/handler/oauth_redirect.rs", "rank": 14, "score": 171930.06281335873 }, { "content": "pub fn play_data_upload_route(db_pool: &MySqlPool) -> BoxedFilter<(impl Reply,)> {\n\n warp::post()\n\n .and(path!(\"upload\" / \"play_data\"))\n\n .and(with_db(db_pool))\n\n .and(receive_sqlite_file())\n\n .and(account_by_session(db_pool))\n\n .and_then(play_data_upload_handler)\n\n .boxed()\n\n}\n\n\n", "file_path": "crates/server/src/handler/upload.rs", "rank": 15, "score": 171930.06281335873 }, { "content": "pub fn songs_route(tables: &Tables, songs: &SongData) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path(\"songs\"))\n\n .and(with_table(tables))\n\n .and(with_song_data(songs))\n\n .and_then(songs_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn songs_handler(\n\n tables: Tables,\n\n song_data: SongData,\n\n) -> std::result::Result<impl Reply, Rejection> {\n\n let songs = song_data.lock().await;\n\n Ok(serde_json::to_string(&songs.song.get_list(&tables.get_charts())).unwrap())\n\n}\n", "file_path": "crates/server/src/handler/songs.rs", "rank": 16, "score": 168999.28540268363 }, { "content": "pub fn ranking_route(db_pool: &MySqlPool, song_data: &SongData) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path(\"ranking\"))\n\n .and(with_db(db_pool))\n\n .and(warp::query::<HashMap<String, String>>().and_then(parse_ranking_query))\n\n .and(with_song_data(song_data))\n\n .and_then(ranking_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn parse_ranking_query(query: HashMap<String, String>) -> Result<RankingQuery, Rejection> {\n\n let date = query\n\n .get(\"date\")\n\n .map(|u| {\n\n UpdatedAt::from_str(u)\n\n .map(|u| &u - Duration::days(-1))\n\n .unwrap_or_else(|_| UpdatedAt::default())\n\n })\n\n .unwrap_or_default();\n\n let 
play_mode = if let Some(mode) = query.get(\"mode\") {\n", "file_path": "crates/server/src/handler/ranking.rs", "rank": 17, "score": 164243.32730529585 }, { "content": "pub fn with_db(\n\n db_pool: &MySqlPool,\n\n) -> impl Filter<Extract = (MySQLClient,), Error = Infallible> + Clone {\n\n let db_pool = db_pool.clone();\n\n warp::any().map(move || MySQLClient::new(db_pool.get().unwrap()))\n\n}\n\n\n", "file_path": "crates/server/src/filter.rs", "rank": 18, "score": 143442.5383763727 }, { "content": "pub fn api_routes(\n\n db_pool: &MySqlPool,\n\n t: &Tables,\n\n song_data: &SongData,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n health_route(db_pool)\n\n .or(account_route(db_pool))\n\n .or(users_route(db_pool))\n\n .or(change_name(db_pool))\n\n .or(change_visibility_route(db_pool))\n\n .or(logout())\n\n .or(tables_route(t))\n\n .or(songs_route(t, song_data))\n\n .or(ranking_route(db_pool, song_data))\n\n .or(detail_route(db_pool, t, song_data))\n\n .or(play_data_upload_route(db_pool))\n\n .or(song_data_upload_route(db_pool, song_data))\n\n .or(oauth_redirect_route(db_pool))\n\n .with(cors_header())\n\n .with(warp::compression::gzip())\n\n .with(warp::log(\"api_access\"))\n\n .boxed()\n\n}\n\n\n", "file_path": "crates/server/src/routes.rs", "rank": 19, "score": 141785.5541414026 }, { "content": "pub fn with_song_data(\n\n song_data: &SongData,\n\n) -> impl Filter<Extract = (SongData,), Error = Infallible> + Clone {\n\n let song_data = Arc::clone(song_data);\n\n warp::any().map(move || song_data.clone())\n\n}\n\n\n", "file_path": "crates/server/src/filter.rs", "rank": 20, "score": 141785.5541414026 }, { "content": "pub fn account_by_session(\n\n db_pool: &MySqlPool,\n\n) -> impl Filter<Extract = (Account,), Error = Rejection> + Clone {\n\n with_db(db_pool)\n\n .and(receive_session_key())\n\n .and_then(crate::session::get_account_by_session)\n\n}\n\n\n", "file_path": "crates/server/src/filter.rs", "rank": 21, "score": 141785.5541414026 }, { "content": "pub fn 
table_routes(\n\n db_pool: &MySqlPool,\n\n tables: &Tables,\n\n song_data: &SongData,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n custom_table_header(tables)\n\n .or(custom_table_body(db_pool, tables, song_data))\n\n .or(custom_table_route())\n\n .with(cors_header())\n\n .with(warp::log(\"table_access\"))\n\n .boxed()\n\n}\n\n\n\nuse warp::filters::cors::Builder;\n", "file_path": "crates/server/src/routes.rs", "rank": 22, "score": 141785.5541414026 }, { "content": "pub fn account_id_query(\n\n db_pool: &MySqlPool,\n\n) -> impl Filter<Extract = (Account,), Error = Rejection> + Clone {\n\n with_db(db_pool)\n\n .and(warp::query::<HashMap<String, String>>())\n\n .and_then(get_account_by_query)\n\n}\n\n\n\nasync fn get_account_by_query<C: AccountByUserId>(\n\n repos: C,\n\n query: HashMap<String, String>,\n\n) -> Result<Account, Rejection> {\n\n let user_id = query\n\n .get(&\"user_id\".to_string())\n\n .ok_or(HandleError::AccountIsNotSelected)?;\n\n let user_id = user_id\n\n .parse::<i32>()\n\n .map_err(HandleError::AccountSelectionIsInvalid)?;\n\n let account = repos\n\n .user(user_id)\n\n .map_err(HandleError::AccountIsNotFound)?;\n\n Ok(account)\n\n}\n\n\n", "file_path": "crates/server/src/filter.rs", "rank": 23, "score": 140185.46019801547 }, { "content": "pub fn detail_route(\n\n db_pool: &MySqlPool,\n\n tables: &Tables,\n\n song_data: &SongData,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path(\"detail\"))\n\n .and(with_db(db_pool))\n\n .and(with_table(tables))\n\n .and(warp::query::<HashMap<String, String>>().and_then(parse_detail_query))\n\n .and(account_id_query(db_pool))\n\n .and(with_song_data(song_data))\n\n .and_then(detail_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn parse_detail_query(query: HashMap<String, String>) -> Result<DetailQuery, Rejection> {\n\n let date = query\n\n .get(\"date\")\n\n .map(|u| {\n", "file_path": "crates/server/src/handler/detail.rs", "rank": 24, "score": 140185.46019801547 }, { "content": "pub fn config() -> Cfg 
{\n\n (*self::CONFIG).clone()\n\n}\n\n\n\nlazy_static! {\n\n pub static ref CONFIG: Cfg = envy::from_env::<Cfg>().unwrap();\n\n}\n", "file_path": "crates/mysql/src/config.rs", "rank": 25, "score": 139500.77154602602 }, { "content": "pub fn config() -> Cfg {\n\n (*self::CONFIG).clone()\n\n}\n\n\n\nlazy_static! {\n\n pub static ref CONFIG: Cfg = envy::from_env::<Cfg>().unwrap();\n\n}\n", "file_path": "crates/table/src/config.rs", "rank": 26, "score": 139500.77154602602 }, { "content": "#[wasm_bindgen]\n\npub fn test() -> String {\n\n \"hello\".to_string()\n\n}\n", "file_path": "crates/client/src/lib.rs", "rank": 27, "score": 139500.77154602602 }, { "content": "pub fn config() -> Cfg {\n\n (*self::CONFIG).clone()\n\n}\n\n\n\nlazy_static! {\n\n pub static ref CONFIG: Cfg = envy::from_env::<Cfg>().unwrap();\n\n}\n", "file_path": "crates/server/src/config.rs", "rank": 28, "score": 139500.77154602602 }, { "content": "pub fn config() -> Cfg {\n\n (*self::CONFIG).clone()\n\n}\n\n\n\nlazy_static! {\n\n pub static ref CONFIG: Cfg = {\n\n dbg!(&envy::from_env::<Cfg>());\n\n match envy::from_env::<Cfg>() {\n\n Ok(val) => val,\n\n Err(err) => {\n\n println!(\"{}\", err);\n\n std::process::exit(1)\n\n }\n\n }\n\n };\n\n}\n", "file_path": "crates/oauth_google/src/config.rs", "rank": 29, "score": 137900.67760263887 }, { "content": "pub fn config() -> Cfg {\n\n (*self::CONFIG).clone()\n\n}\n\n\n\nlazy_static! 
{\n\n pub static ref CONFIG: Cfg = envy::from_env::<Cfg>().unwrap();\n\n}\n", "file_path": "crates/send_slack/src/config.rs", "rank": 30, "score": 137900.67760263887 }, { "content": "pub fn song_data_upload_route(\n\n db_pool: &MySqlPool,\n\n song_data: &SongData,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n warp::post()\n\n .and(path!(\"upload\" / \"song_data\"))\n\n .and(with_db(db_pool))\n\n .and(with_song_data(song_data))\n\n .and(receive_sqlite_file())\n\n .and_then(upload_song_data_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn play_data_upload_handler<C: SaveScoreData + AccountByGoogleId>(\n\n repository: C,\n\n form: FormData,\n\n account: Account,\n\n) -> Result<impl Reply, Rejection> {\n\n let mut score_db = NamedTempFile::new().map_err(HandleError::from)?;\n\n let mut scorelog_db = NamedTempFile::new().map_err(HandleError::from)?;\n", "file_path": "crates/server/src/handler/upload.rs", "rank": 31, "score": 137144.61269979784 }, { "content": "pub fn custom_table_body(\n\n db_pool: &MySqlPool,\n\n tables: &Tables,\n\n song_data: &SongData,\n\n) -> BoxedFilter<(impl Reply,)> {\n\n warp::get()\n\n .and(path!(\"recommend_table\" / i32 / usize / \"score.json\"))\n\n .and(with_table(tables))\n\n .and(with_db(db_pool))\n\n .and(with_song_data(song_data))\n\n .and_then(body_handler)\n\n .boxed()\n\n}\n\n\n\nasync fn body_handler<C: AccountByUserId + ScoresByAccount>(\n\n user_id: i32,\n\n table_index: usize,\n\n tables: Tables,\n\n repos: C,\n\n song_data: SongData,\n", "file_path": "crates/server/src/handler/custom_table.rs", "rank": 32, "score": 137144.61269979784 }, { "content": "pub fn get_db_pool() -> MySqlPool {\n\n Pool::builder().build_unchecked(ConnectionManager::new(config::config().mysql_url))\n\n}\n\n\n\npub struct MySQLClient {\n\n connection: MySqlPooledConnection,\n\n}\n\n\n\nimpl MySQLClient {\n\n pub fn new(connection: MySqlPooledConnection) -> Self {\n\n Self { connection }\n\n }\n\n\n\n fn score_log(&self, account: &Account) -> Result<HashMap<ScoreId, 
SnapShots>, Error> {\n\n let records = models::ScoreSnap::by_user_id(&self.connection, account.user_id().get())?;\n\n let mut map: HashMap<ScoreId, SnapShots> = HashMap::new();\n\n for row in records {\n\n let song_id = ScoreId::new(row.sha256.parse().unwrap(), PlayMode::from(row.mode));\n\n let snap = SnapShot::from_data(\n\n row.clear,\n", "file_path": "crates/mysql/src/lib.rs", "rank": 33, "score": 134859.83010442124 }, { "content": "pub fn get_client() -> RedisResult<Connection> {\n\n let client = redis::Client::open(config().redis_url)?;\n\n client.get_connection()\n\n}\n\n\n", "file_path": "crates/server/src/session.rs", "rank": 34, "score": 132780.31871801286 }, { "content": "pub fn get_account<C: AccountByGoogleId>(\n\n repos: &C,\n\n user_id: GoogleId,\n\n) -> Result<Account, HandleError> {\n\n Ok(repos.user(&user_id)?)\n\n}\n\n\n\npub async fn get_account_by_session<C: AccountByGoogleId>(\n\n repos: C,\n\n key: String,\n\n) -> Result<Account, Rejection> {\n\n let user_id = get_user_id(&key)?;\n\n Ok(get_account(&repos, user_id)?)\n\n}\n\n\n", "file_path": "crates/server/src/session.rs", "rank": 35, "score": 131334.36237444624 }, { "content": "pub fn remove_session(key: &str) -> Result<(), HandleError> {\n\n let mut redis_connection = get_client()?;\n\n let _ = redis_connection.del(key)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/server/src/session.rs", "rank": 36, "score": 126290.12089907826 }, { "content": "pub fn save_user_id(user_id: GoogleId) -> Result<String> {\n\n let mut redis_connection = get_client()?;\n\n let key = generate_session_key();\n\n let _: String = redis_connection.set_ex(key.clone(), user_id.to_string(), EXPIRE_SECONDS)?;\n\n Ok(key)\n\n}\n\n\n", "file_path": "crates/server/src/session.rs", "rank": 37, "score": 123621.80704598778 }, { "content": "pub fn get_user_id(key: &str) -> Result<GoogleId, HandleError> {\n\n let mut redis_connection = get_client()?;\n\n Ok(GoogleId::new(\n\n 
redis_connection.get(key).map_err(SessionError)?,\n\n ))\n\n}\n\n\n", "file_path": "crates/server/src/session.rs", "rank": 38, "score": 120743.04278109263 }, { "content": "pub fn send(channel: String, title: String, content: String) -> anyhow::Result<String> {\n\n use reqwest::blocking::multipart::Form;\n\n use reqwest::blocking::Client;\n\n\n\n let mut f = fs::File::create(\"buf.txt\").unwrap();\n\n f.write_all(content.as_bytes()).unwrap();\n\n let form = Form::new().file(\"file\", \"buf.txt\").unwrap();\n\n let res = Client::new()\n\n .post(\"https://slack.com/api/files.upload\")\n\n .multipart(form)\n\n .query(&[\n\n (\"token\", config::config().slack_bot_token),\n\n (\"title\", title),\n\n (\"channels\", channel),\n\n (\"pretty\", \"1\".into()),\n\n ])\n\n .send()?;\n\n let _ = fs::remove_file(\"buf.txt\");\n\n match res.text() {\n\n Err(e) => Ok(format!(\"アップロード失敗:{:?}\", e)),\n", "file_path": "crates/send_slack/src/lib.rs", "rank": 39, "score": 114182.52967405914 }, { "content": "fn default_table_urls() -> Vec<String> {\n\n [\n\n \"http://www.ribbit.xyz/bms/tables/insane.html\",\n\n \"http://www.ribbit.xyz/bms/tables/overjoy.html\",\n\n \"https://stellabms.xyz/st/table.html\",\n\n \"https://stellabms.xyz/sl/table.html\",\n\n \"http://flowermaster.web.fc2.com/lrnanido/gla/LN.html\",\n\n \"http://rattoto10.jounin.jp/table_insane.html\",\n\n \"http://rattoto10.jounin.jp/table.html\",\n\n \"http://walkure.net/hakkyou/for_glassist/bms/?lamp=easy\",\n\n ]\n\n .iter()\n\n .map(|&s| s.into())\n\n .collect()\n\n}\n\n\n", "file_path": "crates/table/src/config.rs", "rank": 40, "score": 113484.80494073742 }, { "content": "#[derive(Serialize)]\n\nstruct ErrorResponse {\n\n error: String,\n\n}\n\n\n\npub async fn handle_rejection(err: Rejection) -> std::result::Result<impl Reply, Infallible> {\n\n let (code, message): (StatusCode, String) = if err.is_not_found() {\n\n (StatusCode::NOT_FOUND, \"Not Found\".into())\n\n } else if let Some(e) = err.find::<HandleError>() {\n\n 
use HandleError::*;\n\n (\n\n match e {\n\n AuthorizationCodeIsNotFound => StatusCode::BAD_REQUEST,\n\n AccountIsNotFound(_) => StatusCode::BAD_REQUEST,\n\n AccountIsNotSelected => StatusCode::BAD_REQUEST,\n\n AccountSelectionIsInvalid(_) => StatusCode::BAD_REQUEST,\n\n WarpError(_) => StatusCode::BAD_REQUEST,\n\n FileIsNotFound => StatusCode::OK,\n\n SaveIsNotComplete => StatusCode::OK,\n\n FileIsNotDeleted => StatusCode::OK,\n\n SessionError(_) => StatusCode::UNAUTHORIZED,\n", "file_path": "crates/server/src/error.rs", "rank": 41, "score": 90483.32276364358 }, { "content": "pub trait AllSongData {\n\n fn song_data(&self) -> Result<Songs>;\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 42, "score": 81677.26748393514 }, { "content": "pub trait ScoresByAccount {\n\n fn score(&self, account: &Account) -> Result<Scores>;\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 43, "score": 81677.26748393514 }, { "content": "pub trait RenameAccount {\n\n fn rename(&self, account: &Account) -> Result<()>;\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 44, "score": 81677.26748393514 }, { "content": "pub trait SnapCmp {\n\n fn cmp(a: &SnapShot, b: &SnapShot) -> bool;\n\n}\n", "file_path": "crates/model/src/score.rs", "rank": 45, "score": 81677.26748393514 }, { "content": "pub trait HealthCheck {\n\n fn health(&self) -> Result<()>;\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 46, "score": 81677.26748393514 }, { "content": "pub trait PublishedUsers {\n\n fn fetch_users(&self) -> Result<Vec<VisibleAccount>>;\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct VisibleAccount {\n\n pub id: i32,\n\n pub name: String,\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 47, "score": 81677.26748393514 }, { "content": "pub trait ScoresBySha256 {\n\n fn score(&self, hash: &HashSha256) -> Result<RankedScore>;\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 48, "score": 81677.26748393514 }, 
{ "content": "pub trait RegisterUser {\n\n fn register(&self, profile: &GoogleProfile) -> Result<()>;\n\n}\n", "file_path": "crates/oauth_google/src/lib.rs", "rank": 49, "score": 80705.8723430584 }, { "content": "pub trait SaveScoreData {\n\n fn save_score(&self, account: &Account, score: &Scores) -> Result<()>;\n\n}\n", "file_path": "crates/repository/src/lib.rs", "rank": 50, "score": 80705.8723430584 }, { "content": "pub trait ChangeAccountVisibility {\n\n fn change_visibility(&self, account: &Account) -> Result<()>;\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 51, "score": 80705.8723430584 }, { "content": "pub trait AccountByUserId {\n\n fn user(&self, user_id: i32) -> Result<Account>;\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 52, "score": 80705.8723430584 }, { "content": "pub trait SaveSongData {\n\n fn save_song(&self, songs: &Songs) -> Result<()>;\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 53, "score": 80705.8723430584 }, { "content": "pub trait AccountByGoogleId {\n\n fn user(&self, google_id: &GoogleId) -> Result<Account>;\n\n}\n\n\n", "file_path": "crates/repository/src/lib.rs", "rank": 54, "score": 80705.8723430584 }, { "content": "fn cors_header() -> Builder {\n\n warp::cors()\n\n .allow_any_origin()\n\n .allow_methods(vec![\"GET\", \"POST\", \"OPTIONS\"])\n\n .allow_headers(vec![\n\n \"x-requested-with\",\n\n \"origin\",\n\n \"referer\",\n\n \"x-csrftoken\",\n\n \"oauth-token\",\n\n \"content-type\",\n\n \"content-length\",\n\n \"accept\",\n\n \"accept-encoding\",\n\n \"accept-language\",\n\n \"user-agent\",\n\n crate::session::SESSION_KEY,\n\n ])\n\n}\n", "file_path": "crates/server/src/routes.rs", "rank": 55, "score": 79846.63015876089 }, { "content": "fn client_url() -> String {\n\n \"http://localhost:8080\".into()\n\n}\n\n\n", "file_path": "crates/server/src/config.rs", "rank": 56, "score": 79846.63015876089 }, { "content": "fn mysql_url() -> String {\n\n 
\"mysql://root:root@mysql:3306/user_data\".into()\n\n}\n\n\n", "file_path": "crates/mysql/src/config.rs", "rank": 57, "score": 79846.63015876089 }, { "content": "fn client_domain() -> String {\n\n \"localhost\".into()\n\n}\n\n\n", "file_path": "crates/server/src/config.rs", "rank": 58, "score": 79846.63015876089 }, { "content": "fn redis_url() -> String {\n\n \"redis://session-redis:6379\".into()\n\n}\n\n\n", "file_path": "crates/server/src/config.rs", "rank": 59, "score": 79846.63015876089 }, { "content": "pub trait CanGetHash {\n\n fn hash_sha256(&self) -> String;\n\n}\n", "file_path": "crates/mysql/src/models/scores.rs", "rank": 60, "score": 79767.26585143484 }, { "content": "fn slack_channel() -> String {\n\n \"\".into()\n\n}\n\n\n", "file_path": "crates/send_slack/src/config.rs", "rank": 61, "score": 78879.08681764404 }, { "content": "fn client_url() -> String {\n\n \"http://localhost:8080\".into()\n\n}\n\n\n", "file_path": "crates/oauth_google/src/config.rs", "rank": 62, "score": 78879.08681764404 }, { "content": "fn tls_cert_path() -> String {\n\n \"./files/cert.pem\".into()\n\n}\n\n\n", "file_path": "crates/server/src/config.rs", "rank": 63, "score": 78879.08681764404 }, { "content": "fn tls_key_path() -> String {\n\n \"./files/key.rsa\".into()\n\n}\n\n\n", "file_path": "crates/server/src/config.rs", "rank": 64, "score": 78879.08681764404 }, { "content": "fn client_domain() -> String {\n\n \"localhost\".into()\n\n}\n\n\n", "file_path": "crates/oauth_google/src/config.rs", "rank": 65, "score": 78879.08681764404 }, { "content": "fn generate_session_key() -> String {\n\n use rand::prelude::*;\n\n use rand_chacha::ChaCha20Rng;\n\n\n\n let mut csp_rng = ChaCha20Rng::from_entropy();\n\n let mut data = [0u8; 32];\n\n csp_rng.fill_bytes(&mut data);\n\n join(&data)\n\n}\n\n\n", "file_path": "crates/server/src/session.rs", "rank": 66, "score": 78879.08681764404 }, { "content": "pub trait ParamSnap: SnapCmp {\n\n fn make(current: &SnapShot, updated_at: UpdatedAt, 
before_snap: Option<&SnapShot>) -> Self;\n\n}\n", "file_path": "crates/model/src/score.rs", "rank": 67, "score": 78587.09989084581 }, { "content": "fn slack_bot_token() -> String {\n\n \"\".into()\n\n}\n\n\n", "file_path": "crates/send_slack/src/config.rs", "rank": 68, "score": 77944.20211142674 }, { "content": "fn slack_file_name() -> String {\n\n \"\".into()\n\n}\n\n\n", "file_path": "crates/send_slack/src/config.rs", "rank": 69, "score": 77944.20211142674 }, { "content": "fn google_oauth_client_id() -> String {\n\n \"hogehogehogehoge.apps.googleusercontent.com\".into()\n\n}\n\n\n", "file_path": "crates/oauth_google/src/config.rs", "rank": 70, "score": 77040.34993571806 }, { "content": "fn google_oauth_redirect_uri() -> String {\n\n \"https://localhost:4431/oauth\".into()\n\n}\n\n\n", "file_path": "crates/oauth_google/src/config.rs", "rank": 71, "score": 77040.34993571806 }, { "content": "fn google_oauth_client_secret() -> String {\n\n \"client secret\".into()\n\n}\n\n\n", "file_path": "crates/oauth_google/src/config.rs", "rank": 72, "score": 77040.34993571806 }, { "content": "fn join(data: &[u8]) -> String {\n\n data.iter().map(|u| u.to_string()).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test() {\n\n let data = [1u8, 2, 3, 4, 5, 6, 7, 8, 123];\n\n assert_eq!(\"12345678123\".to_string(), join(&data))\n\n }\n\n}\n", "file_path": "crates/server/src/session.rs", "rank": 73, "score": 76769.09537200794 }, { "content": "fn change_visibility<C: ChangeAccountVisibility>(\n\n repos: &C,\n\n account: &Account,\n\n) -> Result<(), HandleError> {\n\n Ok(repos.change_visibility(account)?)\n\n}\n", "file_path": "crates/server/src/handler/change_visibility.rs", "rank": 74, "score": 74215.1445505754 }, { "content": "fn make_levels(header: &Header, charts: Vec<Chart>) -> TableLevels {\n\n let charts = Charts::make(charts.into_iter().map(|c| c.into()).collect());\n\n let order = match &header.level_order {\n\n Some(s) => 
s.clone(),\n\n None => charts.get_levels().iter().map(Level::to_string).collect(),\n\n };\n\n charts.make_levels(&order)\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum TableParseError {\n\n #[error(\"指定された難易度表URLにアクセスできませんでした: {0:?}\")]\n\n FailedToAccessTableURL(reqwest::Error),\n\n #[error(\"指定された難易度表URLからのレスポンスが取得できませんでした: {0:?}\")]\n\n FailedToGetTableURL(reqwest::Error),\n\n #[error(\"指定された難易度表に有効なmetaタグが設定されていません: {0:?}\")]\n\n NotFoundCSS(anyhow::Error),\n\n #[error(\"難易度表URLが不正です: {0:?}\")]\n\n InvalidURL(url::ParseError),\n\n #[error(\"難易度表にヘッダURLがありません\")]\n", "file_path": "crates/table/src/lib.rs", "rank": 75, "score": 69851.7459013801 }, { "content": "fn make_token_request_body(code: String) -> HashMap<&'static str, String> {\n\n let mut body = HashMap::new();\n\n body.insert(\"client_id\", config().google_oauth_client_id);\n\n body.insert(\"client_secret\", config().google_oauth_client_secret);\n\n body.insert(\"redirect_uri\", config().google_oauth_redirect_uri);\n\n body.insert(\"code\", code);\n\n body.insert(\"grant_type\", \"authorization_code\".to_string());\n\n body\n\n}\n\n\n\nasync fn token_request(body: HashMap<&str, String>) -> Result<Map<String, Value>, Error> {\n\n let res = reqwest::Client::new()\n\n .post(\"https://accounts.google.com/o/oauth2/token\")\n\n .json(&body)\n\n .send()\n\n .await\n\n .map_err(Error::ReqwestError)?;\n\n let body = res.text().await.map_err(Error::ReqwestError)?;\n\n\n\n let json: serde_json::Value = serde_json::from_str(&body).map_err(Error::SerdeJsonError)?;\n\n Ok(json.as_object().unwrap().clone())\n\n}\n\n\n", "file_path": "crates/oauth_google/src/lib.rs", "rank": 76, "score": 67541.70433515038 }, { "content": "fn make_google_profile(payload: &Map<String, Value>) -> Result<GoogleProfile, Error> {\n\n let user_id = payload\n\n .get(&\"sub\".to_string())\n\n .ok_or_else(|| Error::GoogleResponseIsInvalid(\"subject is not found in payload\".into()))?\n\n .to_string()\n\n .replace(\"\\\"\", \"\");\n\n let email 
= payload\n\n .get(&\"email\".to_string())\n\n .ok_or_else(|| Error::GoogleResponseIsInvalid(\"email is not found in payload\".into()))?\n\n .to_string()\n\n .replace(\"\\\"\", \"\");\n\n let name = \"default_name\".to_string();\n\n log::info!(\"{} {}\", user_id, email);\n\n Ok(GoogleProfile {\n\n user_id,\n\n email,\n\n name,\n\n })\n\n}\n\n\n", "file_path": "crates/oauth_google/src/lib.rs", "rank": 77, "score": 66824.41355569911 }, { "content": "fn make_data_url(header_url: &Url, header: &Header) -> Result<Url, TableParseError> {\n\n header_url\n\n .join(header.data_url.as_ref())\n\n .map_err(InvalidDataURL)\n\n}\n\n\n\nasync fn get_charts(url: &Url) -> Result<Vec<Chart>, TableParseError> {\n\n let data_text = reqwest::get(&url.to_string())\n\n .await\n\n .map_err(FailedToAccessDataURL)?\n\n .text()\n\n .await\n\n .map_err(FailedToGetDataURL)?;\n\n let data_text = data_text.trim_start_matches('\\u{feff}');\n\n Ok(serde_json::from_str(data_text).map_err(FailedToParseData)?)\n\n}\n\n\n", "file_path": "crates/table/src/lib.rs", "rank": 78, "score": 66100.19847626721 }, { "content": "fn get_payload(obj: &Map<String, Value>) -> Result<Map<String, Value>, Error> {\n\n let token = &obj\n\n .get(&\"id_token\".to_string())\n\n .ok_or_else(|| Error::GoogleResponseIsInvalid(\"id_token is not found\".into()))?\n\n .to_string()\n\n .replace(\"\\\"\", \"\")\n\n .replace(\",\", \"\");\n\n\n\n let mut segments = token.split('.');\n\n let _encoded_header = segments\n\n .next()\n\n .ok_or_else(|| Error::GoogleResponseIsInvalid(\"could not get first segment\".into()))?;\n\n let encoded_payload = segments\n\n .next()\n\n .ok_or_else(|| Error::GoogleResponseIsInvalid(\"could not get second segment\".into()))?;\n\n\n\n let payload_string = String::from_utf8(\n\n base64::decode_config(&encoded_payload, base64::URL_SAFE_NO_PAD).map_err(|_| {\n\n Error::GoogleResponseIsInvalid(\"payload is not encoded in base64 \".into())\n\n })?,\n", "file_path": "crates/oauth_google/src/lib.rs", 
"rank": 79, "score": 65542.11704531385 }, { "content": "fn rename_account<C: RenameAccount>(repos: &C, account: &Account) -> Result<(), HandleError> {\n\n Ok(repos.rename(account)?)\n\n}\n", "file_path": "crates/server/src/handler/change_name.rs", "rank": 80, "score": 64732.538179259485 }, { "content": "use crate::*;\n\nuse std::cmp::Ordering;\n\n\n\n#[derive(Clone, Debug, Deserialize, Serialize, Default)]\n\npub struct SnapShot {\n\n pub clear_type: ClearType,\n\n pub score: ExScore,\n\n pub max_combo: MaxCombo,\n\n pub min_bp: MinBP,\n\n pub updated_at: UpdatedAt,\n\n}\n\n\n\nimpl SnapShot {\n\n pub fn from_data(\n\n clear_type: i32,\n\n score: i32,\n\n combo: i32,\n\n minbp: i32,\n\n timestamp: i64,\n\n ) -> SnapShot {\n", "file_path": "crates/model/src/score/snapshot.rs", "rank": 82, "score": 59634.302293575114 }, { "content": " SnapShot {\n\n clear_type: ClearType::from_integer(clear_type),\n\n score: ExScore::from_score(score),\n\n max_combo: MaxCombo::from_combo(combo),\n\n min_bp: MinBP::from_bp(minbp),\n\n updated_at: UpdatedAt::from_timestamp(timestamp),\n\n }\n\n }\n\n\n\n pub fn score(&self) -> ExScore {\n\n self.score\n\n }\n\n pub fn clear_type(&self) -> &ClearType {\n\n &self.clear_type\n\n }\n\n}\n\n\n\nimpl PartialEq for SnapShot {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.updated_at == other.updated_at\n", "file_path": "crates/model/src/score/snapshot.rs", "rank": 85, "score": 59619.62435363857 }, { "content": " }\n\n}\n\n\n\nimpl Eq for SnapShot {}\n\n\n\nimpl PartialOrd for SnapShot {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n self.updated_at.partial_cmp(&other.updated_at)\n\n }\n\n}\n\n\n\nimpl Ord for SnapShot {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n self.updated_at.cmp(&other.updated_at)\n\n }\n\n}\n", "file_path": "crates/model/src/score/snapshot.rs", "rank": 86, "score": 59619.3798445582 }, { "content": "use chrono::NaiveDateTime;\n\nuse serde::Serialize;\n\n\n\n#[derive(Clone, Debug, 
Serialize)]\n\npub struct RegisteredDate(NaiveDateTime);\n\n\n\nimpl RegisteredDate {\n\n pub fn new(date: NaiveDateTime) -> Self {\n\n Self(date)\n\n }\n\n\n\n pub fn to_naive_date_time(&self) -> NaiveDateTime {\n\n self.0\n\n }\n\n}\n", "file_path": "crates/model/src/account/registered_date.rs", "rank": 90, "score": 58738.73480728226 }, { "content": "alter table songs add column features int not null default 0;", "file_path": "migrations/2021-01-10-081241_add_features_to_score/up.sql", "rank": 91, "score": 56531.11679266114 }, { "content": " fn cmp(a: &SnapShot, b: &SnapShot) -> bool {\n\n a.score >= b.score\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Clone, Debug, Default)]\n\npub struct MinBPSnap {\n\n pub current: MinBP,\n\n pub updated_at: UpdatedAt,\n\n pub before: MinBP,\n\n}\n\n\n\nimpl ParamSnap for MinBPSnap {\n\n fn make(current: &SnapShot, updated_at: UpdatedAt, before_snap: Option<&SnapShot>) -> Self {\n\n MinBPSnap {\n\n current: current.min_bp,\n\n updated_at,\n\n before: match before_snap {\n\n Some(s) => s.min_bp,\n\n None => Default::default(),\n", "file_path": "crates/model/src/score.rs", "rank": 92, "score": 20852.3563844522 }, { "content": " },\n\n }\n\n }\n\n}\n\n\n\nimpl SnapCmp for MinBPSnap {\n\n fn cmp(a: &SnapShot, b: &SnapShot) -> bool {\n\n a.min_bp <= b.min_bp\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Clone, Debug, Default)]\n\npub struct ClearTypeSnap {\n\n pub current: i32,\n\n pub updated_at: UpdatedAt,\n\n pub before: i32,\n\n}\n\n\n\nimpl ParamSnap for ClearTypeSnap {\n\n fn make(current: &SnapShot, updated_at: UpdatedAt, before_snap: Option<&SnapShot>) -> Self {\n", "file_path": "crates/model/src/score.rs", "rank": 93, "score": 20850.576232679978 }, { "content": "use crate::{Judge, PlayCount, UpdatedAt};\n\nuse serde::Serialize;\n\nuse std::cmp::Ordering;\n\n\n\n#[derive(Debug)]\n\npub struct PlayerStates {\n\n log: Vec<PlayerState>,\n\n}\n\n\n\nimpl PlayerStates {\n\n pub fn new(log: Vec<PlayerState>) -> 
PlayerStates {\n\n PlayerStates { log }\n\n }\n\n\n\n pub fn last(&self) -> Option<&PlayerState> {\n\n self.log.iter().last()\n\n }\n\n\n\n pub fn diff(&self) -> Vec<PlayerStateDiff> {\n\n let mut log = self.log.clone();\n", "file_path": "crates/model/src/player.rs", "rank": 94, "score": 20850.316855585977 }, { "content": "mod gmail_address;\n\nmod google_id;\n\nmod prelude;\n\nmod registered_date;\n\nmod user_id;\n\nmod user_name;\n\nmod visibility;\n\n\n\npub use prelude::*;\n\n\n\nuse chrono::NaiveDateTime;\n\nuse serde::Serialize;\n\n\n\n#[derive(Debug, Clone, Serialize)]\n\npub struct Account {\n\n pub user_id: UserId,\n\n pub google_id: GoogleId,\n\n pub gmail_address: GmailAddress,\n\n pub name: UserName,\n\n pub registered_date: RegisteredDate,\n", "file_path": "crates/model/src/account.rs", "rank": 95, "score": 20850.139697777737 }, { "content": " total_judge,\n\n }\n\n }\n\n\n\n pub fn cmp_by_date(&self, other: &PlayerState) -> Ordering {\n\n self.date.cmp(&other.date)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize)]\n\npub struct PlayerStateDiff {\n\n before_date: UpdatedAt,\n\n after_date: UpdatedAt,\n\n play_count: PlayCount,\n\n clear_count: PlayCount,\n\n play_time: PlayTime,\n\n total_judge: TotalJudge,\n\n}\n\n\n\nimpl PlayerStateDiff {\n", "file_path": "crates/model/src/player.rs", "rank": 96, "score": 20848.74858255243 }, { "content": "mod chart;\n\nmod charts;\n\nmod prelude;\n\n\n\npub use prelude::*;\n\n\n\nuse crate::*;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TableSource {\n\n v: HashMap<usize, String>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TableId(i64);\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Tables {\n\n v: HashMap<usize, Table>,\n\n}\n", "file_path": "crates/model/src/table.rs", "rank": 97, "score": 20848.337524538703 }, { "content": "\n\nimpl From<TableName> for String {\n\n fn from(name: TableName) -> Self {\n\n name.0\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TableSymbol(String);\n\n\n\nimpl 
From<TableSymbol> for String {\n\n fn from(symbol: TableSymbol) -> Self {\n\n symbol.0\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TableLevels {\n\n v: Vec<TableLevel>,\n\n}\n", "file_path": "crates/model/src/table.rs", "rank": 98, "score": 20848.18887449388 }, { "content": "#[derive(Debug, Clone, Serialize)]\n\npub struct PlayTime(i32);\n\n\n\nimpl PlayTime {\n\n pub fn new(seconds: i32) -> PlayTime {\n\n PlayTime(seconds)\n\n }\n\n}\n\n\n\nimpl std::ops::Sub<PlayTime> for PlayTime {\n\n type Output = PlayTime;\n\n fn sub(self, rhs: PlayTime) -> PlayTime {\n\n PlayTime::new(self.0 - rhs.0)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize)]\n\npub struct TotalJudge(Judge);\n\n\n\nimpl TotalJudge {\n", "file_path": "crates/model/src/player.rs", "rank": 99, "score": 20847.614571169575 } ]
Rust
examples/dlint/config.rs
cdaringe/deno_lint
5ca58cd3f230ba95c75e261192414ee0bb9a758f
use anyhow::bail; use anyhow::Error as AnyError; use deno_lint::rules::{get_filtered_rules, LintRule}; use serde::Deserialize; use std::path::Path; use std::path::PathBuf; #[derive(Debug, Default, Deserialize)] #[serde(default)] pub struct RulesConfig { pub tags: Vec<String>, pub include: Vec<String>, pub exclude: Vec<String>, } #[derive(Debug, Default, Deserialize)] #[serde(default)] pub struct FilesConfig { pub include: Vec<String>, pub exclude: Vec<String>, } #[derive(Debug, Default, Deserialize)] #[serde(default)] pub struct Config { pub rules: RulesConfig, pub files: FilesConfig, } impl Config { pub fn get_rules(&self) -> Vec<Box<dyn LintRule>> { get_filtered_rules( Some(self.rules.tags.clone()), Some(self.rules.exclude.clone()), Some(self.rules.include.clone()), ) } pub fn get_files(&self) -> Result<Vec<PathBuf>, AnyError> { resolve_file_paths(&self.files) } } pub fn load_from_json(config_path: &Path) -> Result<Config, std::io::Error> { let json_str = std::fs::read_to_string(config_path)?; let config: Config = serde_json::from_str(&json_str)?; Ok(config) } fn resolve_file_paths(config: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> { let mut file_patterns = get_file_patterns(config); let absolute_paths = take_absolute_paths(&mut file_patterns); let cwd = std::env::current_dir()?; let mut file_paths = glob(&cwd, &file_patterns)?; file_paths.extend(absolute_paths); return Ok(file_paths); fn get_file_patterns(config: &FilesConfig) -> Vec<String> { let mut file_patterns = Vec::new(); file_patterns.extend(config.include.clone()); file_patterns.extend(config.exclude.clone().into_iter().map(|exclude| { if exclude.starts_with('!') { exclude } else { format!("!{}", exclude) } })); for file_pattern in file_patterns.iter_mut() { if file_pattern.starts_with("./") { *file_pattern = String::from(&file_pattern[2..]); } if file_pattern.starts_with("!./") { *file_pattern = format!("!{}", &file_pattern[3..]); } } file_patterns } fn take_absolute_paths(file_patterns: &mut 
Vec<String>) -> Vec<PathBuf> { let len = file_patterns.len(); let mut file_paths = Vec::new(); for i in (0..len).rev() { if is_absolute_path(&file_patterns[i]) { file_paths.push(PathBuf::from(file_patterns.swap_remove(i))); } } file_paths } fn is_absolute_path(file_pattern: &str) -> bool { return !has_glob_chars(file_pattern) && PathBuf::from(file_pattern).is_absolute(); fn has_glob_chars(text: &str) -> bool { for c in text.chars() { match c { '*' | '{' | '}' | '[' | ']' | '!' => return true, _ => {} } } false } } } fn glob( base: &Path, file_patterns: &[String], ) -> Result<Vec<PathBuf>, AnyError> { let base = base.canonicalize()?; let walker = globwalk::GlobWalkerBuilder::from_patterns(base, file_patterns) .follow_links(false) .file_type(globwalk::FileType::FILE) .build(); let walker = match walker { Ok(walker) => walker, Err(err) => bail!("Error parsing file patterns: {}", err), }; let mut file_paths = Vec::new(); for result in walker.into_iter() { match result { Ok(result) => file_paths.push(result.into_path()), Err(err) => bail!("Error walking files: {}", err), } } Ok(file_paths) } #[cfg(test)] mod tests { use super::*; use deno_lint::rules::get_recommended_rules; use std::collections::HashSet; macro_rules! svec { ($( $elem:literal ),* $(,)?) => {{ vec![$( $elem.to_string() ),*] }} } macro_rules! set { ($( $elem:literal ),* $(,)?) 
=> {{ vec![$( $elem ),*].into_iter().collect::<HashSet<&'static str>>() }} } fn into_codes(rules: Vec<Box<dyn LintRule>>) -> HashSet<&'static str> { rules.iter().map(|rule| rule.code()).collect() } #[test] fn test_get_rules() { let config = Config { rules: RulesConfig { tags: svec![], include: svec![], exclude: svec![], }, ..Default::default() }; assert!(config.get_rules().is_empty()); let config = Config { rules: RulesConfig { tags: svec!["recommended"], include: svec![], exclude: svec![], }, ..Default::default() }; let recommended_rules_codes = into_codes(get_recommended_rules()); assert_eq!(into_codes(config.get_rules()), recommended_rules_codes); let config = Config { rules: RulesConfig { tags: svec!["recommended"], include: svec!["no-empty"], exclude: svec![], }, ..Default::default() }; let recommended_rules_codes = into_codes(get_recommended_rules()); assert_eq!(into_codes(config.get_rules()), recommended_rules_codes); let config = Config { rules: RulesConfig { tags: svec![], include: svec!["eqeqeq"], exclude: svec!["eqeqeq"], }, ..Default::default() }; assert_eq!(into_codes(config.get_rules()), set!["eqeqeq"]); let config = Config { rules: RulesConfig { tags: svec![], include: svec!["this-is-a-totally-unknown-rule"], exclude: svec!["this-is-also-another-unknown-rule"], }, ..Default::default() }; assert_eq!(into_codes(config.get_rules()), set![]); } }
use anyhow::bail; use anyhow::Error as AnyError; use deno_lint::rules::{get_filtered_rules, LintRule}; use serde::Deserialize; use std::path::Path; use std::path::PathBuf; #[derive(Debug, Default, Deserialize)] #[serde(default)] pub struct RulesConfig { pub tags: Vec<String>, pub include: Vec<String>, pub exclude: Vec<String>, } #[derive(Debug, Default, Deserialize)] #[serde(default)] pub struct FilesConfig { pub include: Vec<String>, pub exclude: Vec<String>, } #[derive(Debug, Default, Deserialize)] #[serde(default)] pub struct Config { pub rules: RulesConfig, pub files: FilesConfig, } impl Config { pub fn get_rules(&self) -> Vec<Box<dyn LintRule>> { get_filtered_rules( Some(self.rules.tags.clone()), Some(self.rules.exclude.clone()), Some(self.rules.include.clone()), ) } pub fn get_files(&self) -> Result<Vec<PathBuf>, AnyError> { resolve_file_paths(&self.files) } } pub fn load_from_json(config_path: &Path) -> Result<Config, std::io::Error> { let json_str = std::fs::read_to_string(config_path)?; let config: Config = serde_json::from_str(&json_str)?; Ok(config) } fn resolve_file_paths(config: &FilesConfig) -> Result<Vec<PathBuf>, AnyError> { let mut file_patterns = get_file_patterns(config); let absolute_paths = take_absolute_paths(&mut file_patterns); let cwd = std::env::current_dir()?; let mut file_paths = glob(&cwd, &file_patterns)?; file_paths.extend(absolute_paths); return Ok(file_paths); fn get_file_patterns(config: &FilesConfig) -> Vec<String> { let mut file_patterns = Vec::new(); file_patterns.extend(config.include.clone()); file_patterns.extend(config.exclude.clone().into_iter().map(|exclude| { if exclude.starts_with('!') { exclude } else { format!("!{}", exclude) } })); for file_pattern in file_patterns.iter_mut() { if file_pattern.starts_with("./") { *file_pattern = String::from(&file_pattern[2..]); } if file_pattern.starts_with("!./") { *file_pattern = format!("!{}", &file_pattern[3..]); } } file_patterns } fn take_absolute_paths(file_patterns: &mut 
Vec<String>) -> Vec<PathBuf> { let len = file_patterns.len(); let mut file_paths = Vec::new(); for i in (0..len).rev() { if is_absolute_path(&file_patterns[i]) { file_paths.push(PathBuf::from(file_patterns.swap_remove(i))); } } file_paths } fn is_absolute_path(file_pattern: &str) -> bool { return !has_glob_chars(file_pattern) && PathBuf::from(file_pattern).is_absolute(); fn has_glob_chars(text: &str) -> bool { for c in text.chars() { match c { '*' | '{' | '}' | '[' | ']' | '!' => return true, _ => {} } } false } } } fn glob( base: &Path, file_patterns: &[String], ) -> Result<Vec<PathBuf>, AnyError> { let base = base.canonicalize()?; let walker = globwalk::GlobWalkerBuilder::from_patterns(base, file_patterns) .follow_links(false) .file_type(globwalk::FileType::FILE) .build();
let mut file_paths = Vec::new(); for result in walker.into_iter() { match result { Ok(result) => file_paths.push(result.into_path()), Err(err) => bail!("Error walking files: {}", err), } } Ok(file_paths) } #[cfg(test)] mod tests { use super::*; use deno_lint::rules::get_recommended_rules; use std::collections::HashSet; macro_rules! svec { ($( $elem:literal ),* $(,)?) => {{ vec![$( $elem.to_string() ),*] }} } macro_rules! set { ($( $elem:literal ),* $(,)?) => {{ vec![$( $elem ),*].into_iter().collect::<HashSet<&'static str>>() }} } fn into_codes(rules: Vec<Box<dyn LintRule>>) -> HashSet<&'static str> { rules.iter().map(|rule| rule.code()).collect() } #[test] fn test_get_rules() { let config = Config { rules: RulesConfig { tags: svec![], include: svec![], exclude: svec![], }, ..Default::default() }; assert!(config.get_rules().is_empty()); let config = Config { rules: RulesConfig { tags: svec!["recommended"], include: svec![], exclude: svec![], }, ..Default::default() }; let recommended_rules_codes = into_codes(get_recommended_rules()); assert_eq!(into_codes(config.get_rules()), recommended_rules_codes); let config = Config { rules: RulesConfig { tags: svec!["recommended"], include: svec!["no-empty"], exclude: svec![], }, ..Default::default() }; let recommended_rules_codes = into_codes(get_recommended_rules()); assert_eq!(into_codes(config.get_rules()), recommended_rules_codes); let config = Config { rules: RulesConfig { tags: svec![], include: svec!["eqeqeq"], exclude: svec!["eqeqeq"], }, ..Default::default() }; assert_eq!(into_codes(config.get_rules()), set!["eqeqeq"]); let config = Config { rules: RulesConfig { tags: svec![], include: svec!["this-is-a-totally-unknown-rule"], exclude: svec!["this-is-also-another-unknown-rule"], }, ..Default::default() }; assert_eq!(into_codes(config.get_rules()), set![]); } }
let walker = match walker { Ok(walker) => walker, Err(err) => bail!("Error parsing file patterns: {}", err), };
assignment_statement
[ { "content": "fn is_valid_typeof_string(str: &str) -> bool {\n\n matches!(\n\n str,\n\n \"undefined\"\n\n | \"object\"\n\n | \"boolean\"\n\n | \"number\"\n\n | \"string\"\n\n | \"function\"\n\n | \"symbol\"\n\n | \"bigint\"\n\n )\n\n}\n\n\n", "file_path": "src/rules/valid_typeof.rs", "rank": 0, "score": 329556.2115815914 }, { "content": "// TODO(@magurotuna): use MediaType instead\n\n// https://github.com/denoland/deno/blob/76e2edc7e1868d7768e259aacbb9a991e1afc462/cli/media_type.rs#L15-L26\n\nfn is_jsx_file(filename: &str) -> bool {\n\n filename.ends_with(\".jsx\") || filename.ends_with(\".tsx\")\n\n}\n\n\n\n/// Collects information about variable usages.\n", "file_path": "src/rules/no_unused_vars.rs", "rank": 1, "score": 310400.372468389 }, { "content": "fn is_path_ref(s: &str) -> bool {\n\n PATH_REFERENCE_RE.is_match(s)\n\n}\n\n\n", "file_path": "src/rules/no_invalid_triple_slash_reference.rs", "rank": 3, "score": 305141.2440810144 }, { "content": "fn is_no_default_lib_ref(s: &str) -> bool {\n\n NO_DEFAULT_LIB_REFERENCE_RE.is_match(s)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_is_path_ref() {\n\n let testcases = [\n\n (r#\"/ <reference path=\"foo\" />\"#, true),\n\n (r#\"/ <reference path='foo' />\"#, true),\n\n (r#\"/ <reference path = \"foo\" />\"#, true),\n\n (r#\"/ <reference ppath = \"foo\" />\"#, false),\n\n ];\n\n\n\n for (input, expected) in &testcases {\n\n assert_eq!(*expected, is_path_ref(input));\n\n }\n", "file_path": "src/rules/no_invalid_triple_slash_reference.rs", "rank": 4, "score": 300095.8511743017 }, { "content": "pub fn colorize_markdown(input: &str) -> String {\n\n let mut options = Options::empty();\n\n options.insert(Options::ENABLE_STRIKETHROUGH);\n\n let parser = Parser::new_ext(input, options);\n\n let colorizer = MarkdownColorizer::new();\n\n colorizer.run(parser)\n\n}\n\n\n\nconst RESET_CODE: &str = \"\\u{001b}[0m\";\n\n\n", "file_path": "examples/dlint/color.rs", "rank": 5, "score": 
296516.0679842852 }, { "content": "pub fn get_specific_rule_metadata(rule_name: &str) -> Vec<Rule> {\n\n get_all_rules_metadata()\n\n .into_iter()\n\n .filter(|r| r.code == rule_name)\n\n .collect()\n\n}\n\n\n", "file_path": "examples/dlint/rules.rs", "rank": 6, "score": 286119.7940634725 }, { "content": "pub fn print_rules<F: RuleFormatter>(mut rules: Vec<Rule>) {\n\n #[cfg(windows)]\n\n ansi_term::enable_ansi_support().expect(\"Failed to enable ANSI support\");\n\n\n\n match F::format(&mut rules) {\n\n Err(e) => {\n\n eprintln!(\"{}\", e);\n\n std::process::exit(1);\n\n }\n\n Ok(text) => {\n\n println!(\"{}\", text);\n\n }\n\n }\n\n}\n\npub enum JsonFormatter {}\n\npub enum PrettyFormatter {}\n\n\n", "file_path": "examples/dlint/rules.rs", "rank": 8, "score": 275089.9353895264 }, { "content": "pub fn parse_and_then(source_code: &str, test: impl Fn(ast_view::Program)) {\n\n let (source_file, program, leading_comments, trailing_comments, tokens) =\n\n parse(source_code);\n\n let program_info = ast_view::ProgramInfo {\n\n program: (&program).into(),\n\n source_file: Some(&source_file),\n\n tokens: Some(&tokens),\n\n comments: Some(ast_view::Comments {\n\n leading: &leading_comments,\n\n trailing: &trailing_comments,\n\n }),\n\n };\n\n\n\n ast_view::with_ast_view(program_info, |pg| {\n\n test(pg);\n\n });\n\n}\n", "file_path": "src/test_util.rs", "rank": 9, "score": 274679.5626107783 }, { "content": "/// Check if it contains underscores, except for leading and trailing ones\n\nfn is_underscored(ident_name: &str) -> bool {\n\n let trimmed_ident = ident_name.trim_matches('_');\n\n trimmed_ident.contains('_')\n\n && trimmed_ident != trimmed_ident.to_ascii_uppercase()\n\n}\n\n\n", "file_path": "src/rules/camelcase.rs", "rank": 10, "score": 270688.2079016563 }, { "content": "/// Convert the name of identifier into camel case. If the name is originally in camel case, return\n\n/// the name as it is. 
For more detail, see the test cases below.\n\nfn to_camelcase(ident_name: &str) -> String {\n\n if !is_underscored(ident_name) {\n\n return ident_name.to_string();\n\n }\n\n\n\n static UNDERSCORE_CHAR_RE: Lazy<Regex> =\n\n Lazy::new(|| Regex::new(r\"([^_])_([a-z])\").unwrap());\n\n\n\n let result = UNDERSCORE_CHAR_RE.replace_all(ident_name, |caps: &Captures| {\n\n format!(\"{}{}\", &caps[1], caps[2].to_ascii_uppercase())\n\n });\n\n\n\n if result != ident_name {\n\n return result.into_owned();\n\n }\n\n\n\n ident_name.to_ascii_uppercase()\n\n}\n\n\n", "file_path": "src/rules/camelcase.rs", "rank": 11, "score": 270632.4953989651 }, { "content": "fn create_dummy_source(plugin_path: &str) -> String {\n\n let mut dummy_source = String::new();\n\n dummy_source += &format!(\"import Plugin from '{}';\\n\", plugin_path);\n\n dummy_source += r#\"Deno.core.ops();\n\nconst rules = new Map();\n", "file_path": "examples/dlint/js.rs", "rank": 12, "score": 268893.512758545 }, { "content": "fn get_message(prop: &str) -> String {\n\n format!(\n\n \"Access to Object.prototype.{} is not allowed from target object\",\n\n prop\n\n )\n\n}\n\n\n\nimpl LintRule for NoPrototypeBuiltins {\n\n fn new() -> Box<Self> {\n\n Box::new(NoPrototypeBuiltins)\n\n }\n\n\n\n fn tags(&self) -> &'static [&'static str] {\n\n &[\"recommended\"]\n\n }\n\n\n\n fn code(&self) -> &'static str {\n\n CODE\n\n }\n\n\n", "file_path": "src/rules/no_prototype_builtins.rs", "rank": 13, "score": 266246.4733400017 }, { "content": "fn is_lib_ref(s: &str) -> bool {\n\n LIB_REFERENCE_RE.is_match(s)\n\n}\n\n\n", "file_path": "src/rules/no_invalid_triple_slash_reference.rs", "rank": 14, "score": 262140.79672518373 }, { "content": "fn is_types_ref(s: &str) -> bool {\n\n TYPES_REFERENCE_RE.is_match(s)\n\n}\n\n\n", "file_path": "src/rules/no_invalid_triple_slash_reference.rs", "rank": 15, "score": 262140.79672518373 }, { "content": "fn get_message(callee_name: &str) -> String {\n\n format!(\"`{}` call as function is not 
allowed\", callee_name)\n\n}\n\n\n\nimpl LintRule for NoObjCalls {\n\n fn new() -> Box<Self> {\n\n Box::new(NoObjCalls)\n\n }\n\n\n\n fn tags(&self) -> &'static [&'static str] {\n\n &[\"recommended\"]\n\n }\n\n\n\n fn code(&self) -> &'static str {\n\n CODE\n\n }\n\n\n\n fn lint_program<'view>(\n\n &self,\n\n context: &mut Context<'view>,\n", "file_path": "src/rules/no_obj_calls.rs", "rank": 16, "score": 262081.3314555157 }, { "content": "// TODO(@magurotuna): use MediaType instead\n\n// https://github.com/denoland/deno/blob/76e2edc7e1868d7768e259aacbb9a991e1afc462/cli/media_type.rs#L15-L26\n\nfn is_js_or_jsx(filename: &str) -> bool {\n\n filename.ends_with(\".js\")\n\n || filename.ends_with(\".mjs\")\n\n || filename.ends_with(\".cjs\")\n\n || filename.ends_with(\".jsx\")\n\n}\n\n\n", "file_path": "src/rules/no_invalid_triple_slash_reference.rs", "rank": 17, "score": 258180.12521731853 }, { "content": "fn trailing_newlines(tag: &Tag) -> String {\n\n use Tag::*;\n\n let num_newlines = match tag {\n\n Paragraph | Heading(_) | BlockQuote | CodeBlock(_) => 2,\n\n List(_)\n\n | Item\n\n | FootnoteDefinition(_)\n\n | Table(_)\n\n | TableHead\n\n | TableRow\n\n | TableCell => 1,\n\n Emphasis | Strong | Strikethrough | Link(_, _, _) | Image(_, _, _) => 0,\n\n };\n\n \"\\n\".repeat(num_newlines)\n\n}\n\n\n\nimpl MarkdownColorizer {\n\n fn new() -> MarkdownColorizer {\n\n Self {\n\n attr_stack: vec![],\n", "file_path": "examples/dlint/color.rs", "rank": 18, "score": 256836.90244285867 }, { "content": "pub fn get_default_ts_config() -> Syntax {\n\n let ts_config = TsConfig {\n\n dynamic_import: true,\n\n decorators: true,\n\n ..Default::default()\n\n };\n\n Syntax::Typescript(ts_config)\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct SwcDiagnostic {\n\n pub filename: String,\n\n pub line_display: usize,\n\n pub column_display: usize,\n\n pub message: String,\n\n}\n\n\n\nimpl Error for SwcDiagnostic {}\n\n\n\nimpl fmt::Display for SwcDiagnostic {\n", "file_path": 
"src/ast_parser.rs", "rank": 19, "score": 246031.26502846874 }, { "content": "#[allow(unused)]\n\npub fn get_default_es_config() -> Syntax {\n\n let config = EsConfig {\n\n num_sep: true,\n\n class_private_props: false,\n\n class_private_methods: false,\n\n class_props: false,\n\n export_default_from: true,\n\n export_namespace_from: true,\n\n dynamic_import: true,\n\n nullish_coalescing: true,\n\n optional_chaining: true,\n\n import_meta: true,\n\n top_level_await: true,\n\n ..Default::default()\n\n };\n\n Syntax::Es(config)\n\n}\n\n\n", "file_path": "src/ast_parser.rs", "rank": 20, "score": 246031.26502846874 }, { "content": "pub fn get_all_rules_metadata() -> Vec<Rule> {\n\n get_all_rules()\n\n .into_iter()\n\n .map(|rule| Rule {\n\n code: rule.code(),\n\n docs: rule.docs(),\n\n tags: rule.tags(),\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "examples/dlint/rules.rs", "rank": 21, "score": 238005.06633732066 }, { "content": "fn test_for_whitespace(value: &str) -> Vec<Matches> {\n\n let mut matches_vector: Vec<Matches> = vec![];\n\n if IRREGULAR_WHITESPACE.is_match(value) {\n\n let matches = IRREGULAR_WHITESPACE.find_iter(value);\n\n matches_vector.push(matches);\n\n }\n\n if IRREGULAR_LINE_TERMINATORS.is_match(value) {\n\n let matches = IRREGULAR_LINE_TERMINATORS.find_iter(value);\n\n matches_vector.push(matches);\n\n }\n\n matches_vector\n\n}\n\n\n\nimpl LintRule for NoIrregularWhitespace {\n\n fn new() -> Box<Self> {\n\n Box::new(NoIrregularWhitespace)\n\n }\n\n\n\n fn tags(&self) -> &'static [&'static str] {\n\n &[\"recommended\"]\n", "file_path": "src/rules/no_irregular_whitespace.rs", "rank": 23, "score": 224950.229749091 }, { "content": "/// Returns a list of rules after filtering.\n\n///\n\n/// Following rules are applied (in the described order):\n\n///\n\n/// - if `maybe_tags` is `None` then all defined rules are returned, otherwise\n\n/// only rules matching at least one tag will be returned; if provided list\n\n/// is empty then all rules will be 
excluded by default\n\n///\n\n/// - if `maybe_exclude` is `Some`, all rules with matching codes will\n\n/// be filtered out\n\n///\n\n/// - if `maybe_include` is `Some`, rules with matching codes will be added\n\n/// to the return list\n\n///\n\n/// Before returning the list will sorted alphabetically.\n\npub fn get_filtered_rules(\n\n maybe_tags: Option<Vec<String>>,\n\n maybe_exclude: Option<Vec<String>>,\n\n maybe_include: Option<Vec<String>>,\n\n) -> Vec<Box<dyn LintRule>> {\n\n let tags_set =\n\n maybe_tags.map(|tags| tags.into_iter().collect::<HashSet<_>>());\n\n\n\n let mut rules = get_all_rules()\n\n .into_iter()\n\n .filter(|rule| {\n\n let mut passes = if let Some(tags_set) = &tags_set {\n\n rule\n\n .tags()\n\n .iter()\n\n .any(|t| tags_set.contains(&t.to_string()))\n\n } else {\n\n true\n\n };\n\n\n", "file_path": "src/rules.rs", "rank": 24, "score": 221723.59028969312 }, { "content": "pub fn is_large_id_start(cp: char) -> bool {\n\n is_in_range(cp as u32, &LARGE_ID_START_RANGES)\n\n}\n\n\n", "file_path": "src/js_regex/unicode.rs", "rank": 25, "score": 215833.65169506046 }, { "content": "pub fn is_large_id_continue(cp: char) -> bool {\n\n is_in_range(cp as u32, &LARGE_ID_CONTINUE_RANGES)\n\n}\n\n\n", "file_path": "src/js_regex/unicode.rs", "rank": 26, "score": 215833.65169506046 }, { "content": "fn is_nan_identifier(ident: &swc_ecmascript::ast::Ident) -> bool {\n\n ident.sym == *\"NaN\"\n\n}\n\n\n\nimpl<'c, 'view> Visit for UseIsNaNVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_bin_expr(\n\n &mut self,\n\n bin_expr: &swc_ecmascript::ast::BinExpr,\n\n _parent: &dyn Node,\n\n ) {\n\n if bin_expr.op == swc_ecmascript::ast::BinaryOp::EqEq\n\n || bin_expr.op == swc_ecmascript::ast::BinaryOp::NotEq\n\n || bin_expr.op == swc_ecmascript::ast::BinaryOp::EqEqEq\n\n || bin_expr.op == swc_ecmascript::ast::BinaryOp::NotEqEq\n\n || bin_expr.op == swc_ecmascript::ast::BinaryOp::Lt\n\n || bin_expr.op == swc_ecmascript::ast::BinaryOp::LtEq\n\n || 
bin_expr.op == swc_ecmascript::ast::BinaryOp::Gt\n\n || bin_expr.op == swc_ecmascript::ast::BinaryOp::GtEq\n", "file_path": "src/rules/use_isnan.rs", "rank": 27, "score": 213083.79237978294 }, { "content": "pub fn parse_file_ignore_directives(\n\n ignore_global_directive: &str,\n\n program: ast_view::Program,\n\n) -> Option<FileIgnoreDirective> {\n\n program\n\n .comments()\n\n .unwrap()\n\n .leading_comments(program.span().lo())\n\n .find_map(|comment| parse_ignore_comment(ignore_global_directive, comment))\n\n}\n\n\n", "file_path": "src/ignore_directives.rs", "rank": 28, "score": 211838.01721776716 }, { "content": "struct GetterReturnVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n errors: BTreeMap<Span, GetterReturnMessage>,\n\n /// If this visitor is currently in a getter, its name is stored.\n\n getter_name: Option<String>,\n\n // `true` if a getter contains as least one return statement.\n\n has_return: bool,\n\n}\n\n\n\nimpl<'c, 'view> GetterReturnVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self {\n\n context,\n\n errors: BTreeMap::new(),\n\n getter_name: None,\n\n has_return: false,\n\n }\n\n }\n\n\n\n fn report(&mut self) {\n", "file_path": "src/rules/getter_return.rs", "rank": 29, "score": 210607.98324674816 }, { "content": "struct UseIsNaNVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> UseIsNaNVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n", "file_path": "src/rules/use_isnan.rs", "rank": 30, "score": 207752.78856422886 }, { "content": "struct DefaultParamLastVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> DefaultParamLastVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn report(&mut self, span: Span) {\n\n self.context.add_diagnostic_with_hint(\n\n span,\n\n CODE,\n\n 
DefaultParamLastMessage::DefaultLast,\n\n DefaultParamLastHint::MoveToEnd,\n\n );\n\n }\n\n\n\n fn check_params<'a, 'b, I>(&'a mut self, params: I)\n\n where\n", "file_path": "src/rules/default_param_last.rs", "rank": 31, "score": 204801.76697331388 }, { "content": "fn colorize_code_block(lang: CodeBlockLang, src: &str) -> String {\n\n fn decorate(s: &str, attr: Attribute) -> String {\n\n format!(\"{}{}{}\", attr.as_ansi_code(), s, RESET_CODE)\n\n }\n\n\n\n if let CodeBlockLang::Known(media_type) = lang {\n\n let mut v = Vec::new();\n\n\n\n for line in src.split('\\n') {\n\n // Ref: https://github.com/denoland/deno/blob/a0c0daac24c496e49e7c0abaae12f34723785a7d/cli/tools/repl.rs#L251-L298\n\n let mut out_line = String::from(line);\n\n for item in lex(line, media_type) {\n\n let offset = out_line.len() - line.len();\n\n let span = item.span_as_range();\n\n\n\n out_line.replace_range(\n\n span.start + offset..span.end + offset,\n\n &match item.inner {\n\n TokenOrComment::Token(token) => match token {\n\n Token::Str { .. } | Token::Template { .. 
} | Token::BackQuote => {\n", "file_path": "examples/dlint/color.rs", "rank": 32, "score": 204018.5021640452 }, { "content": "struct ExplicitFunctionReturnTypeVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> ExplicitFunctionReturnTypeVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for ExplicitFunctionReturnTypeVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_function(\n\n &mut self,\n\n function: &swc_ecmascript::ast::Function,\n\n _parent: &dyn Node,\n\n ) {\n\n if function.return_type.is_none() {\n\n self.context.add_diagnostic_with_hint(\n", "file_path": "src/rules/explicit_function_return_type.rs", "rank": 33, "score": 199239.19914548923 }, { "content": "pub fn get_all_rules() -> Vec<Box<dyn LintRule>> {\n\n vec![\n\n adjacent_overload_signatures::AdjacentOverloadSignatures::new(),\n\n ban_ts_comment::BanTsComment::new(),\n\n ban_types::BanTypes::new(),\n\n ban_unknown_rule_code::BanUnknownRuleCode::new(),\n\n ban_untagged_ignore::BanUntaggedIgnore::new(),\n\n ban_untagged_todo::BanUntaggedTodo::new(),\n\n ban_unused_ignore::BanUnusedIgnore::new(),\n\n camelcase::Camelcase::new(),\n\n constructor_super::ConstructorSuper::new(),\n\n default_param_last::DefaultParamLast::new(),\n\n eqeqeq::Eqeqeq::new(),\n\n explicit_function_return_type::ExplicitFunctionReturnType::new(),\n\n explicit_module_boundary_types::ExplicitModuleBoundaryTypes::new(),\n\n for_direction::ForDirection::new(),\n\n getter_return::GetterReturn::new(),\n\n no_array_constructor::NoArrayConstructor::new(),\n\n no_async_promise_executor::NoAsyncPromiseExecutor::new(),\n\n no_await_in_loop::NoAwaitInLoop::new(),\n", "file_path": "src/rules.rs", "rank": 34, "score": 193801.03837023737 }, { "content": "pub fn lex(source: &str, media_type: MediaType) -> Vec<LexedItem> {\n\n let source_map = SourceMap::default();\n\n let source_file = source_map.new_source_file(\n\n 
FileName::Custom(format!(\"anonymous.{}\", media_type.ext())),\n\n source.to_string(),\n\n );\n\n let comments = SingleThreadedComments::default();\n\n let lexer = Lexer::new(\n\n media_type.syntax(),\n\n TARGET,\n\n StringInput::from(source_file.as_ref()),\n\n Some(&comments),\n\n );\n\n\n\n let mut tokens: Vec<LexedItem> = lexer\n\n .map(|token| LexedItem {\n\n span: token.span,\n\n inner: TokenOrComment::Token(token.token),\n\n })\n\n .collect();\n", "file_path": "examples/dlint/lexer.rs", "rank": 35, "score": 191470.19270558245 }, { "content": "pub fn get_recommended_rules() -> Vec<Box<dyn LintRule>> {\n\n get_all_rules()\n\n .into_iter()\n\n .filter(|r| r.tags().contains(&\"recommended\"))\n\n .collect()\n\n}\n\n\n", "file_path": "src/rules.rs", "rank": 36, "score": 191130.61012530443 }, { "content": "fn check_expr_for_string_literal(expr: &Expr) -> Option<String> {\n\n if let Expr::Lit(swc_ecmascript::ast::Lit::Str(pattern_string)) = expr {\n\n let s: &str = &pattern_string.value;\n\n return Some(s.to_owned());\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/rules/no_invalid_regexp.rs", "rank": 37, "score": 189124.3273111984 }, { "content": "/// Returns `true` if the comment should be reported.\n\nfn check_comment(comment: &Comment) -> bool {\n\n if comment.kind != CommentKind::Line {\n\n return false;\n\n }\n\n\n\n let text = comment.text.to_lowercase();\n\n let text = text.trim_start();\n\n\n\n if !text.starts_with(\"todo\") {\n\n return false;\n\n }\n\n\n\n static TODO_RE: Lazy<Regex> =\n\n Lazy::new(|| Regex::new(r#\"todo\\((#|@)?\\S+\\)\"#).unwrap());\n\n\n\n if TODO_RE.is_match(text) {\n\n return false;\n\n }\n\n\n\n true\n", "file_path": "src/rules/ban_untagged_todo.rs", "rank": 38, "score": 186499.70847104423 }, { "content": "/// Returns `true` if the comment should be reported.\n\nfn check_comment(comment: &Comment) -> bool {\n\n if comment.kind != CommentKind::Line {\n\n return false;\n\n }\n\n\n\n static TSR_REGEX: Lazy<Regex> = Lazy::new(|| {\n\n 
Regex::new(r#\"^/\\s*<reference\\s*(types|path|lib)\\s*=\\s*[\"|'](.*)[\"|']\"#)\n\n .unwrap()\n\n });\n\n\n\n TSR_REGEX.is_match(&comment.text)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn triple_slash_reference_valid() {\n\n assert_lint_ok! {\n", "file_path": "src/rules/triple_slash_reference.rs", "rank": 39, "score": 186499.70847104423 }, { "content": "fn is_async_function(expr: &Expr) -> bool {\n\n match expr {\n\n Expr::Fn(fn_expr) => fn_expr.function.is_async,\n\n Expr::Arrow(arrow_expr) => arrow_expr.is_async,\n\n Expr::Paren(ParenExpr { ref expr, .. }) => is_async_function(&**expr),\n\n _ => false,\n\n }\n\n}\n\n\n\nimpl<'c, 'view> VisitAll for NoAsyncPromiseExecutorVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_new_expr(&mut self, new_expr: &NewExpr, _parent: &dyn Node) {\n\n if let Expr::Ident(ident) = &*new_expr.callee {\n\n let name = ident.sym.as_ref();\n\n if name != \"Promise\" {\n\n return;\n\n }\n\n\n\n if let Some(args) = &new_expr.args {\n", "file_path": "src/rules/no_async_promise_executor.rs", "rank": 40, "score": 186488.74084145343 }, { "content": "struct NoSetterReturnHandler;\n\n\n\nimpl Handler for NoSetterReturnHandler {\n\n fn return_stmt(\n\n &mut self,\n\n return_stmt: &ast_view::ReturnStmt,\n\n ctx: &mut Context,\n\n ) {\n\n // return without a value is allowed\n\n if return_stmt.arg.is_none() {\n\n return;\n\n }\n\n\n\n fn inside_setter(node: ast_view::Node) -> bool {\n\n use ast_view::Node::*;\n\n match node {\n\n SetterProp(_) => true,\n\n ClassMethod(method) => {\n\n method.method_kind() == ast_view::MethodKind::Setter\n\n }\n", "file_path": "src/rules/no_setter_return.rs", "rank": 41, "score": 185732.35878232346 }, { "content": "fn expr_callee_is_boolean(expr: &Expr) -> bool {\n\n matches!(expr, Expr::Ident(Ident { ref sym, .. 
}) if sym == \"Boolean\")\n\n}\n\n\n", "file_path": "src/rules/no_extra_boolean_cast.rs", "rank": 42, "score": 183557.35367595643 }, { "content": "/// Determine what syntax should be used as parse config from the file path.\n\nfn determine_syntax(path: &Path) -> Syntax {\n\n match path.extension() {\n\n Some(os_str) => match os_str.to_str() {\n\n Some(\"ts\") => get_default_ts_config(),\n\n Some(\"js\") | Some(\"mjs\") | Some(\"cjs\") => get_default_es_config(),\n\n Some(\"tsx\") => Syntax::Typescript(TsConfig {\n\n tsx: true,\n\n dynamic_import: true,\n\n decorators: true,\n\n ..Default::default()\n\n }),\n\n Some(\"jsx\") => Syntax::Es(EsConfig {\n\n jsx: true,\n\n num_sep: true,\n\n class_private_props: false,\n\n class_private_methods: false,\n\n class_props: false,\n\n export_default_from: true,\n\n export_namespace_from: true,\n\n dynamic_import: true,\n", "file_path": "examples/dlint/main.rs", "rank": 43, "score": 179117.4365052576 }, { "content": "pub fn assert_lint_ok<T: LintRule + 'static>(\n\n source: &str,\n\n filename: &'static str,\n\n) {\n\n let rule = T::new();\n\n let diagnostics = lint(rule, source, filename);\n\n if !diagnostics.is_empty() {\n\n panic!(\n\n \"Unexpected diagnostics found:\\n{:#?}\\n\\nsource:\\n{}\\n\",\n\n diagnostics, source\n\n );\n\n }\n\n}\n\n\n\nconst TEST_FILE_NAME: &str = \"lint_test.ts\";\n\n\n", "file_path": "src/test_util.rs", "rank": 44, "score": 178963.9547582501 }, { "content": "fn is_subset(arr_a: &[Expr], arr_b: &[Expr]) -> bool {\n\n arr_a\n\n .iter()\n\n .all(|a| arr_b.iter().any(|b| equal_in_if_else(a, b)))\n\n}\n\n\n", "file_path": "src/rules/no_dupe_else_if.rs", "rank": 45, "score": 177838.1749115649 }, { "content": "/// Checks if `expr` has `n` continuous bang operators at the beginning, ignoring parentheses.\n\nfn has_n_bang(expr: &Expr, n: usize) -> bool {\n\n if n == 0 {\n\n return true;\n\n }\n\n\n\n match expr {\n\n Expr::Unary(UnaryExpr {\n\n op: UnaryOp::Bang,\n\n ref arg,\n\n ..\n\n }) => 
has_n_bang(arg, n - 1),\n\n Expr::Paren(ParenExpr { ref expr, .. }) => has_n_bang(expr, n),\n\n _ => false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/rules/no_extra_boolean_cast.rs", "rank": 46, "score": 177838.1749115649 }, { "content": "fn inherits_from_non_constructor(class: &ast_view::Class) -> bool {\n\n matches!(&class.super_class, Some(ast_view::Expr::Lit(_)))\n\n}\n\n\n", "file_path": "src/rules/constructor_super.rs", "rank": 47, "score": 176128.29015207026 }, { "content": "fn is_body_empty(maybe_body: Option<&BlockStmt>) -> bool {\n\n maybe_body.map_or(true, |body| body.stmts.is_empty())\n\n}\n\n\n\nimpl<'c, 'view> Visit for RequireAwaitVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_fn_decl(&mut self, fn_decl: &FnDecl, _: &dyn Node) {\n\n let function_info = FunctionInfo {\n\n kind: FunctionKind::Function(Some(\n\n fn_decl.ident.sym.as_ref().to_string(),\n\n )),\n\n is_async: fn_decl.function.is_async,\n\n is_generator: fn_decl.function.is_generator,\n\n is_empty: is_body_empty(fn_decl.function.body.as_ref()),\n\n upper: self.function_info.take(),\n\n has_await: false,\n\n };\n\n\n\n self.process_function(fn_decl, Box::new(function_info));\n", "file_path": "src/rules/require_await.rs", "rank": 48, "score": 176128.29015207026 }, { "content": "fn expr_or_super_callee_is_boolean(expr_or_super: &ExprOrSuper) -> bool {\n\n match expr_or_super {\n\n ExprOrSuper::Expr(ref callee) => expr_callee_is_boolean(&**callee),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/rules/no_extra_boolean_cast.rs", "rank": 49, "score": 175552.54008659424 }, { "content": "/// Determines whether the two given `Expr`s are considered to be equal in if-else condition\n\n/// context. 
Note that `expr1` and `expr2` must be span-dropped to be compared properly.\n\nfn equal_in_if_else(expr1: &Expr, expr2: &Expr) -> bool {\n\n use swc_ecmascript::ast::Expr::*;\n\n match (expr1, expr2) {\n\n (Bin(ref bin1), Bin(ref bin2))\n\n if matches!(bin1.op, BinaryOp::LogicalOr | BinaryOp::LogicalAnd)\n\n && bin1.op == bin2.op =>\n\n {\n\n equal_in_if_else(&*bin1.left, &*bin2.left)\n\n && equal_in_if_else(&*bin1.right, &*bin2.right)\n\n || equal_in_if_else(&*bin1.left, &*bin2.right)\n\n && equal_in_if_else(&*bin1.right, &*bin2.left)\n\n }\n\n (Paren(ParenExpr { ref expr, .. }), _) => equal_in_if_else(&**expr, expr2),\n\n (_, Paren(ParenExpr { ref expr, .. })) => equal_in_if_else(expr1, &**expr),\n\n (This(_), This(_))\n\n | (Array(_), Array(_))\n\n | (Object(_), Object(_))\n\n | (Fn(_), Fn(_))\n\n | (Unary(_), Unary(_))\n\n | (Update(_), Update(_))\n", "file_path": "src/rules/no_dupe_else_if.rs", "rank": 50, "score": 174906.78774606792 }, { "content": "pub fn parse(\n\n source_code: &str,\n\n) -> (\n\n SourceFileTextInfo,\n\n Program,\n\n SingleThreadedCommentsMapInner,\n\n SingleThreadedCommentsMapInner,\n\n Vec<TokenAndSpan>,\n\n) {\n\n let ast_parser = ast_parser::AstParser::new();\n\n let syntax = ast_parser::get_default_ts_config();\n\n let ast_parser::ParsedData {\n\n source_file,\n\n program,\n\n leading_comments,\n\n trailing_comments,\n\n tokens,\n\n } = ast_parser\n\n .parse_program(TEST_FILE_NAME, syntax, source_code)\n\n .unwrap();\n\n (\n\n source_file,\n\n program,\n\n leading_comments,\n\n trailing_comments,\n\n tokens,\n\n )\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 51, "score": 174206.34488169054 }, { "content": "fn normalize_prop_name(name: &PropName) -> Option<String> {\n\n let normalized = match *name {\n\n PropName::Ident(Ident { ref sym, .. }) => sym.to_string(),\n\n PropName::Str(Str { ref value, .. }) => value.to_string(),\n\n PropName::Num(Number { ref value, .. 
}) => value.to_string(),\n\n PropName::BigInt(BigInt { ref value, .. }) => value.to_string(),\n\n PropName::Computed(ComputedPropName { ref expr, .. }) => match &**expr {\n\n Expr::Lit(Lit::Str(Str { ref value, .. })) => value.to_string(),\n\n Expr::Lit(Lit::Bool(Bool { ref value, .. })) => value.to_string(),\n\n Expr::Lit(Lit::Null(Null { .. })) => \"null\".to_string(),\n\n Expr::Lit(Lit::Num(Number { ref value, .. })) => value.to_string(),\n\n Expr::Lit(Lit::BigInt(BigInt { ref value, .. })) => value.to_string(),\n\n Expr::Tpl(Tpl {\n\n ref quasis,\n\n ref exprs,\n\n ..\n\n }) if exprs.is_empty() => {\n\n quasis.iter().next().map(|q| q.raw.value.to_string())?\n\n }\n\n _ => return None,\n\n },\n\n };\n\n\n\n Some(normalized)\n\n}\n\n\n", "file_path": "src/rules/no_dupe_class_members.rs", "rank": 52, "score": 173407.63434621255 }, { "content": "pub fn assert_diagnostic(\n\n diagnostic: &LintDiagnostic,\n\n code: &str,\n\n line: usize,\n\n col: usize,\n\n source: &str,\n\n) {\n\n if diagnostic.code == code\n\n // todo(dsherret): we should change these to be consistent (ex. both 1-indexed)\n\n && diagnostic.range.start.line_index + 1 == line\n\n && diagnostic.range.start.column_index == col\n\n {\n\n return;\n\n }\n\n panic!(\n\n \"expect diagnostics {} at {}:{} to be {} at {}:{}\\n\\nsource:\\n{}\\n\",\n\n diagnostic.code,\n\n diagnostic.range.start.line_index + 1,\n\n diagnostic.range.start.column_index,\n\n code,\n\n line,\n\n col,\n\n source,\n\n );\n\n}\n\n\n", "file_path": "src/test_util.rs", "rank": 53, "score": 171534.11900537123 }, { "content": "fn is_shadowed(ident: &ast_view::Ident, scope: &Scope) -> bool {\n\n scope.var(&ident.inner.to_id()).is_some()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n // Test cases are derived from\n\n // https://github.com/nodejs/node/blob/7919ced0c97e9a5b17e6042e0b57bc911d23583d/test/parallel/test-eslint-prefer-primordials.js\n\n //\n\n // Copyright Joyent, Inc. 
and other Node contributors.\n\n //\n\n // Permission is hereby granted, free of charge, to any person obtaining a\n\n // copy of this software and associated documentation files (the\n\n // \"Software\"), to deal in the Software without restriction, including\n\n // without limitation the rights to use, copy, modify, merge, publish,\n\n // distribute, sublicense, and/or sell copies of the Software, and to permit\n\n // persons to whom the Software is furnished to do so, subject to the\n\n // following conditions:\n", "file_path": "src/rules/prefer_primordials.rs", "rank": 54, "score": 168601.56649407005 }, { "content": "fn extract_ident_from_decl(decl: &ast_view::Decl) -> Option<String> {\n\n match decl {\n\n ast_view::Decl::Fn(ast_view::FnDecl { ident, .. }) => {\n\n Some(ident.sym().to_string())\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/rules/adjacent_overload_signatures.rs", "rank": 55, "score": 166849.5175580207 }, { "content": "pub fn parse_line_ignore_directives(\n\n ignore_diagnostic_directive: &str,\n\n program: ast_view::Program,\n\n) -> HashMap<usize, LineIgnoreDirective> {\n\n program\n\n .comments()\n\n .unwrap()\n\n .all_comments()\n\n .filter_map(|comment| {\n\n parse_ignore_comment(ignore_diagnostic_directive, comment).map(\n\n |directive| {\n\n (\n\n program.source_file().unwrap().line_index(directive.span.lo),\n\n directive,\n\n )\n\n },\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/ignore_directives.rs", "rank": 56, "score": 166605.42491365506 }, { "content": "pub fn is_valid_unicode_property(\n\n version: EcmaVersion,\n\n name: &str,\n\n value: &str,\n\n) -> bool {\n\n if GC_NAME_PATTERN.contains(name)\n\n && version >= EcmaVersion::Es2018\n\n && GC_VALUE_PATTERNS.es2018.contains(value)\n\n {\n\n true\n\n } else if SC_NAME_PATTERN.contains(name) {\n\n (version >= EcmaVersion::Es2018 && SC_VALUE_PATTERNS.es2018.contains(value))\n\n || (version >= EcmaVersion::Es2019\n\n && SC_VALUE_PATTERNS.es2019.contains(value))\n\n 
|| (version >= EcmaVersion::Es2020\n\n && SC_VALUE_PATTERNS.es2020.contains(value))\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/js_regex/unicode.rs", "rank": 57, "score": 166605.42491365506 }, { "content": "fn return_before_super<'a, 'view>(\n\n constructor: &'a ast_view::Constructor<'view>,\n\n) -> Option<&'a ast_view::ReturnStmt<'view>> {\n\n if let Some(block_stmt) = &constructor.body {\n\n for stmt in &block_stmt.stmts {\n\n if extract_super_span(stmt).is_some() {\n\n return None;\n\n }\n\n\n\n if let ast_view::Stmt::Return(ret) = stmt {\n\n return Some(ret);\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/rules/constructor_super.rs", "rank": 58, "score": 166189.90071697664 }, { "content": "fn allow_fall_through<'c>(\n\n mut comments: impl Iterator<Item = &'c Comment>,\n\n) -> bool {\n\n comments.any(|comment| {\n\n let l = comment.text.to_ascii_lowercase();\n\n l.contains(\"fallthrough\")\n\n || l.contains(\"falls through\")\n\n || l.contains(\"fall through\")\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn no_fallthrough_valid() {\n\n assert_lint_ok! 
{\n\n NoFallthrough,\n\n \"switch(foo) { case 0: a(); /* falls through */ case 1: b(); }\",\n", "file_path": "src/rules/no_fallthrough.rs", "rank": 59, "score": 166167.92553591885 }, { "content": "/// Read characters until `}` and try to parse it as hexadecimal.\n\nfn read_hex_until_brace(iter: &mut Peekable<Chars>) -> Option<u64> {\n\n iter.next(); // consume `{`\n\n let mut s = String::new();\n\n loop {\n\n let ch = iter.next()?;\n\n if ch == '}' {\n\n break;\n\n }\n\n s.push(ch);\n\n }\n\n u64::from_str_radix(s.as_str(), 16).ok()\n\n}\n\n\n\nimpl<'c, 'view> VisitAll for NoControlRegexVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_regex(&mut self, regex: &Regex, _: &dyn Node) {\n\n self.check_regex(regex.exp.to_string().as_str(), regex.span);\n\n }\n\n\n", "file_path": "src/rules/no_control_regex.rs", "rank": 60, "score": 166087.43980205376 }, { "content": "pub fn is_valid_lone_unicode_property(\n\n version: EcmaVersion,\n\n value: &str,\n\n) -> bool {\n\n (version >= EcmaVersion::Es2018\n\n && BIN_PROPERTY_PATTERNS.es2018.contains(value))\n\n || (version >= EcmaVersion::Es2019\n\n && BIN_PROPERTY_PATTERNS.es2019.contains(value))\n\n}\n\n\n", "file_path": "src/js_regex/unicode.rs", "rank": 61, "score": 164327.93982841622 }, { "content": "/// Extracts idents from the Pat recursively and apply the operation to each ident.\n\nfn extract_idents_from_pat_with<'a, F>(pat: &'a Pat, op: &mut F)\n\nwhere\n\n F: FnMut(ExtractIdentsArgs<'a>),\n\n{\n\n match pat {\n\n Pat::Ident(ident) => op(ExtractIdentsArgs::Ident(&ident.id)),\n\n Pat::Array(array_pat) => {\n\n for elem_pat in array_pat.elems.iter().flatten() {\n\n extract_idents_from_pat_with(elem_pat, op);\n\n }\n\n }\n\n Pat::Rest(rest_pat) => extract_idents_from_pat_with(&*rest_pat.arg, op),\n\n Pat::Object(object_pat) => {\n\n for prop in &object_pat.props {\n\n match prop {\n\n ObjectPatProp::KeyValue(key_value) => {\n\n extract_idents_from_pat_with(&*key_value.value, op);\n\n }\n\n 
ObjectPatProp::Assign(assign) => {\n\n op(ExtractIdentsArgs::Ident(&assign.key));\n", "file_path": "src/rules/prefer_const.rs", "rank": 62, "score": 160226.90104787078 }, { "content": "/// Read the next n characters and try to parse it as hexadecimal.\n\nfn read_hex_n(iter: &mut Peekable<Chars>, n: usize) -> Option<u64> {\n\n let mut s = String::new();\n\n for _ in 0..n {\n\n let ch = iter.next()?;\n\n s.push(ch);\n\n }\n\n u64::from_str_radix(s.as_str(), 16).ok()\n\n}\n\n\n", "file_path": "src/rules/no_control_regex.rs", "rank": 63, "score": 160226.90104787078 }, { "content": "/// Returns `Some` if the comment should be reported.\n\nfn check_comment(comment: &Comment, is_js_like: bool) -> Option<ReportKind> {\n\n if comment.kind == CommentKind::Block {\n\n return None;\n\n }\n\n if !TRIPLE_SLASH_REFERENCE_RE.is_match(&comment.text) {\n\n return None;\n\n }\n\n\n\n if is_js_like {\n\n // In JavaScript, only the `types` directives are allowed\n\n if is_types_ref(&comment.text) {\n\n None\n\n } else {\n\n Some(ReportKind::NotTypesInJs(comment.span))\n\n }\n\n } else if is_path_ref(&comment.text)\n\n || is_types_ref(&comment.text)\n\n || is_lib_ref(&comment.text)\n\n || is_no_default_lib_ref(&comment.text)\n\n {\n\n None\n\n } else {\n\n Some(ReportKind::InvalidDirective(comment.span))\n\n }\n\n}\n\n\n", "file_path": "src/rules/no_invalid_triple_slash_reference.rs", "rank": 64, "score": 158632.47170504794 }, { "content": "fn extract_idents_from_pat<'a>(idents: &mut Vec<&'a Ident>, pat: &'a Pat) {\n\n let mut op = |args: ExtractIdentsArgs<'a>| {\n\n if let ExtractIdentsArgs::Ident(i) = args {\n\n idents.push(i);\n\n }\n\n };\n\n extract_idents_from_pat_with(pat, &mut op);\n\n}\n\n\n", "file_path": "src/rules/prefer_const.rs", "rank": 65, "score": 157760.61122862596 }, { "content": "fn check<'a, T, U>(items: T, ctx: &'a mut Context)\n\nwhere\n\n T: IntoIterator<Item = &'a U>,\n\n U: ExtractMethod + Spanned + 'a,\n\n{\n\n let mut seen_methods = HashSet::new();\n\n let 
mut last_method = None;\n\n for item in items {\n\n if let Some(method) = item.get_method() {\n\n if seen_methods.contains(&method) && last_method.as_ref() != Some(&method)\n\n {\n\n ctx.add_diagnostic_with_hint(\n\n item.span(),\n\n CODE,\n\n AdjacentOverloadSignaturesMessage::ShouldBeAdjacent(\n\n method.to_string(),\n\n ),\n\n AdjacentOverloadSignaturesHint::GroupedTogether,\n\n );\n\n }\n\n\n\n seen_methods.insert(method.clone());\n\n last_method = Some(method);\n\n } else {\n\n last_method = None;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rules/adjacent_overload_signatures.rs", "rank": 66, "score": 157760.61122862596 }, { "content": "struct NoVarVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoVarVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoVarVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_var_decl(&mut self, var_decl: &VarDecl, _parent: &dyn Node) {\n\n if var_decl.kind == VarDeclKind::Var {\n\n self.context.add_diagnostic(var_decl.span, CODE, MESSAGE);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rules/no_var.rs", "rank": 67, "score": 157720.49042668674 }, { "content": "struct NoEvalVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoEvalVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn maybe_add_diagnostic(&mut self, source: &dyn StringRepr, span: Span) {\n\n if source.string_repr().as_deref() == Some(\"eval\") {\n\n self.add_diagnostic(span);\n\n }\n\n }\n\n\n\n fn add_diagnostic(&mut self, span: Span) {\n\n self\n\n .context\n\n .add_diagnostic_with_hint(span, CODE, MESSAGE, HINT);\n\n }\n", "file_path": "src/rules/no_eval.rs", "rank": 68, "score": 157720.49042668674 }, { "content": "struct NoEmptyVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoEmptyVisitor<'c, 'view> 
{\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoEmptyVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_function(&mut self, function: &Function, _parent: &dyn Node) {\n\n // Empty functions shouldn't be caught by this rule.\n\n // Because function's body is a block statement, we're gonna\n\n // manually visit each member; otherwise rule would produce errors\n\n // for empty function body.\n\n if let Some(body) = &function.body {\n\n body.visit_children_with(self);\n", "file_path": "src/rules/no_empty.rs", "rank": 69, "score": 157720.49042668674 }, { "content": "struct NoRedeclareVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n /// TODO(kdy1): Change this to HashMap<Id, Vec<Span>> and use those spans to point previous bindings/\n\n bindings: HashSet<Id>,\n\n}\n\n\n\nimpl<'c, 'view> NoRedeclareVisitor<'c, 'view> {\n\n fn declare(&mut self, i: &Ident) {\n\n let id = i.to_id();\n\n\n\n if !self.bindings.insert(id) {\n\n self.context.add_diagnostic(i.span, CODE, MESSAGE);\n\n }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoRedeclareVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_fn_decl(&mut self, f: &FnDecl, _: &dyn Node) {\n", "file_path": "src/rules/no_redeclare.rs", "rank": 70, "score": 157720.49042668674 }, { "content": "struct ForDirectionVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> ForDirectionVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn check_update_direction(\n\n &self,\n\n update_expr: &UpdateExpr,\n\n counter_name: impl AsRef<str>,\n\n ) -> i32 {\n\n let mut update_direction = 0;\n\n\n\n if let Expr::Ident(ident) = &*update_expr.arg {\n\n if ident.sym.as_ref() == counter_name.as_ref() {\n\n match update_expr.op {\n\n UpdateOp::PlusPlus => {\n", "file_path": "src/rules/for_direction.rs", "rank": 71, "score": 157720.49042668674 }, { "content": 
"struct NoFallthroughVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoFallthroughVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_switch_cases(&mut self, cases: &[SwitchCase], parent: &dyn Node) {\n\n let mut should_emit_err = false;\n\n let mut prev_span = DUMMY_SP;\n\n\n\n 'cases: for (case_idx, case) in cases.iter().enumerate() {\n\n case.visit_with(parent, self);\n\n\n\n if should_emit_err {\n\n let comments = self.context.leading_comments_at(case.span.lo);\n\n if !allow_fall_through(comments) {\n\n self.context.add_diagnostic_with_hint(\n\n prev_span,\n\n CODE,\n", "file_path": "src/rules/no_fallthrough.rs", "rank": 72, "score": 157720.49042668674 }, { "content": "struct NoDebuggerVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoDebuggerVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoDebuggerVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_debugger_stmt(\n\n &mut self,\n\n debugger_stmt: &DebuggerStmt,\n\n _parent: &dyn Node,\n\n ) {\n\n self.context.add_diagnostic_with_hint(\n\n debugger_stmt.span,\n", "file_path": "src/rules/no_debugger.rs", "rank": 73, "score": 157720.49042668674 }, { "content": "struct EqeqeqVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> EqeqeqVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for EqeqeqVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_bin_expr(&mut self, bin_expr: &BinExpr, parent: &dyn Node) {\n\n if matches!(bin_expr.op, BinaryOp::EqEq | BinaryOp::NotEq) {\n\n let (message, hint) = if bin_expr.op == BinaryOp::EqEq {\n\n (EqeqeqMessage::ExpectedEqual, EqeqeqHint::UseEqeqeq)\n\n } else {\n\n (EqeqeqMessage::ExpectedNotEqual, EqeqeqHint::UseNoteqeq)\n\n };\n", "file_path": 
"src/rules/eqeqeq.rs", "rank": 74, "score": 157720.49042668674 }, { "content": "struct NoUnreachableVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoUnreachableVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoUnreachableVisitor<'c, 'view> {\n\n fn visit_stmt(&mut self, stmt: &Stmt, _: &dyn Node) {\n\n stmt.visit_children_with(self);\n\n\n\n match stmt {\n\n // Don't print unused error for block statements\n\n Stmt::Block(_) => return,\n\n // Hoisted, so reachable.\n\n Stmt::Decl(Decl::Fn(..)) => return,\n\n // Ignore type declarations.\n", "file_path": "src/rules/no_unreachable.rs", "rank": 75, "score": 157720.49042668674 }, { "content": "struct NoOctalVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoOctalVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoOctalVisitor<'c, 'view> {\n\n fn visit_number(&mut self, literal_num: &Number, _parent: &dyn Node) {\n\n static OCTAL: Lazy<Regex> = Lazy::new(|| Regex::new(r\"^0[0-9]\").unwrap());\n\n\n\n let raw_number = self.context.file_text_substring(&literal_num.span);\n\n\n\n if OCTAL.is_match(raw_number) {\n\n self.context.add_diagnostic_with_hint(\n\n literal_num.span,\n\n CODE,\n", "file_path": "src/rules/no_octal.rs", "rank": 76, "score": 157720.49042668674 }, { "content": "struct NoThisAliasVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoThisAliasVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> VisitAll for NoThisAliasVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_var_decl(&mut self, var_decl: &VarDecl, _parent: &dyn Node) {\n\n for decl in &var_decl.decls {\n\n if_chain! 
{\n\n if let Some(init) = &decl.init;\n\n if matches!(&**init, Expr::This(_));\n\n if matches!(&decl.name, Pat::Ident(_));\n\n then {\n", "file_path": "src/rules/no_this_alias.rs", "rank": 77, "score": 157720.49042668674 }, { "content": "struct NoUndefVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoUndefVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn check(&mut self, ident: &Ident) {\n\n // Thanks to this if statement, we can check for Map in\n\n //\n\n // function foo(Map) { ... }\n\n //\n\n if ident.span.ctxt != self.context.top_level_ctxt() {\n\n return;\n\n }\n\n\n\n // Implicitly defined\n\n // See: https://github.com/denoland/deno_lint/issues/317\n", "file_path": "src/rules/no_undef.rs", "rank": 78, "score": 157720.49042668674 }, { "content": "struct NoExplicitAnyVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoExplicitAnyVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoExplicitAnyVisitor<'c, 'view> {\n\n fn visit_ts_keyword_type(\n\n &mut self,\n\n ts_keyword_type: &TsKeywordType,\n\n _parent: &dyn Node,\n\n ) {\n\n use swc_ecmascript::ast::TsKeywordTypeKind::*;\n\n\n\n if ts_keyword_type.kind == TsAnyKeyword {\n\n self.context.add_diagnostic_with_hint(\n", "file_path": "src/rules/no_explicit_any.rs", "rank": 79, "score": 157720.49042668674 }, { "content": "fn append_test(appeared_conditions: &mut Vec<Vec<Vec<Expr>>>, expr: Expr) {\n\n appeared_conditions.push(split_by_or_then_and(expr));\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn no_dupe_else_if_valid() {\n\n assert_lint_ok! 
{\n\n NoDupeElseIf,\n\n \"if (a) {} else if (b) {}\",\n\n \"if (a); else if (b); else if (c);\",\n\n \"if (true) {} else if (false) {} else {}\",\n\n \"if (1) {} else if (2) {}\",\n\n \"if (f) {} else if (f()) {}\",\n\n \"if (f(a)) {} else if (g(a)) {}\",\n\n \"if (f(a)) {} else if (f(b)) {}\",\n\n \"if (a === 1) {} else if (a === 2) {}\",\n", "file_path": "src/rules/no_dupe_else_if.rs", "rank": 80, "score": 155244.95137756068 }, { "content": "/// A visitor to check the `no-dupe-else-if` rule.\n\n/// Determination logic is ported from ESLint's implementation. For more, see:\n\n/// [eslint/no-dupe-else-if.js](https://github.com/eslint/eslint/blob/master/lib/rules/no-dupe-else-if.js).\n\nstruct NoDupeElseIfVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n checked_span: HashSet<Span>,\n\n}\n\n\n\nimpl<'c, 'view> NoDupeElseIfVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self {\n\n context,\n\n checked_span: HashSet::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> VisitAll for NoDupeElseIfVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_if_stmt(&mut self, if_stmt: &IfStmt, _: &dyn Node) {\n\n let span = if_stmt.test.span();\n\n\n", "file_path": "src/rules/no_dupe_else_if.rs", "rank": 81, "score": 153180.61643903147 }, { "content": "struct NoGlobalAssignVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoGlobalAssignVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn check(&mut self, span: Span, id: Id) {\n\n if id.1 != self.context.top_level_ctxt() {\n\n return;\n\n }\n\n\n\n if self.context.scope().var(&id).is_some() {\n\n return;\n\n }\n\n\n\n // We only care about globals.\n\n let maybe_global = GLOBALS.iter().find(|(name, _)| name == &&*id.0);\n", "file_path": "src/rules/no_global_assign.rs", "rank": 82, "score": 153175.23205574095 }, { "content": "struct NoCondAssignVisitor<'c, 'view> {\n\n context: &'c mut 
Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoCondAssignVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn add_diagnostic(&mut self, span: Span) {\n\n self.context.add_diagnostic_with_hint(\n\n span,\n\n CODE,\n\n NoCondAssignMessage::Unexpected,\n\n NoCondAssignHint::ChangeOrMove,\n\n );\n\n }\n\n\n\n fn check_condition(&mut self, condition: &Expr) {\n\n match condition {\n", "file_path": "src/rules/no_cond_assign.rs", "rank": 83, "score": 153175.23205574095 }, { "content": "struct NoArrayConstructorVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoArrayConstructorVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn check_args(&mut self, args: Vec<ExprOrSpread>, span: Span) {\n\n if args.len() != 1 {\n\n self\n\n .context\n\n .add_diagnostic_with_hint(span, CODE, MESSAGE, HINT);\n\n }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> VisitAll for NoArrayConstructorVisitor<'c, 'view> {\n\n noop_visit_type!();\n", "file_path": "src/rules/no_array_constructor.rs", "rank": 84, "score": 153175.23205574095 }, { "content": "struct NoClassAssignVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoClassAssignVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> VisitAll for NoClassAssignVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_assign_expr(&mut self, assign_expr: &AssignExpr, _node: &dyn Node) {\n\n let ids = find_lhs_ids(&assign_expr.left);\n\n for id in ids {\n\n let var = self.context.scope().var(&id);\n\n if let Some(var) = var {\n\n if let BindingKind::Class = var.kind() {\n\n self.context.add_diagnostic_with_hint(\n", "file_path": "src/rules/no_class_assign.rs", "rank": 85, "score": 153175.23205574095 }, { "content": "struct NoRegexSpacesVisitor<'c, 'view> {\n\n context: &'c mut 
Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoRegexSpacesVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn check_regex(&mut self, regex: &str, span: Span) {\n\n static DOUBLE_SPACE: Lazy<regex::Regex> =\n\n Lazy::new(|| regex::Regex::new(r\"(?u) {2}\").unwrap());\n\n static BRACKETS: Lazy<regex::Regex> =\n\n Lazy::new(|| regex::Regex::new(r\"\\[.*?[^\\\\]\\]\").unwrap());\n\n static SPACES: Lazy<regex::Regex> = Lazy::new(|| {\n\n regex::Regex::new(r#\"(?u)( {2,})(?: [+*{?]|[^+*{?]|$)\"#).unwrap()\n\n });\n\n\n\n if !DOUBLE_SPACE.is_match(regex) {\n\n return;\n", "file_path": "src/rules/no_regex_spaces.rs", "rank": 86, "score": 153175.23205574095 }, { "content": "struct NoDuplicateCaseVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoDuplicateCaseVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> VisitAll for NoDuplicateCaseVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_switch_stmt(&mut self, switch_stmt: &SwitchStmt, _: &dyn Node) {\n\n // Check if there are duplicates by comparing span dropped expressions\n\n let mut seen: HashSet<Box<Expr>> = HashSet::new();\n\n\n\n for case in &switch_stmt.cases {\n\n if let Some(test) = &case.test {\n\n let span_dropped_test = drop_span(test.clone());\n", "file_path": "src/rules/no_duplicate_case.rs", "rank": 87, "score": 153175.23205574095 }, { "content": "struct NoSelfAssignVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoSelfAssignVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn add_diagnostic(&mut self, span: Span, name: impl ToString) {\n\n self.context.add_diagnostic_with_hint(\n\n span,\n\n CODE,\n\n NoSelfAssignMessage::Invalid(name.to_string()),\n\n NoSelfAssignHint::Mistake,\n\n );\n\n }\n\n\n\n fn is_same_property(\n\n &mut 
self,\n", "file_path": "src/rules/no_self_assign.rs", "rank": 88, "score": 153175.23205574095 }, { "content": "struct NoSparseArraysVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoSparseArraysVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoSparseArraysVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_array_lit(\n\n &mut self,\n\n array_lit: &swc_ecmascript::ast::ArrayLit,\n\n _parent: &dyn Node,\n\n ) {\n\n if array_lit.elems.iter().any(|e| e.is_none()) {\n\n self.context.add_diagnostic(\n", "file_path": "src/rules/no_sparse_arrays.rs", "rank": 89, "score": 153175.23205574095 }, { "content": "struct NoInnerDeclarationsVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n valid_decls: HashSet<Span>,\n\n in_function: bool,\n\n}\n\n\n\nimpl<'c, 'view> NoInnerDeclarationsVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>, valid_decls: HashSet<Span>) -> Self {\n\n Self {\n\n context,\n\n valid_decls,\n\n in_function: false,\n\n }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> NoInnerDeclarationsVisitor<'c, 'view> {\n\n fn add_diagnostic(&mut self, span: Span, kind: &str) {\n\n let root = if self.in_function {\n\n \"function\"\n", "file_path": "src/rules/no_inner_declarations.rs", "rank": 90, "score": 153175.23205574095 }, { "content": "struct NoImportAssignVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoImportAssignVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn check(&mut self, span: Span, i: &Ident, is_assign_to_prop: bool) {\n\n let var = self.context.scope().var(&i.to_id());\n\n if var.map_or(false, |v| v.kind() == BindingKind::NamespaceImport) {\n\n self\n\n .context\n\n .add_diagnostic_with_hint(span, CODE, MESSAGE, HINT);\n\n return;\n\n }\n\n\n\n if !is_assign_to_prop\n\n && var.map_or(false, |v| v.kind() == 
BindingKind::ValueImport)\n", "file_path": "src/rules/no_import_assign.rs", "rank": 91, "score": 153175.23205574095 }, { "content": "struct NoInferrableTypesVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoInferrableTypesVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn add_diagnostic_helper(&mut self, span: swc_common::Span) {\n\n self.context.add_diagnostic_with_hint(\n\n span,\n\n CODE,\n\n NoInferrableTypesMessage::NotAllowed,\n\n NoInferrableTypesHint::Remove,\n\n )\n\n }\n\n\n\n fn check_callee(\n\n &mut self,\n", "file_path": "src/rules/no_inferrable_types.rs", "rank": 92, "score": 153175.23205574095 }, { "content": "struct NoInvalidRegexpVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n validator: EcmaRegexValidator,\n\n}\n\n\n\nimpl<'c, 'view> NoInvalidRegexpVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self {\n\n context,\n\n validator: EcmaRegexValidator::new(EcmaVersion::Es2018),\n\n }\n\n }\n\n\n\n fn handle_call_or_new_expr(\n\n &mut self,\n\n callee: &Expr,\n\n args: &[ExprOrSpread],\n\n span: Span,\n\n ) {\n\n if let Expr::Ident(ident) = callee {\n", "file_path": "src/rules/no_invalid_regexp.rs", "rank": 93, "score": 153175.23205574095 }, { "content": "struct NoThrowLiteralVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoThrowLiteralVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoThrowLiteralVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_throw_stmt(&mut self, throw_stmt: &ThrowStmt, _parent: &dyn Node) {\n\n match &*throw_stmt.arg {\n\n Expr::Lit(_) => self.context.add_diagnostic(\n\n throw_stmt.span,\n\n CODE,\n\n NoThrowLiteralMessage::ErrObjectExpected,\n\n ),\n", "file_path": "src/rules/no_throw_literal.rs", "rank": 94, "score": 153175.23205574095 }, { 
"content": "struct NoCaseDeclarationsVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoCaseDeclarationsVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> VisitAll for NoCaseDeclarationsVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_switch_case(\n\n &mut self,\n\n switch_case: &SwitchCase,\n\n _parent: &dyn Node,\n\n ) {\n\n for stmt in &switch_case.cons {\n\n let is_lexical_decl = match stmt {\n", "file_path": "src/rules/no_case_declarations.rs", "rank": 95, "score": 153175.23205574095 }, { "content": "struct NoExAssignVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoExAssignVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> VisitAll for NoExAssignVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_assign_expr(&mut self, assign_expr: &AssignExpr, _: &dyn Node) {\n\n let ids = find_lhs_ids(&assign_expr.left);\n\n\n\n for id in ids {\n\n let var = self.context.scope().var(&id);\n\n\n\n if let Some(var) = var {\n", "file_path": "src/rules/no_ex_assign.rs", "rank": 96, "score": 153175.23205574095 }, { "content": "struct NoPrototypeBuiltinsVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoPrototypeBuiltinsVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoPrototypeBuiltinsVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_call_expr(&mut self, call_expr: &CallExpr, _parent: &dyn Node) {\n\n let member_expr = match &call_expr.callee {\n\n ExprOrSuper::Expr(boxed_expr) => match &**boxed_expr {\n\n Expr::Member(member_expr) => {\n\n if member_expr.computed {\n\n return;\n\n }\n", "file_path": "src/rules/no_prototype_builtins.rs", "rank": 97, "score": 153175.23205574095 }, { 
"content": "struct NoExtraSemiVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoExtraSemiVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n}\n\n\n\nimpl<'c, 'view> Visit for NoExtraSemiVisitor<'c, 'view> {\n\n noop_visit_type!();\n\n\n\n fn visit_empty_stmt(&mut self, empty_stmt: &EmptyStmt, _parent: &dyn Node) {\n\n self.context.add_diagnostic_with_hint(\n\n empty_stmt.span,\n\n CODE,\n\n NoExtraSemiMessage::Unnecessary,\n\n NoExtraSemiHint::Remove,\n\n );\n", "file_path": "src/rules/no_extra_semi.rs", "rank": 98, "score": 153175.23205574095 }, { "content": "struct NoMisusedNewVisitor<'c, 'view> {\n\n context: &'c mut Context<'view>,\n\n}\n\n\n\nimpl<'c, 'view> NoMisusedNewVisitor<'c, 'view> {\n\n fn new(context: &'c mut Context<'view>) -> Self {\n\n Self { context }\n\n }\n\n\n\n fn match_parent_type(&self, parent: &Ident, return_type: &TsTypeAnn) -> bool {\n\n if let TsType::TsTypeRef(type_ref) = &*return_type.type_ann {\n\n if let TsEntityName::Ident(ident) = &type_ref.type_name {\n\n return ident.sym == parent.sym;\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n fn is_constructor_keyword(&self, ident: &Ident) -> bool {\n", "file_path": "src/rules/no_misused_new.rs", "rank": 99, "score": 153175.23205574095 } ]
Rust
src/config/freqency.rs
ywatanabee/libipt-rs
0efe4ff71d8e3236f4d3f691a16e714eb0c745bb
#[cfg(test)] mod test { use super::*; #[test] fn test_freq_props() { let mut freq = Frequency::new(1, 2, 3, 4); assert_eq!(freq.mtc(), 1); assert_eq!(freq.nom(), 2); assert_eq!(freq.ctc(), 3); assert_eq!(freq.tsc(), 4); freq.set_mtc(5); freq.set_nom(6); freq.set_ctc(7); freq.set_tsc(8); assert_eq!(freq.mtc(), 5); assert_eq!(freq.nom(), 6); assert_eq!(freq.ctc(), 7); assert_eq!(freq.tsc(), 8); } } #[derive(Clone, Copy, Default)] pub struct Frequency { pub(super) mtc: u8, pub(super) nom: u8, pub(super) ctc: u32, pub(super) tsc: u32 } impl Frequency { #[inline] pub fn new(mtc: u8, nom: u8, ctc: u32, tsc: u32) -> Self { Frequency {mtc, nom, ctc, tsc} } #[inline] pub fn mtc(self) -> u8 { self.mtc } #[inline] pub fn nom(self) -> u8 { self.nom } #[inline] pub fn ctc(self) -> u32 { self.ctc } #[inline] pub fn tsc(self) -> u32 { self.tsc } #[inline] pub fn set_mtc(&mut self, mtc: u8) { self.mtc = mtc } #[inline] pub fn set_nom(&mut self, nom: u8) { self.nom = nom } #[inline] pub fn set_ctc(&mut self, ctc: u32) { self.ctc = ctc } #[inline] pub fn set_tsc(&mut self, tsc: u32) { self.tsc = tsc } }
#[cfg(test)] mod test { use super::*; #[test] fn test_freq_props() {
} #[derive(Clone, Copy, Default)] pub struct Frequency { pub(super) mtc: u8, pub(super) nom: u8, pub(super) ctc: u32, pub(super) tsc: u32 } impl Frequency { #[inline] pub fn new(mtc: u8, nom: u8, ctc: u32, tsc: u32) -> Self { Frequency {mtc, nom, ctc, tsc} } #[inline] pub fn mtc(self) -> u8 { self.mtc } #[inline] pub fn nom(self) -> u8 { self.nom } #[inline] pub fn ctc(self) -> u32 { self.ctc } #[inline] pub fn tsc(self) -> u32 { self.tsc } #[inline] pub fn set_mtc(&mut self, mtc: u8) { self.mtc = mtc } #[inline] pub fn set_nom(&mut self, nom: u8) { self.nom = nom } #[inline] pub fn set_ctc(&mut self, ctc: u32) { self.ctc = ctc } #[inline] pub fn set_tsc(&mut self, tsc: u32) { self.tsc = tsc } }
let mut freq = Frequency::new(1, 2, 3, 4); assert_eq!(freq.mtc(), 1); assert_eq!(freq.nom(), 2); assert_eq!(freq.ctc(), 3); assert_eq!(freq.tsc(), 4); freq.set_mtc(5); freq.set_nom(6); freq.set_ctc(7); freq.set_tsc(8); assert_eq!(freq.mtc(), 5); assert_eq!(freq.nom(), 6); assert_eq!(freq.ctc(), 7); assert_eq!(freq.tsc(), 8); }
function_block-function_prefix_line
[ { "content": "#[test]\n\nfn test_encoder_all_packets() {\n\n let mut inp = [0; 132];\n\n let mut cfg = ConfigBuilder::new(&mut inp)\n\n .unwrap()\n\n .cpu(Cpu::intel(1, 2, 3))\n\n .finish();\n\n\n\n let mut enc = Encoder::new(&mut cfg).unwrap();\n\n\n\n let mut size: u32 = 0;\n\n\n\n size += enc.next(Pad::new()).unwrap();\n\n size += enc.next(Psb::new()).unwrap();\n\n size += enc.next(Psbend::new()).unwrap();\n\n size += enc.next(Ovf::new()).unwrap();\n\n\n\n size += enc.next(Fup::new(123, Compression::Sext48)).unwrap();\n\n size += enc.next(Tip::new(321, Compression::Full)).unwrap();\n\n size += enc.next(TipPge::new(666, Compression::Suppressed)).unwrap();\n\n size += enc.next(TipPgd::new(888, Compression::Update16)).unwrap();\n", "file_path": "tests/integration_encoding.rs", "rank": 0, "score": 74500.30165735993 }, { "content": "use libipt::{ ConfigBuilder, Cpu };\n\nuse libipt::packet::*;\n\n\n\n#[test]\n", "file_path": "tests/integration_encoding.rs", "rank": 1, "score": 34702.9042518477 }, { "content": " size += enc.next(Tnt8::new(3, 4)).unwrap();\n\n size += enc.next(Tnt64::new(4, 13)).unwrap();\n\n size += enc.next(Mode::new(Payload::Exec(Exec::CSL | Exec::CSD))).unwrap();\n\n size += enc.next(Pip::new(1337, false)).unwrap();\n\n size += enc.next(Tsc::new(69)).unwrap();\n\n size += enc.next(Cbr::new(5)).unwrap();\n\n size += enc.next(Tma::new(420, 421)).unwrap();\n\n size += enc.next(Mtc::new(0)).unwrap();\n\n size += enc.next(Cyc::new(0xCA7)).unwrap();\n\n size += enc.next(Stop::new()).unwrap(); \n\n size += enc.next(Vmcs::new(111)).unwrap();\n\n size += enc.next(Mnt::new(222)).unwrap();\n\n size += enc.next(Exstop::new(true)).unwrap();\n\n size += enc.next(Mwait::new(333, 444)).unwrap();\n\n size += enc.next(Pwre::new(101, 10, false)).unwrap();\n\n size += enc.next(Pwrx::new(1, 2, false, true, false)).unwrap();\n\n size += enc.next(Ptw::new(5, 0, false)).unwrap();\n\n\n\n assert_eq!(size, 132);\n\n assert!(enc.next(Pad::new()).is_err());\n\n}", 
"file_path": "tests/integration_encoding.rs", "rank": 2, "score": 34694.882837272744 }, { "content": "pub use ptw::*;\n\nmod unknown;\n\npub use unknown::*;\n\n\n\nmod decoder;\n\npub use decoder::PacketDecoder;\n\n\n\nmod encoder;\n\npub use encoder::Encoder;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use libipt_sys::pt_packet_mnt;\n\n use libipt_sys::pt_packet__bindgen_ty_1;\n\n\n\n #[test]\n\n fn test_pkt_from() {\n\n let p1 = pt_packet_mnt { payload: 666 };\n\n let p2 = pt_packet {\n", "file_path": "src/packet/mod.rs", "rank": 3, "score": 34197.18679556117 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::mem;\n\n use libipt_sys::pt_event_type_ptev_stop;\n\n\n\n #[test]\n\n fn test_create_event() {\n\n let evt = pt_event {\n\n type_: pt_event_type_ptev_stop,\n\n tsc: 1,\n\n lost_mtc: 2,\n\n lost_cyc: 3,\n\n _bitfield_1: pt_event::new_bitfield_1(1, 0, 1),\n\n variant: unsafe { mem::zeroed() },\n\n reserved: [0; 2]\n\n };\n\n\n\n let evt = Event(evt);\n\n assert!(evt.ip_suppressed());\n", "file_path": "src/event/mod.rs", "rank": 4, "score": 34195.027487085026 }, { "content": "pub use cbr::*;\n\nmod tma;\n\npub use tma::*;\n\nmod mtc;\n\npub use mtc::*;\n\nmod cyc;\n\npub use cyc::*;\n\nmod vmcs;\n\npub use vmcs::*;\n\nmod mnt;\n\npub use mnt::*;\n\nmod exstop;\n\npub use exstop::*;\n\nmod mwait;\n\npub use mwait::*;\n\nmod pwre;\n\npub use pwre::*;\n\nmod pwrx;\n\npub use pwrx::*;\n\nmod ptw;\n", "file_path": "src/packet/mod.rs", "rank": 5, "score": 34190.73424042788 }, { "content": "mod psb;\n\npub use psb::*;\n\nmod psbend;\n\npub use psbend::*;\n\nmod stop;\n\npub use stop::*;\n\nmod invalid;\n\npub use invalid::*;\n\n\n\nmod tnt;\n\npub use tnt::*;\n\nmod ip;\n\npub use ip::*;\n\nmod mode;\n\npub use mode::*;\n\nmod pip;\n\npub use pip::*;\n\nmod tsc;\n\npub use tsc::*;\n\nmod cbr;\n", "file_path": "src/packet/mod.rs", "rank": 6, "score": 34190.72504910559 }, { "content": "mod exstop;\n\npub use exstop::*;\n\nmod 
mwait;\n\npub use mwait::*;\n\nmod pwre;\n\npub use pwre::*;\n\nmod pwrx;\n\npub use pwrx::*;\n\nmod ptwrite;\n\npub use ptwrite::*;\n\nmod tick;\n\npub use tick::*;\n\nmod mnt;\n\npub use mnt::*;\n\nmod cbr;\n\npub use cbr::*;\n\n\n\nmod qry;\n\npub use qry::*;\n\n\n", "file_path": "src/event/mod.rs", "rank": 7, "score": 34190.704828599715 }, { "content": "mod flags;\n\nmod cpu;\n\nmod freqency;\n\nmod filter;\n\n\n\nmod config;\n\n\n\npub use config::*;\n\npub use cpu::*;\n\npub use freqency::*;\n\npub use flags::*;\n\npub use filter::*;", "file_path": "src/config/mod.rs", "rank": 8, "score": 34190.475844826935 }, { "content": " pt_event_type_ptev_tsx as PT_EVENT_TYPE_PTEV_TSX,\n\n pt_event_type_ptev_vmcs as PT_EVENT_TYPE_PTEV_VMCS\n\n};\n\n\n\nmod enabled;\n\npub use enabled::*;\n\nmod disabled;\n\npub use disabled::*;\n\nmod branch;\n\npub use branch::*;\n\nmod paging;\n\npub use paging::*;\n\nmod overflow;\n\npub use overflow::*;\n\nmod exec_mode;\n\npub use exec_mode::*;\n\nmod tsx;\n\npub use tsx::*;\n\nmod vmcs;\n\npub use vmcs::*;\n", "file_path": "src/event/mod.rs", "rank": 9, "score": 34190.39533491608 }, { "content": "mod class;\n\nmod decoder;\n\nmod insn;\n\npub use class::*;\n\npub use decoder::*;\n\npub use insn::*;", "file_path": "src/insn/mod.rs", "rank": 10, "score": 34190.213516640215 }, { "content": "mod block;\n\nmod decoder;\n\n\n\npub use block::*;\n\npub use decoder::*;", "file_path": "src/block/mod.rs", "rank": 11, "score": 34189.77762995974 }, { "content": "mod image;\n\nmod iscache;\n\n\n\npub use image::*;\n\npub use iscache::*;", "file_path": "src/image/mod.rs", "rank": 12, "score": 34189.77762995974 }, { "content": " pt_packet_type_ppt_pwrx as PT_PACKET_TYPE_PPT_PWRX,\n\n pt_packet_type_ppt_stop as PT_PACKET_TYPE_PPT_STOP,\n\n pt_packet_type_ppt_tip as PT_PACKET_TYPE_PPT_TIP,\n\n pt_packet_type_ppt_tip_pgd as PT_PACKET_TYPE_PPT_TIP_PGD,\n\n pt_packet_type_ppt_tip_pge as PT_PACKET_TYPE_PPT_TIP_PGE,\n\n pt_packet_type_ppt_tma as 
PT_PACKET_TYPE_PPT_TMA,\n\n pt_packet_type_ppt_tnt_8 as PT_PACKET_TYPE_PPT_TNT_8,\n\n pt_packet_type_ppt_tnt_64 as PT_PACKET_TYPE_PPT_TNT_64,\n\n pt_packet_type_ppt_tsc as PT_PACKET_TYPE_PPT_TSC,\n\n pt_packet_type_ppt_unknown as PT_PACKET_TYPE_PPT_UNKNOWN,\n\n pt_packet_type_ppt_vmcs as PT_PACKET_TYPE_PPT_VMCS\n\n};\n\n\n\n#[macro_use]\n\nmod conversions;\n\n\n\nmod pad;\n\npub use pad::*;\n\nmod ovf;\n\npub use ovf::*;\n", "file_path": "src/packet/mod.rs", "rank": 13, "score": 34187.656943020695 }, { "content": "use std::fmt::{Debug, Formatter};\n\n\n\nuse libipt_sys::{\n\n pt_packet,\n\n pt_packet_type_ppt_cbr as PT_PACKET_TYPE_PPT_CBR,\n\n pt_packet_type_ppt_cyc as PT_PACKET_TYPE_PPT_CYC,\n\n pt_packet_type_ppt_exstop as PT_PACKET_TYPE_PPT_EXSTOP,\n\n pt_packet_type_ppt_fup as PT_PACKET_TYPE_PPT_FUP,\n\n pt_packet_type_ppt_invalid as PT_PACKET_TYPE_PPT_INVALID,\n\n pt_packet_type_ppt_mnt as PT_PACKET_TYPE_PPT_MNT,\n\n pt_packet_type_ppt_mode as PT_PACKET_TYPE_PPT_MODE,\n\n pt_packet_type_ppt_mtc as PT_PACKET_TYPE_PPT_MTC,\n\n pt_packet_type_ppt_mwait as PT_PACKET_TYPE_PPT_MWAIT,\n\n pt_packet_type_ppt_ovf as PT_PACKET_TYPE_PPT_OVF,\n\n pt_packet_type_ppt_pad as PT_PACKET_TYPE_PPT_PAD,\n\n pt_packet_type_ppt_pip as PT_PACKET_TYPE_PPT_PIP,\n\n pt_packet_type_ppt_psb as PT_PACKET_TYPE_PPT_PSB,\n\n pt_packet_type_ppt_psbend as PT_PACKET_TYPE_PPT_PSBEND,\n\n pt_packet_type_ppt_ptw as PT_PACKET_TYPE_PPT_PTW,\n\n pt_packet_type_ppt_pwre as PT_PACKET_TYPE_PPT_PWRE,\n", "file_path": "src/packet/mod.rs", "rank": 14, "score": 34183.46635298112 }, { "content": "use libipt_sys::{\n\n pt_event,\n\n pt_event_type_ptev_async_branch as PT_EVENT_TYPE_PTEV_ASYNC_BRANCH,\n\n pt_event_type_ptev_async_disabled as PT_EVENT_TYPE_PTEV_ASYNC_DISABLED,\n\n pt_event_type_ptev_async_paging as PT_EVENT_TYPE_PTEV_ASYNC_PAGING,\n\n pt_event_type_ptev_async_vmcs as PT_EVENT_TYPE_PTEV_ASYNC_VMCS,\n\n pt_event_type_ptev_cbr as PT_EVENT_TYPE_PTEV_CBR,\n\n pt_event_type_ptev_disabled as 
PT_EVENT_TYPE_PTEV_DISABLED,\n\n pt_event_type_ptev_enabled as PT_EVENT_TYPE_PTEV_ENABLED,\n\n pt_event_type_ptev_exec_mode as PT_EVENT_TYPE_PTEV_EXEC_MODE,\n\n pt_event_type_ptev_exstop as PT_EVENT_TYPE_PTEV_EXSTOP,\n\n pt_event_type_ptev_mnt as PT_EVENT_TYPE_PTEV_MNT,\n\n pt_event_type_ptev_mwait as PT_EVENT_TYPE_PTEV_MWAIT,\n\n pt_event_type_ptev_overflow as PT_EVENT_TYPE_PTEV_OVERFLOW,\n\n pt_event_type_ptev_paging as PT_EVENT_TYPE_PTEV_PAGING,\n\n pt_event_type_ptev_ptwrite as PT_EVENT_TYPE_PTEV_PTWRITE,\n\n pt_event_type_ptev_pwre as PT_EVENT_TYPE_PTEV_PWRE,\n\n pt_event_type_ptev_pwrx as PT_EVENT_TYPE_PTEV_PWRX,\n\n pt_event_type_ptev_stop as PT_EVENT_TYPE_PTEV_STOP,\n\n pt_event_type_ptev_tick as PT_EVENT_TYPE_PTEV_TICK,\n", "file_path": "src/event/mod.rs", "rank": 15, "score": 34182.51687944062 }, { "content": " Ptw(ptw::Ptw)\n\n}\n\n\n\nimpl<T> Debug for Packet<T> {\n\n fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {\n\n match self {\n\n Self::Invalid(pack) => f.write_fmt(format_args!(\"Invalid({:?})\", pack)),\n\n Self::Psbend(pack) => f.write_fmt(format_args!(\"Psbend({:?})\", pack)),\n\n Self::Stop(pack) => f.write_fmt(format_args!(\"Stop({:?})\", pack)),\n\n Self::Pad(pack) => f.write_fmt(format_args!(\"Pad({:?})\", pack)),\n\n Self::Psb(pack) => f.write_fmt(format_args!(\"Psb({:?})\", pack)),\n\n Self::Ovf(pack) => f.write_fmt(format_args!(\"Ovf({:?})\", pack)),\n\n Self::Unknown(_) => f.write_str(\"Unknown\"),\n\n Self::Fup(pack) => f.write_fmt(format_args!(\"Fup({:?})\", pack)),\n\n Self::Tip(pack) => f.write_fmt(format_args!(\"Tip({:?})\", pack)),\n\n Self::TipPge(pack) => f.write_fmt(format_args!(\"TipPge({:?})\", pack)),\n\n Self::TipPgd(pack) => f.write_fmt(format_args!(\"TipPgd({:?})\", pack)),\n\n Self::Tnt8(pack) => f.write_fmt(format_args!(\"Tnt8({:?})\", pack)),\n\n Self::Tnt64(pack) => f.write_fmt(format_args!(\"Tnt64({:?})\", pack)),\n\n Self::Mode(pack) => f.write_fmt(format_args!(\"Mode({:?})\", pack)),\n", "file_path": 
"src/packet/mod.rs", "rank": 16, "score": 34181.100186089876 }, { "content": " type_: PT_PACKET_TYPE_PPT_MNT,\n\n size: std::mem::size_of::<pt_packet_mnt>() as u8,\n\n payload: pt_packet__bindgen_ty_1 { mnt: p1 }\n\n };\n\n let p3: Packet::<()> = p2.into();\n\n match p3 {\n\n Packet::Mnt(m) => assert_eq!(m.payload(), p1.payload),\n\n _ => unreachable!()\n\n };\n\n }\n\n}\n\n\n\npub enum Packet<T> {\n\n Invalid(invalid::Invalid),\n\n Psbend(psbend::Psbend),\n\n Stop(stop::Stop),\n\n Pad(pad::Pad),\n\n Psb(psb::Psb),\n\n Ovf(ovf::Ovf),\n\n Unknown(unknown::Unknown<T>),\n", "file_path": "src/packet/mod.rs", "rank": 17, "score": 34181.100186089876 }, { "content": "\n\n Fup(ip::Fup),\n\n Tip(ip::Tip),\n\n TipPge(ip::TipPge),\n\n TipPgd(ip::TipPgd),\n\n Tnt8(tnt::Tnt8),\n\n Tnt64(tnt::Tnt64),\n\n Mode(mode::Mode),\n\n Pip(pip::Pip),\n\n Vmcs(vmcs::Vmcs),\n\n Cbr(cbr::Cbr),\n\n Tsc(tsc::Tsc),\n\n Tma(tma::Tma),\n\n Mtc(mtc::Mtc),\n\n Cyc(cyc::Cyc),\n\n Mnt(mnt::Mnt),\n\n Exstop(exstop::Exstop),\n\n Mwait(mwait::Mwait),\n\n Pwre(pwre::Pwre),\n\n Pwrx(pwrx::Pwrx),\n", "file_path": "src/packet/mod.rs", "rank": 18, "score": 34181.100186089876 }, { "content": " Paging(Paging),\n\n AsyncPaging(AsyncPaging),\n\n Overflow(Overflow),\n\n ExecMode(ExecMode),\n\n Tsx(Tsx),\n\n Vmcs(Vmcs),\n\n AsyncVmcs(AsyncVmcs),\n\n Exstop(Exstop),\n\n Mwait(Mwait),\n\n Pwre(Pwre),\n\n Pwrx(Pwrx),\n\n Ptwrite(Ptwrite),\n\n Tick(Tick),\n\n Mnt(Mnt),\n\n Cbr(Cbr),\n\n Stop\n\n}\n\n\n\nimpl From<pt_event> for Payload {\n\n fn from(evt: pt_event) -> Payload {\n", "file_path": "src/event/mod.rs", "rank": 19, "score": 34181.100186089876 }, { "content": " PT_PACKET_TYPE_PPT_TIP_PGD => Packet::TipPgd(pkt.payload.ip.into()),\n\n PT_PACKET_TYPE_PPT_TIP_PGE => Packet::TipPge(pkt.payload.ip.into()),\n\n PT_PACKET_TYPE_PPT_TMA => Packet::Tma(pkt.payload.tma.into()),\n\n PT_PACKET_TYPE_PPT_TNT_8 => Packet::Tnt8(pkt.payload.tnt.into()),\n\n PT_PACKET_TYPE_PPT_TNT_64 => Packet::Tnt64(pkt.payload.tnt.into()),\n\n 
PT_PACKET_TYPE_PPT_TSC => Packet::Tsc(pkt.payload.tsc.into()),\n\n PT_PACKET_TYPE_PPT_VMCS => Packet::Vmcs(pkt.payload.vmcs.into()),\n\n PT_PACKET_TYPE_PPT_UNKNOWN => Packet::Unknown(unknown::Unknown::<T>::from(pkt.payload.unknown)),\n\n _ => unreachable!(\"invalid packet type\")\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/packet/mod.rs", "rank": 20, "score": 34181.100186089876 }, { "content": " unsafe {\n\n match evt.type_ {\n\n PT_EVENT_TYPE_PTEV_ASYNC_BRANCH => Payload::AsyncBranch(AsyncBranch(evt.variant.async_branch)),\n\n PT_EVENT_TYPE_PTEV_ASYNC_DISABLED => Payload::AsnycDisabled(AsyncDisabled(evt.variant.async_disabled)),\n\n PT_EVENT_TYPE_PTEV_ASYNC_PAGING => Payload::AsyncPaging(AsyncPaging(evt.variant.async_paging)),\n\n PT_EVENT_TYPE_PTEV_ASYNC_VMCS => Payload::AsyncVmcs(AsyncVmcs(evt.variant.async_vmcs)),\n\n PT_EVENT_TYPE_PTEV_CBR => Payload::Cbr(Cbr(evt.variant.cbr)),\n\n PT_EVENT_TYPE_PTEV_DISABLED => Payload::Disabled(Disabled(evt.variant.disabled)),\n\n PT_EVENT_TYPE_PTEV_ENABLED => Payload::Enabled(Enabled(evt.variant.enabled)),\n\n PT_EVENT_TYPE_PTEV_EXEC_MODE => Payload::ExecMode(ExecMode(evt.variant.exec_mode)),\n\n PT_EVENT_TYPE_PTEV_EXSTOP => Payload::Exstop(Exstop(evt.variant.exstop)),\n\n PT_EVENT_TYPE_PTEV_MNT => Payload::Mnt(Mnt(evt.variant.mnt)),\n\n PT_EVENT_TYPE_PTEV_MWAIT => Payload::Mwait(Mwait(evt.variant.mwait)),\n\n PT_EVENT_TYPE_PTEV_OVERFLOW => Payload::Overflow(Overflow(evt.variant.overflow)),\n\n PT_EVENT_TYPE_PTEV_PAGING => Payload::Paging(Paging(evt.variant.paging)),\n\n PT_EVENT_TYPE_PTEV_PTWRITE => Payload::Ptwrite(Ptwrite(evt.variant.ptwrite)),\n\n PT_EVENT_TYPE_PTEV_PWRE => Payload::Pwre(Pwre(evt.variant.pwre)),\n\n PT_EVENT_TYPE_PTEV_PWRX => Payload::Pwrx(Pwrx(evt.variant.pwrx)),\n\n PT_EVENT_TYPE_PTEV_TICK => Payload::Tick(Tick(evt.variant.tick)),\n\n PT_EVENT_TYPE_PTEV_TSX => Payload::Tsx(Tsx(evt.variant.tsx)),\n", "file_path": "src/event/mod.rs", "rank": 21, "score": 34181.100186089876 }, { "content": " 
assert!(!evt.status_update());\n\n assert!(evt.has_tsc());\n\n\n\n assert_eq!(evt.tsc(), 1);\n\n assert_eq!(evt.lost_mtc(), 2);\n\n assert_eq!(evt.lost_cyc(), 3);\n\n\n\n match evt.payload() {\n\n Payload::Stop => (),\n\n _ => unreachable!()\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Payload {\n\n Enabled(Enabled),\n\n Disabled(Disabled),\n\n AsnycDisabled(AsyncDisabled),\n\n AsyncBranch(AsyncBranch),\n", "file_path": "src/event/mod.rs", "rank": 22, "score": 34181.100186089876 }, { "content": " match pkt.type_ {\n\n PT_PACKET_TYPE_PPT_CBR => Packet::Cbr(pkt.payload.cbr.into()),\n\n PT_PACKET_TYPE_PPT_CYC => Packet::Cyc(pkt.payload.cyc.into()),\n\n PT_PACKET_TYPE_PPT_EXSTOP => Packet::Exstop(pkt.payload.exstop.into()),\n\n PT_PACKET_TYPE_PPT_FUP => Packet::Fup(pkt.payload.ip.into()),\n\n PT_PACKET_TYPE_PPT_INVALID => Packet::Invalid(pkt.into()),\n\n PT_PACKET_TYPE_PPT_MNT => Packet::Mnt(pkt.payload.mnt.into()),\n\n PT_PACKET_TYPE_PPT_MODE => Packet::Mode(pkt.payload.mode.into()),\n\n PT_PACKET_TYPE_PPT_MTC => Packet::Mtc(pkt.payload.mtc.into()),\n\n PT_PACKET_TYPE_PPT_MWAIT => Packet::Mwait(pkt.payload.mwait.into()),\n\n PT_PACKET_TYPE_PPT_OVF => Packet::Ovf(pkt.into()),\n\n PT_PACKET_TYPE_PPT_PAD => Packet::Pad(pkt.into()),\n\n PT_PACKET_TYPE_PPT_PIP => Packet::Pip(pkt.payload.pip.into()),\n\n PT_PACKET_TYPE_PPT_PSB => Packet::Psb(pkt.into()),\n\n PT_PACKET_TYPE_PPT_PSBEND => Packet::Psbend(pkt.into()),\n\n PT_PACKET_TYPE_PPT_PTW => Packet::Ptw(pkt.payload.ptw.into()),\n\n PT_PACKET_TYPE_PPT_PWRE => Packet::Pwre(pkt.payload.pwre.into()),\n\n PT_PACKET_TYPE_PPT_PWRX => Packet::Pwrx(pkt.payload.pwrx.into()),\n\n PT_PACKET_TYPE_PPT_STOP => Packet::Stop(pkt.into()),\n\n PT_PACKET_TYPE_PPT_TIP => Packet::Tip(pkt.payload.ip.into()),\n", "file_path": "src/packet/mod.rs", "rank": 23, "score": 34181.100186089876 }, { "content": " /// The number of lost mtc packets.\n\n ///\n\n /// This gives an idea about the quality of the \\@tsc.\n\n /// The more packets were 
dropped, the less precise timing is.\n\n pub fn lost_mtc(self) -> u32 { self.0.lost_mtc }\n\n /// The number of lost cyc packets.\n\n ///\n\n /// This gives an idea about the quality of the \\@tsc.\n\n /// The more packets were dropped, the less precise timing is.\n\n pub fn lost_cyc(self) -> u32 { self.0.lost_cyc }\n\n /// Event specific data.\n\n pub fn payload(self) -> Payload { self.0.into() }\n\n}", "file_path": "src/event/mod.rs", "rank": 24, "score": 34181.100186089876 }, { "content": " PT_EVENT_TYPE_PTEV_VMCS => Payload::Vmcs(Vmcs(evt.variant.vmcs)),\n\n PT_EVENT_TYPE_PTEV_STOP => Payload::Stop,\n\n _ => unreachable!()\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Event(pub(crate) pt_event);\n\nimpl Event {\n\n /// A flag indicating that the event IP has been suppressed.\n\n pub fn ip_suppressed(self) -> bool { self.0.ip_suppressed() > 0 }\n\n /// A flag indicating that the event is for status update.\n\n pub fn status_update(self) -> bool { self.0.status_update() > 0 }\n\n /// A flag indicating that the event has timing information.\n\n pub fn has_tsc(self) -> bool { self.0.has_tsc() > 0 }\n\n /// The time stamp count of the event.\n\n /// This field is only valid if \\@has_tsc is set.\n\n pub fn tsc(self) -> u64 { self.0.tsc }\n", "file_path": "src/event/mod.rs", "rank": 25, "score": 34181.100186089876 }, { "content": " Self::Pip(pack) => f.write_fmt(format_args!(\"Pip({:?})\", pack)),\n\n Self::Vmcs(pack) => f.write_fmt(format_args!(\"Vmcs({:?})\", pack)),\n\n Self::Cbr(pack) => f.write_fmt(format_args!(\"Cbr({:?})\", pack)),\n\n Self::Tsc(pack) => f.write_fmt(format_args!(\"Tsc({:?})\", pack)),\n\n Self::Tma(pack) => f.write_fmt(format_args!(\"Tma({:?})\", pack)),\n\n Self::Mtc(pack) => f.write_fmt(format_args!(\"Mtc({:?})\", pack)),\n\n Self::Cyc(pack) => f.write_fmt(format_args!(\"Cyc({:?})\", pack)),\n\n Self::Mnt(pack) => f.write_fmt(format_args!(\"Mnt({:?})\", pack)),\n\n Self::Exstop(pack) => 
f.write_fmt(format_args!(\"Exstop({:?})\", pack)),\n\n Self::Mwait(pack) => f.write_fmt(format_args!(\"Mwait({:?})\", pack)),\n\n Self::Pwre(pack) => f.write_fmt(format_args!(\"Pwre({:?})\", pack)),\n\n Self::Pwrx(pack) => f.write_fmt(format_args!(\"Pwrx({:?})\", pack)),\n\n Self::Ptw(pack) => f.write_fmt(format_args!(\"Ptw({:?})\", pack)),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> From<pt_packet> for Packet<T> {\n\n fn from(pkt: pt_packet) -> Self {\n\n unsafe {\n", "file_path": "src/packet/mod.rs", "rank": 26, "score": 34181.100186089876 }, { "content": "use libipt_sys::{\n\n pt_event__bindgen_ty_1__bindgen_ty_5,\n\n pt_event__bindgen_ty_1__bindgen_ty_6\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{\n\n pt_event,\n\n pt_event_type_ptev_paging,\n\n pt_event_type_ptev_async_paging\n\n };\n\n\n\n #[test]\n\n fn test_paging_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_paging;\n", "file_path": "src/event/paging.rs", "rank": 27, "score": 15.143237465408818 }, { "content": "use libipt_sys::{\n\n pt_event__bindgen_ty_1__bindgen_ty_2,\n\n pt_event__bindgen_ty_1__bindgen_ty_3\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{\n\n pt_event,\n\n pt_event_type_ptev_disabled,\n\n pt_event_type_ptev_async_disabled\n\n };\n\n\n\n #[test]\n\n fn test_disabled_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_disabled;\n", "file_path": "src/event/disabled.rs", "rank": 28, "score": 15.143237465408818 }, { "content": "use libipt_sys::{\n\n pt_event__bindgen_ty_1__bindgen_ty_10,\n\n pt_event__bindgen_ty_1__bindgen_ty_11\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{\n\n pt_event,\n\n pt_event_type_ptev_vmcs,\n\n pt_event_type_ptev_async_vmcs\n\n 
};\n\n\n\n #[test]\n\n fn test_vmcs_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_vmcs;\n", "file_path": "src/event/vmcs.rs", "rank": 29, "score": 15.143237465408818 }, { "content": "use std::convert::TryFrom;\n\nuse libipt_sys::{\n\n pt_event__bindgen_ty_1__bindgen_ty_8,\n\n pt_exec_mode_ptem_16bit,\n\n pt_exec_mode_ptem_32bit,\n\n pt_exec_mode_ptem_64bit,\n\n pt_exec_mode_ptem_unknown\n\n};\n\n\n\nuse num_enum::TryFromPrimitive;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_exec_mode };\n\n\n\n #[test]\n\n fn test_exec_mode_payload() {\n", "file_path": "src/event/exec_mode.rs", "rank": 30, "score": 15.100522303376154 }, { "content": "use crate::event::ExecModeType;\n\nuse super::Class;\n\n\n\nuse std::convert::TryFrom;\n\n\n\nuse libipt_sys::pt_insn;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use libipt_sys::pt_insn_class_ptic_call;\n\n use libipt_sys::pt_exec_mode_ptem_32bit;\n\n\n\n #[test]\n\n fn test_insn_props() {\n\n let data: [u8; 15] = [17; 15];\n\n let blk = Insn(pt_insn{\n\n ip: 1,\n\n isid: 2,\n\n mode: pt_exec_mode_ptem_32bit,\n", "file_path": "src/insn/insn.rs", "rank": 31, "score": 15.049902323446254 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_16;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_ptwrite };\n\n\n\n #[test]\n\n fn test_ptwrite_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_ptwrite;\n\n evt.variant.ptwrite = pt_event__bindgen_ty_1__bindgen_ty_16 {\n\n ip: 11,\n\n size: 22,\n\n payload: 33\n\n };\n\n\n\n let payload: Payload = evt.into();\n", "file_path": "src/event/ptwrite.rs", "rank": 32, "score": 15.008588126888663 }, { "content": "use 
libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_13;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_mwait };\n\n\n\n #[test]\n\n fn test_mwait_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_mwait;\n\n evt.variant.mwait = pt_event__bindgen_ty_1__bindgen_ty_13 {\n\n ip: 11,\n\n hints: 22,\n\n ext: 33\n\n };\n\n\n\n let payload: Payload = evt.into();\n", "file_path": "src/event/mwait.rs", "rank": 33, "score": 15.008588126888663 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_17;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_tick};\n\n\n\n #[test]\n\n fn test_tick_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_tick;\n\n evt.variant.tick = pt_event__bindgen_ty_1__bindgen_ty_17 {\n\n ip: 11,\n\n };\n\n\n\n let payload: Payload = evt.into();\n\n match payload {\n\n Payload::Tick(e) => {\n", "file_path": "src/event/tick.rs", "rank": 34, "score": 14.920404318944492 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_12;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_exstop };\n\n\n\n #[test]\n\n fn test_exstop_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_exstop;\n\n evt.variant.exstop = pt_event__bindgen_ty_1__bindgen_ty_12 {\n\n ip: 11,\n\n };\n\n\n\n let payload: Payload = evt.into();\n\n match payload {\n\n Payload::Exstop(e) => {\n", "file_path": "src/event/exstop.rs", "rank": 35, "score": 14.920404318944492 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_7;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n 
use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_overflow };\n\n\n\n #[test]\n\n fn test_overflow_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_overflow;\n\n evt.variant.overflow = pt_event__bindgen_ty_1__bindgen_ty_7 {\n\n ip: 11\n\n };\n\n\n\n let payload: Payload = evt.into();\n\n match payload {\n\n Payload::Overflow(e) => {\n", "file_path": "src/event/overflow.rs", "rank": 36, "score": 14.920404318944492 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_18;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_cbr };\n\n\n\n #[test]\n\n fn test_cbr_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_cbr;\n\n evt.variant.cbr = pt_event__bindgen_ty_1__bindgen_ty_18 {\n\n ratio: 18\n\n };\n\n\n\n let payload: Payload = evt.into();\n\n match payload {\n\n Payload::Cbr(e) => {\n", "file_path": "src/event/cbr.rs", "rank": 37, "score": 14.920404318944492 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_19;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_mnt };\n\n\n\n #[test]\n\n fn test_mnt_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_mnt;\n\n evt.variant.mnt= pt_event__bindgen_ty_1__bindgen_ty_19 {\n\n payload: 17\n\n };\n\n\n\n let payload: Payload = evt.into();\n\n match payload {\n\n Payload::Mnt(e) => {\n", "file_path": "src/event/mnt.rs", "rank": 38, "score": 14.920404318944492 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_4;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_async_branch };\n\n\n\n #[test]\n\n fn 
test_branch_async_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_async_branch;\n\n evt.variant.async_branch = pt_event__bindgen_ty_1__bindgen_ty_4 {\n\n from: 1,\n\n to: 2\n\n };\n\n\n\n let payload: Payload = evt.into();\n\n match payload {\n", "file_path": "src/event/branch.rs", "rank": 39, "score": 14.876775184576084 }, { "content": "use super::cpu::Cpu;\n\nuse super::freqency::Frequency;\n\nuse super::filter::AddrFilter;\n\nuse crate::packet::Unknown;\n\nuse crate::error::{ PtError, PtErrorCode };\n\n\n\nuse std::mem;\n\nuse std::borrow::Cow;\n\nuse std::marker::PhantomData;\n\nuse std::ffi::c_void;\n\nuse std::os::raw::c_int;\n\n\n\nuse libipt_sys::{\n\n pt_config,\n\n pt_conf_flags,\n\n pt_packet_unknown\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/config/config.rs", "rank": 40, "score": 14.853668740342986 }, { "content": "use std::ffi::CStr;\n\nuse libipt_sys::{\n\n pt_version,\n\n pt_library_version\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_get_version() {\n\n let v = Version::version();\n\n assert_ne!(v.major(), 0);\n\n }\n\n}\n\n\n\n/// The library version.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Version(pt_version);\n", "file_path": "src/version.rs", "rank": 41, "score": 14.719294961577596 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_15;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_pwrx };\n\n\n\n #[test]\n\n fn test_pwrx_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_pwrx;\n\n evt.variant.pwrx = pt_event__bindgen_ty_1__bindgen_ty_15 {\n\n last: 11,\n\n deepest: 22,\n\n _bitfield_1: pt_event__bindgen_ty_1__bindgen_ty_15::new_bitfield_1(1, 0, 1),\n\n __bindgen_padding_0: Default::default()\n\n };\n\n\n", "file_path": "src/event/pwrx.rs", "rank": 42, 
"score": 14.621242090592714 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_14;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_pwre };\n\n\n\n #[test]\n\n fn test_pwre_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_pwre;\n\n evt.variant.pwre = pt_event__bindgen_ty_1__bindgen_ty_14 {\n\n state: 11,\n\n sub_state: 22,\n\n _bitfield_1: pt_event__bindgen_ty_1__bindgen_ty_14::new_bitfield_1(1),\n\n __bindgen_padding_0: Default::default()\n\n };\n\n\n", "file_path": "src/event/pwre.rs", "rank": 43, "score": 14.579660023746921 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_1;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_enabled };\n\n\n\n #[test]\n\n fn test_enabled_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_enabled;\n\n evt.variant.enabled = pt_event__bindgen_ty_1__bindgen_ty_1 {\n\n ip: 11,\n\n _bitfield_1: pt_event__bindgen_ty_1__bindgen_ty_1::new_bitfield_1(1),\n\n __bindgen_padding_0: Default::default()\n\n };\n\n\n\n let payload: Payload = evt.into();\n", "file_path": "src/event/enabled.rs", "rank": 44, "score": 14.538356429767877 }, { "content": "use libipt_sys::pt_event__bindgen_ty_1__bindgen_ty_9;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use super::super::Payload;\n\n use std::mem;\n\n use libipt_sys::{ pt_event, pt_event_type_ptev_tsx };\n\n\n\n #[test]\n\n fn test_tsx_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_tsx;\n\n evt.variant.tsx = pt_event__bindgen_ty_1__bindgen_ty_9 {\n\n ip: 11,\n\n _bitfield_1: pt_event__bindgen_ty_1__bindgen_ty_9::new_bitfield_1(1, 0),\n\n __bindgen_padding_0: Default::default()\n\n };\n\n\n\n let 
payload: Payload = evt.into();\n", "file_path": "src/event/tsx.rs", "rank": 45, "score": 14.538356429767877 }, { "content": "use crate::insn::Class;\n\nuse crate::event::ExecModeType;\n\nuse std::convert::TryFrom;\n\nuse libipt_sys::pt_block;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use libipt_sys::{\n\n pt_exec_mode_ptem_32bit,\n\n pt_insn_class_ptic_error,\n\n };\n\n\n\n #[test]\n\n fn test_block_props() {\n\n let data: [u8; 15] = [17; 15];\n\n let blk = Block(pt_block {\n\n ip: 1,\n\n end_ip: 2,\n\n isid: 3,\n", "file_path": "src/block/block.rs", "rank": 46, "score": 14.435412122570268 }, { "content": " pt_pkt_sync_backward,\n\n pt_pkt_sync_forward,\n\n pt_pkt_sync_set\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::config::ConfigBuilder;\n\n\n\n #[test]\n\n fn test_pktdec_alloc() {\n\n let daturu = &mut [11; 11];\n\n PacketDecoder::new(&ConfigBuilder::new(daturu)\n\n .unwrap()\n\n .finish()\n\n ).unwrap();\n\n }\n\n\n\n #[test ]\n", "file_path": "src/packet/decoder.rs", "rank": 47, "score": 14.365961367168756 }, { "content": "};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::path::PathBuf;\n\n use std::fs;\n\n\n\n #[test]\n\n fn test_isc_alloc() {\n\n SectionCache::new(None).unwrap();\n\n SectionCache::new(Some(\"yeet\")).unwrap();\n\n }\n\n\n\n #[test]\n\n fn test_isc_name() {\n\n let i = SectionCache::new(None).unwrap();\n\n assert!(i.name().is_none());\n\n let i = SectionCache::new(Some(\"yeet\")).unwrap();\n\n assert_eq!(i.name().unwrap(), \"yeet\");\n", "file_path": "src/image/iscache.rs", "rank": 48, "score": 14.265848332508861 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::config::ConfigBuilder;\n\n use crate::packet::Mnt;\n\n\n\n #[test]\n\n fn test_pktdec_alloc() {\n\n let kek = &mut [1; 2];\n\n Encoder::new(&mut ConfigBuilder::new(kek).unwrap().finish())\n\n .unwrap();\n\n }\n\n\n\n #[test ]\n\n fn test_pktdec_props() {\n\n let kek = &mut [1; 2];\n\n // this just 
checks memory safety for property access\n\n // usage can be found in the integration tests\n\n let mut p = Encoder::new(\n\n &mut ConfigBuilder::new(kek).unwrap().finish()\n", "file_path": "src/packet/encoder.rs", "rank": 49, "score": 13.93575807966247 }, { "content": " pt_image_free,\n\n pt_image_name,\n\n pt_image_remove_by_asid,\n\n pt_image_remove_by_filename,\n\n pt_image_set_callback,\n\n pt_asid\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::path::PathBuf;\n\n\n\n // not much to test for in the unit tests\n\n // the integration tests should have way more stuffs\n\n\n\n #[test]\n\n fn test_img_alloc() {\n\n Image::new(None).unwrap();\n\n Image::new(Some(\"yeet\")).unwrap();\n", "file_path": "src/image/image.rs", "rank": 50, "score": 13.895807159614344 }, { "content": "use libipt_sys::{\n\n pt_cpu,\n\n pt_cpu_vendor_pcv_intel,\n\n pt_cpu_vendor_pcv_unknown,\n\n pt_errata,\n\n pt_cpu_errata,\n\n};\n\n\n\nuse bitflags::bitflags;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_cpu_intel_shortcut() {\n\n let cpu1 = Cpu::intel(66, 12, 255);\n\n let cpu2 = Cpu::new(CpuVendor::INTEL, 66, 12, 255);\n\n assert_eq!(cpu1.0.vendor, cpu2.0.vendor);\n\n assert_eq!(cpu1.0.family, cpu2.0.family);\n", "file_path": "src/config/cpu.rs", "rank": 51, "score": 13.703346074096626 }, { "content": "use std::mem;\n\nuse std::convert::TryFrom;\n\nuse libipt_sys::pt_conf_addr_filter;\n\nuse num_enum::TryFromPrimitive;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_addrfilter() {\n\n let filter = AddrFilterBuilder::new()\n\n .addr0(AddrRange::new(1, 2, AddrConfig::DISABLED))\n\n .addr1(AddrRange::new(3, 4, AddrConfig::FILTER))\n\n .addr2(AddrRange::new(5, 6, AddrConfig::STOP))\n\n .addr3(AddrRange::new(7, 8, AddrConfig::DISABLED))\n\n .finish();\n\n\n\n assert_eq!(filter.addr0().a(), 1);\n\n assert_eq!(filter.addr0().b(), 2);\n", "file_path": "src/config/filter.rs", "rank": 52, "score": 
13.383975586159044 }, { "content": " pt_event,\n\n pt_qry_free_decoder,\n\n pt_qry_get_config,\n\n pt_qry_get_offset,\n\n pt_qry_get_sync_offset,\n\n pt_qry_indirect_branch,\n\n pt_qry_sync_backward,\n\n pt_qry_sync_forward,\n\n pt_qry_sync_set,\n\n pt_qry_time\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::config::ConfigBuilder;\n\n\n\n #[test]\n\n fn test_qrydec_alloc() {\n\n let kek = &mut [2; 1];\n", "file_path": "src/event/qry.rs", "rank": 53, "score": 13.290771536269347 }, { "content": "use libipt_sys::{\n\n pt_conf_flags,\n\n pt_conf_flags__bindgen_ty_1,\n\n pt_conf_flags__bindgen_ty_1__bindgen_ty_1,\n\n pt_conf_flags__bindgen_ty_1__bindgen_ty_2,\n\n pt_conf_flags__bindgen_ty_1__bindgen_ty_3,\n\n __BindgenBitfieldUnit\n\n};\n\n\n\nuse bitflags::bitflags;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_block_flags() {\n\n let blk: BlockFlags = BlockFlags::END_ON_CALL | BlockFlags::END_ON_JUMP;\n\n let raw: pt_conf_flags = blk.into();\n\n\n", "file_path": "src/config/flags.rs", "rank": 54, "score": 13.228935339240532 }, { "content": "use libipt_sys::{pt_asid, pt_asid_no_cr3 as NO_CR3, pt_asid_no_vmcs as NO_VMCS};\n\nuse std::mem;\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_asid_basic_values() {\n\n let mut asid = Asid::new(Some(0), Some(2));\n\n assert_eq!(asid.cr3(), Some(0));\n\n assert_eq!(asid.vmcs(), Some(2));\n\n\n\n // -1 because max would be NO_CR3\n\n asid.set_cr3(std::u64::MAX - 1);\n\n asid.set_vmcs(std::i64::MAX as u64);\n\n assert_eq!(asid.cr3(), Some(std::u64::MAX - 1));\n\n assert_eq!(asid.vmcs(), Some(std::i64::MAX as u64));\n\n }\n\n\n", "file_path": "src/asid.rs", "rank": 55, "score": 12.981465174885264 }, { "content": " use super::*;\n\n use crate::config::*;\n\n use crate::packet::Unknown;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_config_empty() {\n\n let c = ConfigBuilder::new(&mut [0; 0]).unwrap().finish();\n\n assert_eq!(c.0.begin, 
c.0.end);\n\n assert_eq!(c.0.size, mem::size_of::<pt_config>());\n\n }\n\n\n\n #[test]\n\n fn test_config_buf() {\n\n let mut data = [0; 16];\n\n let len = data.len();\n\n let c = ConfigBuilder::new(&mut data).unwrap().finish();\n\n assert_eq!(c.0.end as usize - c.0.begin as usize, len);\n\n }\n\n\n", "file_path": "src/config/config.rs", "rank": 57, "score": 11.035151630178266 }, { "content": " use super::*;\n\n use crate::config::ConfigBuilder;\n\n\n\n #[test]\n\n fn test_insndec_alloc() {\n\n let kek = &mut [1; 2];\n\n InsnDecoder::new(&ConfigBuilder::new(kek).unwrap().finish())\n\n .unwrap();\n\n }\n\n\n\n #[test ]\n\n fn test_insndec_props() {\n\n let kek = &mut [1; 2];\n\n // this just checks memory safety for property access\n\n // usage can be found in the integration tests\n\n let mut b = InsnDecoder::new(\n\n &ConfigBuilder::new(kek).unwrap().finish()\n\n ).unwrap();\n\n\n\n let a = b.asid().unwrap();\n", "file_path": "src/insn/decoder.rs", "rank": 58, "score": 10.756983648490053 }, { "content": " use super::*;\n\n use crate::config::ConfigBuilder;\n\n\n\n #[test]\n\n fn test_blkdec_alloc() {\n\n let kek = &mut [1; 2];\n\n BlockDecoder::new(\n\n &ConfigBuilder::new(kek).unwrap().finish()\n\n ).unwrap();\n\n }\n\n\n\n #[test ]\n\n fn test_blkdec_props() {\n\n let kek = &mut [1; 2];\n\n // this just checks memory safety for property access\n\n // usage can be found in the integration tests\n\n let mut b = BlockDecoder::new(\n\n &ConfigBuilder::new(kek).unwrap().finish()\n\n ).unwrap();\n\n let a = b.asid().unwrap();\n", "file_path": "src/block/decoder.rs", "rank": 59, "score": 10.756983648490053 }, { "content": "pub mod packet;\n\n\n\n/// The event layer deals with packet combinations that encode higher-level events.\n\n///\n\n/// It is used for reconstructing execution flow for users who need finer-grain control not available via the instruction flow layer\n\n/// or for users who want to integrate execution flow reconstruction with other functionality more 
tightly than it would be possible otherwise.\n\npub mod event;\n\n\n\n/// The block layer provides a simple API for iterating over blocks of sequential instructions in execution order.\n\n///\n\n/// The instructions in a block are sequential in the sense that no trace is required for reconstructing the instructions.\n\n/// The IP of the first instruction is given in struct `Block` and the IP of other instructions in the block can be determined by decoding and examining the previous instruction.\n\npub mod block;\n\n\n\n/// The instruction flow layer provides a simple API for iterating over instructions in execution order.\n\npub mod insn;\n\n\n\nmod version;\n\npub use version::Version;\n\nmod image;\n\npub use image::*;\n\nmod asid;\n\npub use asid::Asid;\n\nmod flags;\n\npub use flags::Status;", "file_path": "src/lib.rs", "rank": 60, "score": 8.355408223877557 }, { "content": "use crate::error::{\n\n PtError, deref_ptresult,\n\n deref_ptresult_mut, PtErrorCode,\n\n ensure_ptok, extract_pterr\n\n};\n\nuse crate::config::Config;\n\nuse crate::Asid;\n\nuse crate::event::Event;\n\nuse crate::Status;\n\nuse crate::Image;\n\nuse super::Insn;\n\n\n\nuse std::mem;\n\nuse std::ptr;\n\nuse std::marker::PhantomData;\n\n\n\nuse libipt_sys::{\n\n pt_insn_decoder,\n\n pt_insn_alloc_decoder,\n\n pt_insn_asid,\n", "file_path": "src/insn/decoder.rs", "rank": 61, "score": 7.287458360811428 }, { "content": "use super::Block;\n\nuse crate::asid::Asid;\n\nuse crate::config::Config;\n\nuse crate::event::Event;\n\nuse crate::flags::Status;\n\nuse crate::image::Image;\n\nuse crate::error::{\n\n PtError, ensure_ptok,\n\n extract_pterr, deref_ptresult,\n\n deref_ptresult_mut, PtErrorCode\n\n};\n\n\n\nuse std::mem;\n\nuse std::ptr;\n\nuse std::marker::PhantomData;\n\n\n\nuse libipt_sys::{\n\n pt_event,\n\n pt_block,\n\n pt_block_decoder,\n", "file_path": "src/block/decoder.rs", "rank": 62, "score": 7.287458360811428 }, { "content": "use super::SectionCache;\n\nuse crate::asid::Asid;\n\nuse 
crate::error::{\n\n deref_ptresult,\n\n deref_ptresult_mut,\n\n ensure_ptok,\n\n extract_pterr,\n\n PtError,\n\n PtErrorCode\n\n};\n\nuse std::ffi::{CStr, CString, c_void};\n\nuse std::ptr;\n\nuse std::mem;\n\nuse std::slice;\n\nuse libipt_sys::{\n\n pt_image,\n\n pt_image_add_cached,\n\n pt_image_add_file,\n\n pt_image_alloc,\n\n pt_image_copy,\n", "file_path": "src/image/image.rs", "rank": 63, "score": 7.144896260158925 }, { "content": "use crate::error::{\n\n PtError, PtErrorCode,\n\n deref_ptresult, deref_ptresult_mut,\n\n ensure_ptok\n\n};\n\nuse super::Packet;\n\nuse crate::config::Config;\n\n\n\nuse std::mem;\n\nuse std::marker::PhantomData;\n\n\n\nuse libipt_sys::{\n\n pt_packet_decoder,\n\n pt_pkt_alloc_decoder,\n\n pt_pkt_free_decoder,\n\n pt_pkt_get_config,\n\n pt_pkt_get_offset,\n\n pt_pkt_get_sync_offset,\n\n pt_pkt_next,\n\n pt_packet,\n", "file_path": "src/packet/decoder.rs", "rank": 64, "score": 6.9596128399129045 }, { "content": "/// The pt_config structure defines an Intel Processor Trace (Intel PT) encoder or decoder configuration.\n\n///\n\n/// It is required for allocating a trace packet encoder (see pt_alloc_encoder(3)),\n\n/// a trace packet decoder (see pt_pkt_alloc_decoder(3)),\n\n/// a query decoder (see pt_qry_alloc_decoder(3)),\n\n/// or an instruction flow decoder (see pt_insn_alloc_decoder(3)).\n\npub mod config;\n\npub use config::*;\n\n\n\n/// The library uses a single error enum for all layers.\n\n///\n\n/// Not all errors may occur on every layer.\n\n/// Every API function specifies the errors it may return. 
(not accurate!)\n\npub mod error;\n\npub use error::PtError;\n\npub use error::PtErrorCode;\n\n\n\n/// This layer deals with Intel PT packet encoding and decoding.\n\n///\n\n/// It can further be split into three sub-layers: opcodes, encoding, and decoding.\n", "file_path": "src/lib.rs", "rank": 65, "score": 6.904364831790273 }, { "content": "# Libipt\n\n\n\n> The Intel Processor Trace (Intel PT) Decoder Library is Intel's reference\n\nimplementation for decoding Intel PT. It can be used as a standalone library or\n\nit can be partially or fully integrated into your tool.\n\n\n\nThis Repository contains high level rust bindings for the complete functionality provided by [the original libipt library](https://github.com/intel/libipt).\n\n\n\nHuge thanks to the rust discord community for being awesome and helping me out with some stuffs :D.\n\n\n\n# State\n\n\n\n## Testing\n\n\n\nAll of the functionality is implemented and should be working\n\nbut the test coverage is not complete.\n\nIf there is interest in this library i might add some more testing.\n\nContributions are also appreciated.\n\n\n\n## Documentation\n\n\n\nI did my best to provide useful documentation for most of the library.\n\nIf you see any missing or weird documentation feel free to open an issue or pull request.\n\n\n\ndocs.rs is sadly unable to build the project because of a header file which needs to be copied out of the build dir.\n\nIll need to get the sorted out somehow.\n\n\n\n# Unit Tests\n\n- block: ✔️\n\n- config: ✔️\n\n- event: ✔️\n\n- image: ✔️\n\n- insn: ✔️\n\n- packet: ✔️\n\n- asid: ✔️️\n\n- encoder: ✔️\n\n- query: ✔️\n\n- version: ✔️\n\n\n\n# Integration Tests\n\n- Encoding: ❌\n\n- Block Decoding: ❌\n\n- Insn Decoding: ❌\n\n- Packet Decoding: ❌\n\n- Query Decoding: ❌\n", "file_path": "readme.md", "rank": 66, "score": 6.708522185036571 }, { "content": " pt_asid,\n\n pt_insn_core_bus_ratio,\n\n pt_insn_event,\n\n pt_event,\n\n pt_insn_free_decoder,\n\n pt_insn_get_config,\n\n 
pt_insn_get_image,\n\n pt_insn_get_offset,\n\n pt_insn_get_sync_offset,\n\n pt_insn_next,\n\n pt_insn,\n\n pt_insn_set_image,\n\n pt_insn_sync_backward,\n\n pt_insn_sync_forward,\n\n pt_insn_sync_set,\n\n pt_insn_time\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/insn/decoder.rs", "rank": 67, "score": 6.232101421049599 }, { "content": " pt_blk_alloc_decoder,\n\n pt_blk_core_bus_ratio,\n\n pt_blk_free_decoder,\n\n pt_blk_get_config,\n\n pt_blk_get_image,\n\n pt_blk_get_offset,\n\n pt_blk_get_sync_offset,\n\n pt_blk_set_image,\n\n pt_blk_sync_backward,\n\n pt_blk_sync_forward,\n\n pt_blk_sync_set,\n\n pt_blk_time,\n\n pt_blk_next,\n\n pt_blk_event,\n\n pt_blk_asid,\n\n pt_asid\n\n};\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/block/decoder.rs", "rank": 68, "score": 6.145229312516349 }, { "content": " }\n\n\n\n #[test]\n\n fn test_img_remove_asid() {\n\n assert_eq!(\n\n img_with_file()\n\n .remove_by_asid(Asid::new(Some(1), Some(2)))\n\n .unwrap(),\n\n 1\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_img_copy() {\n\n assert_eq!(img_with_file().copy(&img_with_file()).unwrap(), 0)\n\n }\n\n\n\n #[test]\n\n fn test_img_add_cached() {\n\n let file: PathBuf = [\n", "file_path": "src/image/image.rs", "rank": 71, "score": 4.7856711408308605 }, { "content": " }\n\n\n\n #[test]\n\n fn test_img_file() {\n\n img_with_file();\n\n }\n\n\n\n #[test]\n\n fn test_img_remove_filename() {\n\n let file: PathBuf = [\n\n env!(\"CARGO_MANIFEST_DIR\"), \"testfiles\", \"garbage.txt\"\n\n ].iter().collect();\n\n\n\n assert_eq!(\n\n img_with_file()\n\n .remove_by_filename(file.to_str().unwrap(), \n\n Asid::new(Some(1), Some(2)))\n\n .unwrap(),\n\n 1\n\n );\n", "file_path": "src/image/image.rs", "rank": 72, "score": 4.537368970865926 }, { "content": "use crate::error::{\n\n PtError, deref_ptresult,\n\n ensure_ptok, extract_pterr,\n\n deref_ptresult_mut, PtErrorCode\n\n};\n\nuse crate::config::Config;\n\nuse crate::Status;\n\nuse crate::event::Event;\n\n\n\nuse 
std::convert::TryFrom;\n\nuse std::marker::PhantomData;\n\nuse std::mem;\n\n\n\nuse num_enum::TryFromPrimitive;\n\nuse libipt_sys::{\n\n pt_qry_alloc_decoder,\n\n pt_query_decoder,\n\n pt_qry_cond_branch,\n\n pt_qry_core_bus_ratio,\n\n pt_qry_event,\n", "file_path": "src/event/qry.rs", "rank": 73, "score": 4.52953484923653 }, { "content": " }\n\n\n\n #[test]\n\n fn test_isc_file() {\n\n let file: PathBuf = [\n\n env!(\"CARGO_MANIFEST_DIR\"), \"testfiles\", \"garbage.txt\"\n\n ].iter().collect();\n\n println!(\"{:?}\", file);\n\n\n\n SectionCache::new(None).unwrap()\n\n .add_file(file.to_str().unwrap(), 5, 15, 0x1337).unwrap();\n\n }\n\n\n\n #[test]\n\n fn test_isc_memsection() {\n\n let file: PathBuf =\n\n [env!(\"CARGO_MANIFEST_DIR\"), \"testfiles\", \"garbage.txt\"]\n\n .iter()\n\n .collect();\n\n\n", "file_path": "src/image/iscache.rs", "rank": 74, "score": 4.416627672725291 }, { "content": " #[test]\n\n fn test_asid_default() {\n\n let asid: Asid = Default::default();\n\n assert_eq!(asid.cr3(), None);\n\n assert_eq!(asid.vmcs(), None);\n\n\n\n let asid: Asid = Asid::new(None, Some(1));\n\n assert_eq!(asid.cr3(), None);\n\n assert_eq!(asid.vmcs(), Some(1));\n\n\n\n let asid: Asid = Asid::new(Some(2), None);\n\n assert_eq!(asid.cr3(), Some(2));\n\n assert_eq!(asid.vmcs(), None);\n\n }\n\n\n\n #[test]\n\n fn test_asid_equal() {\n\n let asid: Asid = Default::default();\n\n let asid2: Asid = Default::default();\n\n assert_eq!(asid, asid2);\n", "file_path": "src/asid.rs", "rank": 76, "score": 4.252047633092198 }, { "content": "use num_enum::TryFromPrimitive;\n\nuse std::convert::TryFrom;\n\nuse std::ffi::CStr;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::error::Error;\n\n\n\nuse libipt_sys::pt_errstr;\n\nuse libipt_sys::{\n\n pt_error_code_pte_ok,\n\n pt_error_code_pte_internal,\n\n pt_error_code_pte_invalid,\n\n pt_error_code_pte_nosync,\n\n pt_error_code_pte_bad_opc,\n\n pt_error_code_pte_bad_packet,\n\n pt_error_code_pte_bad_context,\n\n 
pt_error_code_pte_eos,\n\n pt_error_code_pte_bad_query,\n\n pt_error_code_pte_nomem,\n\n pt_error_code_pte_bad_config,\n\n pt_error_code_pte_noip,\n", "file_path": "src/error.rs", "rank": 77, "score": 4.21745762373026 }, { "content": "use crate::error::{\n\n PtError, ensure_ptok,\n\n extract_pterr, deref_ptresult,\n\n deref_ptresult_mut\n\n};\n\nuse crate::config::Config;\n\n\n\nuse std::marker::PhantomData;\n\n\n\nuse libipt_sys::{\n\n pt_packet,\n\n pt_encoder,\n\n pt_alloc_encoder,\n\n pt_free_encoder,\n\n pt_enc_get_config,\n\n pt_enc_get_offset,\n\n pt_enc_next,\n\n pt_enc_sync_set\n\n};\n\n\n", "file_path": "src/packet/encoder.rs", "rank": 78, "score": 4.145608862493967 }, { "content": "use crate::error::{\n\n PtError,\n\n PtErrorCode,\n\n deref_ptresult,\n\n deref_ptresult_mut,\n\n ensure_ptok,\n\n extract_pterr\n\n};\n\n\n\nuse std::ffi::{CString, CStr};\n\nuse std::ptr;\n\n\n\nuse libipt_sys::{\n\n pt_image_section_cache,\n\n pt_iscache_add_file,\n\n pt_iscache_alloc,\n\n pt_iscache_name,\n\n pt_iscache_read,\n\n pt_iscache_set_limit,\n\n pt_iscache_free\n", "file_path": "src/image/iscache.rs", "rank": 79, "score": 4.116093778867175 }, { "content": " fn test_config_callback_safety() {\n\n let mut kektop = [10;9];\n\n let mut cfg = ConfigBuilder::with_callback(\n\n &mut kektop,\n\n |c, p,| { (Unknown::new(c.0.cpu.stepping + p[8]), 17) })\n\n .unwrap()\n\n .cpu(Cpu::intel(1, 2, 3))\n\n .finish();\n\n\n\n for _ in 0..10 { assert!(check_callback(&mut cfg, 13, 17)) }\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_config_callback_out_of_bounds() {\n\n let mut kektop = [10;9];\n\n let cfg = ConfigBuilder::with_callback(&mut kektop, |c, p,| {\n\n // make sure no move or copy is done\n\n if let Cow::Owned(_) = c.0 { panic!(\"BUG!\") }\n\n // assert_eq!(c.0.as_ref() as *const _, raw);\n", "file_path": "src/config/config.rs", "rank": 80, "score": 4.088183950118431 }, { "content": " QueryDecoder::new(\n\n &ConfigBuilder::new(kek).unwrap().finish()\n\n 
).unwrap();\n\n }\n\n\n\n #[test ]\n\n fn test_qrydec_props() {\n\n let kek = &mut [2; 3];\n\n // this just checks memory safety for property access\n\n // usage can be found in the integration tests\n\n let mut b = QueryDecoder::new(\n\n &ConfigBuilder::new(kek).unwrap().finish()\n\n ).unwrap();\n\n\n\n assert!(b.cond_branch().is_err());\n\n assert!(b.indirect_branch().is_err());\n\n assert!(b.event().is_err());\n\n assert!(b.core_bus_ratio().is_err());\n\n assert!(b.event().is_err());\n\n assert!(b.config().is_ok());\n", "file_path": "src/event/qry.rs", "rank": 81, "score": 4.004492147606159 }, { "content": " (Unknown::new(p[100]), 17)\n\n }).unwrap().cpu(Cpu::intel(1, 2, 3)).finish();\n\n\n\n unsafe {\n\n let mut ukn: pt_packet_unknown = std::mem::zeroed();\n\n cfg.0.decode.callback.unwrap()(&mut ukn,\n\n cfg.0.as_ref(), cfg.0.begin,\n\n cfg.0.decode.context);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_builder_buf_lifetimes() {\n\n let mut x = [10; 10];\n\n let a : Config<()>;\n\n {\n\n let mut c = ConfigBuilder::new(&mut x).unwrap();\n\n a = c.finish();\n\n c.cpu(Cpu::intel(1, 2, 3));\n\n let b = c.finish();\n", "file_path": "src/config/config.rs", "rank": 82, "score": 3.7488714689258664 }, { "content": "\n\n let asid = Asid::new(Some(0), Some(666));\n\n let asid2 = Asid::new(Some(0), Some(666));\n\n assert_eq!(asid, asid2);\n\n\n\n let asid = Asid::new(Some(0), Some(1));\n\n let asid2 = Asid::new(Some(0), Some(2));\n\n assert_ne!(asid, asid2);\n\n\n\n let asid = Asid::new(None, Some(1));\n\n let asid2 = Asid::new(Some(0), Some(2));\n\n assert_ne!(asid, asid2);\n\n }\n\n\n\n #[test]\n\n fn test_asid_from() {\n\n let asid: Asid = Default::default();\n\n let raw = asid.0;\n\n assert_eq!(raw.cr3, NO_CR3);\n\n assert_eq!(raw.vmcs, NO_VMCS);\n", "file_path": "src/asid.rs", "rank": 83, "score": 3.703246488563637 }, { "content": " }\n\n\n\n #[test]\n\n fn test_img_name() {\n\n let i = Image::new(Some(\"yeet\")).unwrap();\n\n assert_eq!(i.name().unwrap(), \"yeet\");\n\n 
let i = Image::new(None).unwrap();\n\n assert!(i.name().is_none());\n\n }\n\n\n\n fn img_with_file<'a>() -> Image<'a> {\n\n let file: PathBuf = [\n\n env!(\"CARGO_MANIFEST_DIR\"), \"testfiles\", \"garbage.txt\"\n\n ].iter().collect();\n\n \n\n let mut i = Image::new(None).unwrap();\n\n let asid = Asid::new(Some(1), Some(2));\n\n i.add_file(file.to_str().unwrap(), 3, 10, Some(asid), 0x123)\n\n .unwrap();\n\n i\n", "file_path": "src/image/image.rs", "rank": 84, "score": 3.688283951681038 }, { "content": " evt.variant.disabled = pt_event__bindgen_ty_1__bindgen_ty_2 {\n\n ip: 11,\n\n };\n\n\n\n let payload: Payload = evt.into();\n\n match payload {\n\n Payload::Disabled(e) => {\n\n assert_eq!(e.ip(), 11);\n\n },\n\n _ => unreachable!(\"oof\")\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_async_disabled_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_async_disabled;\n\n evt.variant.async_disabled = pt_event__bindgen_ty_1__bindgen_ty_3 {\n\n at: 1,\n\n ip: 11,\n", "file_path": "src/event/disabled.rs", "rank": 85, "score": 3.6734418370277613 }, { "content": " evt.variant.vmcs = pt_event__bindgen_ty_1__bindgen_ty_10 {\n\n base: 11,\n\n };\n\n\n\n let payload: Payload = evt.into();\n\n match payload {\n\n Payload::Vmcs(e) => {\n\n assert_eq!(e.base(), 11);\n\n },\n\n _ => unreachable!(\"oof\")\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_async_vmcs_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_async_vmcs;\n\n evt.variant.async_vmcs = pt_event__bindgen_ty_1__bindgen_ty_11 {\n\n base: 11,\n\n ip: 12\n", "file_path": "src/event/vmcs.rs", "rank": 86, "score": 3.6587186966483456 }, { "content": "use std::fmt::{Debug, Formatter};\n\n\n\nuse bitflags::bitflags;\n\nuse libipt_sys::{\n\n pt_mode_leaf_pt_mol_exec as PT_MODE_LEAF_PT_MOL_EXEC,\n\n pt_mode_leaf_pt_mol_tsx as PT_MODE_LEAF_PT_MOL_TSX,\n\n pt_packet_mode,\n\n pt_packet_type_ppt_mode,\n\n pt_packet_mode_exec,\n\n 
pt_packet_mode_tsx,\n\n __BindgenBitfieldUnit,\n\n pt_packet_mode__bindgen_ty_1\n\n};\n\n\n\nbitflags! {\n\n /// A mode.exec packet\n\n pub struct Exec : u32 {\n\n /// The mode.exec csl bit\n\n const CSL = 0b00000001;\n\n /// The mode.exec csd bit\n", "file_path": "src/packet/mode.rs", "rank": 87, "score": 3.634176505795965 }, { "content": " assert_eq!(cpu1.0.model, cpu2.0.model);\n\n assert_eq!(cpu1.0.stepping, cpu2.0.stepping);\n\n }\n\n\n\n #[test]\n\n fn test_cpu_errata() {\n\n let cpu = Cpu::intel(0x6, 0x56, 11);\n\n let e = cpu.determine_errata();\n\n assert_eq!(e.bdm70(), 1);\n\n assert_eq!(e.bdm64(), 1);\n\n assert_eq!(e.skd007(), 0);\n\n assert_eq!(e.skd022(), 0);\n\n \n\n let cpu = Cpu::intel(0x6, 0x9e, 11);\n\n let e = cpu.determine_errata();\n\n assert_eq!(e.bdm64(), 0);\n\n assert_eq!(e.bdm70(), 1);\n\n assert_eq!(e.skd007(), 1);\n\n assert_eq!(e.skd022(), 1);\n\n }\n", "file_path": "src/config/cpu.rs", "rank": 88, "score": 3.615248985665612 }, { "content": " evt.variant.paging = pt_event__bindgen_ty_1__bindgen_ty_5 {\n\n cr3: 11,\n\n _bitfield_1: pt_event__bindgen_ty_1__bindgen_ty_5::new_bitfield_1(1),\n\n __bindgen_padding_0: Default::default()\n\n };\n\n\n\n let payload: Payload = evt.into();\n\n match payload {\n\n Payload::Paging(e) => {\n\n assert_eq!(e.cr3(), 11);\n\n assert!(e.non_root());\n\n },\n\n _ => unreachable!(\"oof\")\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_async_paging_payload() {\n\n let mut evt: pt_event = unsafe { mem::zeroed() };\n\n evt.type_ = pt_event_type_ptev_async_paging;\n", "file_path": "src/event/paging.rs", "rank": 89, "score": 3.600987718416022 }, { "content": " #[test]\n\n fn test_block_notruncate() {\n\n let data: [u8; 15] = [17; 15];\n\n let blk = Block(pt_block {\n\n ip: 1,\n\n end_ip: 2,\n\n isid: 3,\n\n mode: pt_exec_mode_ptem_32bit,\n\n iclass: pt_insn_class_ptic_error,\n\n ninsn: 4,\n\n raw: data,\n\n size: 8,\n\n _bitfield_1: pt_block::new_bitfield_1(0, 0),\n\n __bindgen_padding_0: Default::default()\n\n 
});\n\n\n\n assert_eq!(blk.ip(), 1);\n\n assert_eq!(blk.end_ip(), 2);\n\n assert_eq!(blk.isid(), 3);\n\n assert_eq!(blk.mode(), ExecModeType::Bit32);\n", "file_path": "src/block/block.rs", "rank": 90, "score": 3.5868385234458264 }, { "content": " println!(\"{:?}\", file);\n\n let mut isc = SectionCache::new(None).unwrap();\n\n let isid = isc\n\n .add_file(file.to_str().unwrap(), 5, 24, 0x666)\n\n .unwrap();\n\n\n\n let mut buf = [0; 20];\n\n isc.read(&mut buf, isid, 0x66A).unwrap();\n\n\n\n let expect = &fs::read(&file).unwrap()[9..29];\n\n assert_eq!(expect, buf);\n\n }\n\n\n\n #[test]\n\n fn test_isc_limit() {\n\n let mut isc = SectionCache::new(None).unwrap();\n\n isc.set_limit(111).unwrap();\n\n isc.set_limit(0).unwrap();\n\n isc.set_limit(std::u64::MAX).unwrap();\n\n }\n", "file_path": "src/image/iscache.rs", "rank": 91, "score": 3.5313364535759693 }, { "content": "use std::convert::TryFrom;\n\nuse num_enum::{TryFromPrimitive, IntoPrimitive};\n\nuse libipt_sys::{\n\n pt_packet_ip,\n\n pt_packet_type_ppt_tip,\n\n pt_packet_type_ppt_fup,\n\n pt_packet_type_ppt_tip_pge,\n\n pt_packet_type_ppt_tip_pgd,\n\n pt_ip_compression_pt_ipc_full,\n\n pt_ip_compression_pt_ipc_sext_48,\n\n pt_ip_compression_pt_ipc_suppressed,\n\n pt_ip_compression_pt_ipc_update_16,\n\n pt_ip_compression_pt_ipc_update_32,\n\n pt_ip_compression_pt_ipc_update_48\n\n};\n\n\n\n/// The IP compression\n\n#[derive(Clone, Copy, Debug, TryFromPrimitive, IntoPrimitive)]\n\n#[repr(i32)]\n\npub enum Compression {\n", "file_path": "src/packet/ip.rs", "rank": 92, "score": 3.5258219941490725 }, { "content": " #[test]\n\n fn test_query_flags() {\n\n let query = QueryFlags::empty();\n\n let raw: pt_conf_flags = query.into();\n\n\n\n unsafe { assert_eq!(raw.variant.query.keep_tcal_on_ovf(), 0); }\n\n\n\n let query: QueryFlags = QueryFlags::KEEP_TCAL_ON_OVF;\n\n let raw: pt_conf_flags = query.into();\n\n\n\n unsafe { assert_eq!(raw.variant.query.keep_tcal_on_ovf(), 1); }\n\n }\n\n}\n\n\n\nbitflags! 
{\n\n /// flags for the block decoder\n\n pub struct BlockFlags: u8 {\n\n /// End a block after a call instruction\n\n const END_ON_CALL = 0b00000001;\n\n /// Enable tick events for timing updates\n", "file_path": "src/config/flags.rs", "rank": 93, "score": 3.5042245948967574 }, { "content": "\n\n #[test]\n\n fn test_insn_flags() {\n\n let insn = InsnFlags::ENABLE_TICK_EVENTS;\n\n let raw: pt_conf_flags = insn.into();\n\n\n\n unsafe {\n\n assert_eq!(raw.variant.insn.enable_tick_events(), 1);\n\n assert_eq!(raw.variant.insn.keep_tcal_on_ovf(), 0);\n\n }\n\n\n\n let insn = InsnFlags::ENABLE_TICK_EVENTS | InsnFlags::KEEP_TCAL_ON_OVF;\n\n let raw: pt_conf_flags = insn.into();\n\n\n\n unsafe {\n\n assert_eq!(raw.variant.insn.enable_tick_events(), 1);\n\n assert_eq!(raw.variant.insn.keep_tcal_on_ovf(), 1);\n\n }\n\n }\n\n\n", "file_path": "src/config/flags.rs", "rank": 94, "score": 3.490824182171263 }, { "content": "use std::mem;\n\nuse libipt_sys::{pt_packet, pt_packet_type_ppt_psb};\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Psb {}\n\n\n\nimpl Psb {\n\n pub fn new() -> Self { Psb {} }\n\n}\n\n\n\nimpl From<Psb> for pt_packet {\n\n fn from(_: Psb) -> Self {\n\n pt_packet {\n\n type_: pt_packet_type_ppt_psb,\n\n size: mem::size_of::<pt_packet>() as u8,\n\n payload: unsafe { mem::zeroed() }\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Psb> for pt_packet {\n\n fn into(self) -> Psb { Psb{} }\n\n}\n", "file_path": "src/packet/psb.rs", "rank": 95, "score": 3.46187146836432 }, { "content": "use std::mem;\n\nuse libipt_sys::{pt_packet, pt_packet_type_ppt_ovf};\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Ovf {}\n\n\n\nimpl Ovf {\n\n pub fn new() -> Self { Ovf {} }\n\n}\n\n\n\nimpl From<Ovf> for pt_packet {\n\n fn from(_: Ovf) -> Self {\n\n pt_packet {\n\n type_: pt_packet_type_ppt_ovf,\n\n size: mem::size_of::<pt_packet>() as u8,\n\n payload: unsafe { mem::zeroed() }\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Ovf> for pt_packet {\n\n fn into(self) -> Ovf{ Ovf{} }\n\n}\n", 
"file_path": "src/packet/ovf.rs", "rank": 96, "score": 3.46187146836432 }, { "content": "use std::mem;\n\nuse libipt_sys::{pt_packet, pt_packet_type_ppt_psbend};\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Psbend {}\n\n\n\nimpl Psbend {\n\n pub fn new() -> Self { Psbend {} }\n\n}\n\n\n\nimpl From<Psbend> for pt_packet {\n\n fn from(_: Psbend) -> Self {\n\n pt_packet {\n\n type_: pt_packet_type_ppt_psbend,\n\n size: mem::size_of::<pt_packet>() as u8,\n\n payload: unsafe { mem::zeroed() }\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Psbend> for pt_packet {\n\n fn into(self) -> Psbend { Psbend{} }\n\n}\n", "file_path": "src/packet/psbend.rs", "rank": 97, "score": 3.46187146836432 }, { "content": "use std::mem;\n\nuse libipt_sys::{pt_packet, pt_packet_type_ppt_pad};\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Pad {}\n\n\n\nimpl Pad {\n\n pub fn new() -> Self { Pad {} }\n\n}\n\n\n\nimpl From<Pad> for pt_packet {\n\n fn from(_: Pad) -> Self {\n\n pt_packet {\n\n type_: pt_packet_type_ppt_pad,\n\n size: mem::size_of::<pt_packet>() as u8,\n\n payload: unsafe { mem::zeroed() }\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Pad> for pt_packet {\n\n fn into(self) -> Pad { Pad{} }\n\n}\n", "file_path": "src/packet/pad.rs", "rank": 98, "score": 3.46187146836432 }, { "content": "use std::mem;\n\nuse libipt_sys::{pt_packet, pt_packet_type_ppt_stop};\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Stop {}\n\n\n\nimpl Stop {\n\n pub fn new() -> Self { Stop {} }\n\n}\n\n\n\nimpl From<Stop> for pt_packet {\n\n fn from(_: Stop) -> Self {\n\n pt_packet {\n\n type_: pt_packet_type_ppt_stop,\n\n size: mem::size_of::<pt_packet>() as u8,\n\n payload: unsafe { mem::zeroed() }\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Stop> for pt_packet {\n\n fn into(self) -> Stop { Stop{} }\n\n}\n", "file_path": "src/packet/stop.rs", "rank": 99, "score": 3.46187146836432 } ]
Rust
client/src/util.rs
fabaff/innernet
fd06b8054d007fcf86144c37c6eaf37f30719450
use crate::{ClientError, Error}; use colored::*; use indoc::eprintdoc; use log::{Level, LevelFilter}; use serde::{de::DeserializeOwned, Serialize}; use shared::{interface_config::ServerInfo, INNERNET_PUBKEY_HEADER}; use std::{io, time::Duration}; use ureq::{Agent, AgentBuilder}; static LOGGER: Logger = Logger; struct Logger; const BASE_MODULES: &[&str] = &["innernet", "shared"]; fn target_is_base(target: &str) -> bool { BASE_MODULES .iter() .any(|module| module == &target || target.starts_with(&format!("{}::", module))) } impl log::Log for Logger { fn enabled(&self, metadata: &log::Metadata) -> bool { metadata.level() <= log::max_level() && (log::max_level() == LevelFilter::Trace || target_is_base(metadata.target())) } fn log(&self, record: &log::Record) { if self.enabled(record.metadata()) { let level_str = match record.level() { Level::Error => "[E]".red(), Level::Warn => "[!]".yellow(), Level::Info => "[*]".dimmed(), Level::Debug => "[D]".blue(), Level::Trace => "[T]".purple(), }; if record.level() <= LevelFilter::Debug && !target_is_base(record.target()) { println!( "{} {} {}", level_str, format!("[{}]", record.target()).dimmed(), record.args() ); } else { println!("{} {}", level_str, record.args()); } } } fn flush(&self) {} } pub fn init_logger(verbosity: u64) { let level = match verbosity { 0 => log::LevelFilter::Info, 1 => log::LevelFilter::Debug, _ => log::LevelFilter::Trace, }; log::set_max_level(level); log::set_logger(&LOGGER).unwrap(); } pub fn human_duration(duration: Duration) -> String { match duration.as_secs() { n if n < 1 => "just now".cyan().to_string(), n if n < 60 => format!("{} {} ago", n, "seconds".cyan()), n if n < 60 * 60 => { let mins = n / 60; let secs = n % 60; format!( "{} {}, {} {} ago", mins, if mins == 1 { "minute" } else { "minutes" }.cyan(), secs, if secs == 1 { "second" } else { "seconds" }.cyan(), ) }, n => { let hours = n / (60 * 60); let mins = (n / 60) % 60; format!( "{} {}, {} {} ago", hours, if hours == 1 { "hour" } else { 
"hours" }.cyan(), mins, if mins == 1 { "minute" } else { "minutes" }.cyan(), ) }, } } pub fn human_size(bytes: u64) -> String { const KB: u64 = 1024; const MB: u64 = 1024 * KB; const GB: u64 = 1024 * MB; const TB: u64 = 1024 * GB; match bytes { n if n < 2 * KB => format!("{} {}", n, "B".cyan()), n if n < 2 * MB => format!("{:.2} {}", n as f64 / KB as f64, "KiB".cyan()), n if n < 2 * GB => format!("{:.2} {}", n as f64 / MB as f64, "MiB".cyan()), n if n < 2 * TB => format!("{:.2} {}", n as f64 / GB as f64, "GiB".cyan()), n => format!("{:.2} {}", n as f64 / TB as f64, "TiB".cyan()), } } pub fn permissions_helptext(e: &io::Error) { if e.raw_os_error() == Some(1) { let current_exe = std::env::current_exe() .ok() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "<innernet path>".into()); eprintdoc!( "{}: innernet can't access the device info. You either need to run innernet as root, or give innernet CAP_NET_ADMIN capabilities: sudo setcap cap_net_admin+eip {} ", "ERROR".bold().red(), current_exe ); } else if e.kind() == io::ErrorKind::PermissionDenied { eprintdoc!( "{}: innernet can't access its config/data folders. You either need to run innernet as root, or give the user/group running innernet permissions to access {config} and {data}. 
For non-root permissions, it's recommended to create an \"innernet\" group, and run for example: sudo chgrp -R innernet {config} {data} sudo chmod -R g+rwX {config} {data} ", "ERROR".bold().red(), config = shared::CLIENT_CONFIG_DIR.to_string_lossy(), data = shared::CLIENT_DATA_DIR.to_string_lossy(), ); } } pub struct Api<'a> { agent: Agent, server: &'a ServerInfo, } impl<'a> Api<'a> { pub fn new(server: &'a ServerInfo) -> Self { let agent = AgentBuilder::new() .timeout(Duration::from_secs(5)) .redirects(0) .build(); Self { agent, server } } pub fn http<T: DeserializeOwned>(&self, verb: &str, endpoint: &str) -> Result<T, Error> { self.request::<(), _>(verb, endpoint, None) } pub fn http_form<S: Serialize, T: DeserializeOwned>( &self, verb: &str, endpoint: &str, form: S, ) -> Result<T, Error> { self.request(verb, endpoint, Some(form)) } fn request<S: Serialize, T: DeserializeOwned>( &self, verb: &str, endpoint: &str, form: Option<S>, ) -> Result<T, Error> { let request = self .agent .request( verb, &format!("http://{}/v1{}", self.server.internal_endpoint, endpoint), ) .set(INNERNET_PUBKEY_HEADER, &self.server.public_key); let response = if let Some(form) = form { request.send_json(serde_json::to_value(form)?)? } else { request.call()? }; let mut response = response.into_string()?; if response.is_empty() { response = "null".into(); } Ok(serde_json::from_str(&response).map_err(|e| { ClientError(format!( "failed to deserialize JSON response from the server: {}, response={}", e, &response )) })?) } }
use crate::{ClientError, Error}; use colored::*; use indoc::eprintdoc; use log::{Level, LevelFilter}; use serde::{de::DeserializeOwned, Serialize}; use shared::{interface_config::ServerInfo, INNERNET_PUBKEY_HEADER}; use std::{io, time::Duration}; use ureq::{Agent, AgentBuilder}; static LOGGER: Logger = Logger; struct Logger; const BASE_MODULES: &[&str] = &["innernet", "shared"]; fn target_is_base(target: &str) -> bool { BASE_MODULES .iter() .any(|module| module == &target || target.starts_with(&format!("{}::", module))) } impl log::Log for Logger { fn enabled(&self, metadata: &log::Metadata) -> bool { metadata.level() <= log::max_level() && (log::max_level() == LevelFilter::Trace || target_is_base(metadata.target())) } fn log(&self, record: &log::Record) { if self.enabled(record.metadata()) { let level_str = match record.level() { Level::Error => "[E]".red(), Level::Warn => "[!]".yellow(), Level::Info => "[*]".dimmed(), Level::Debug => "[D]".blue(), Level::Trace => "[T]".purple(), }; if record.level() <= LevelFilter::Debug && !target_is_base(record.target()) { println!( "{} {} {}", level_str, format!("[{}]", record.target()).dimmed(), record.args() ); } else { println!("{} {}", level_str, record.args()); } } } fn flush(&self) {} } pub fn init_logger(verbosity: u64) { let level = match verbosity { 0 => log::LevelFilter::Info, 1 => log::LevelFilter::Debug, _ => log::LevelFilter::Trace, }; log::set_max_level(level); log::set_logger(&LOGGER).unwrap(); } pub fn human_duration(duration: Duration) -> String { match duration.as_secs() { n if n < 1 => "just now".cyan().to_string(), n if n < 60 => format!("{} {} ago", n, "seconds".cyan()), n if n < 60 * 60 => { let mins = n / 60; let secs = n % 60; format!( "{} {}, {} {} ago", mins, if mins == 1 { "minute" } else { "minutes" }.cyan(), secs, if secs == 1 { "second" } else { "seconds" }.cyan(), ) }, n => { let hours = n / (60 * 60); let mins = (n / 60) % 60;
pub fn human_size(bytes: u64) -> String { const KB: u64 = 1024; const MB: u64 = 1024 * KB; const GB: u64 = 1024 * MB; const TB: u64 = 1024 * GB; match bytes { n if n < 2 * KB => format!("{} {}", n, "B".cyan()), n if n < 2 * MB => format!("{:.2} {}", n as f64 / KB as f64, "KiB".cyan()), n if n < 2 * GB => format!("{:.2} {}", n as f64 / MB as f64, "MiB".cyan()), n if n < 2 * TB => format!("{:.2} {}", n as f64 / GB as f64, "GiB".cyan()), n => format!("{:.2} {}", n as f64 / TB as f64, "TiB".cyan()), } } pub fn permissions_helptext(e: &io::Error) { if e.raw_os_error() == Some(1) { let current_exe = std::env::current_exe() .ok() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "<innernet path>".into()); eprintdoc!( "{}: innernet can't access the device info. You either need to run innernet as root, or give innernet CAP_NET_ADMIN capabilities: sudo setcap cap_net_admin+eip {} ", "ERROR".bold().red(), current_exe ); } else if e.kind() == io::ErrorKind::PermissionDenied { eprintdoc!( "{}: innernet can't access its config/data folders. You either need to run innernet as root, or give the user/group running innernet permissions to access {config} and {data}. 
For non-root permissions, it's recommended to create an \"innernet\" group, and run for example: sudo chgrp -R innernet {config} {data} sudo chmod -R g+rwX {config} {data} ", "ERROR".bold().red(), config = shared::CLIENT_CONFIG_DIR.to_string_lossy(), data = shared::CLIENT_DATA_DIR.to_string_lossy(), ); } } pub struct Api<'a> { agent: Agent, server: &'a ServerInfo, } impl<'a> Api<'a> { pub fn new(server: &'a ServerInfo) -> Self { let agent = AgentBuilder::new() .timeout(Duration::from_secs(5)) .redirects(0) .build(); Self { agent, server } } pub fn http<T: DeserializeOwned>(&self, verb: &str, endpoint: &str) -> Result<T, Error> { self.request::<(), _>(verb, endpoint, None) } pub fn http_form<S: Serialize, T: DeserializeOwned>( &self, verb: &str, endpoint: &str, form: S, ) -> Result<T, Error> { self.request(verb, endpoint, Some(form)) } fn request<S: Serialize, T: DeserializeOwned>( &self, verb: &str, endpoint: &str, form: Option<S>, ) -> Result<T, Error> { let request = self .agent .request( verb, &format!("http://{}/v1{}", self.server.internal_endpoint, endpoint), ) .set(INNERNET_PUBKEY_HEADER, &self.server.public_key); let response = if let Some(form) = form { request.send_json(serde_json::to_value(form)?)? } else { request.call()? }; let mut response = response.into_string()?; if response.is_empty() { response = "null".into(); } Ok(serde_json::from_str(&response).map_err(|e| { ClientError(format!( "failed to deserialize JSON response from the server: {}, response={}", e, &response )) })?) } }
format!( "{} {}, {} {} ago", hours, if hours == 1 { "hour" } else { "hours" }.cyan(), mins, if mins == 1 { "minute" } else { "minutes" }.cyan(), ) }, } }
function_block-function_prefix_line
[ { "content": "pub fn confirm(prompt: &str) -> Result<bool, io::Error> {\n\n ensure_interactive(prompt)?;\n\n Confirm::with_theme(&*THEME)\n\n .wait_for_newline(true)\n\n .with_prompt(prompt)\n\n .default(false)\n\n .interact()\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 0, "score": 254406.89988016884 }, { "content": "pub fn choose_cidr<'a>(cidrs: &'a [Cidr], text: &'static str) -> Result<&'a Cidr, Error> {\n\n let eligible_cidrs: Vec<_> = cidrs\n\n .iter()\n\n .filter(|cidr| cidr.name != \"innernet-server\")\n\n .collect();\n\n Ok(select(text, &eligible_cidrs)?.1)\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 2, "score": 217848.18427431968 }, { "content": "pub fn ensure_interactive(prompt: &str) -> Result<(), io::Error> {\n\n if atty::is(atty::Stream::Stdin) {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::BrokenPipe,\n\n format!(\"Prompt \\\"{}\\\" failed because TTY isn't connected.\", prompt),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 3, "score": 213108.49428650335 }, { "content": "pub fn override_endpoint(unset: bool) -> Result<Option<Option<Endpoint>>, Error> {\n\n let endpoint = if !unset { Some(ask_endpoint()?) } else { None };\n\n\n\n Ok(\n\n if confirm(\n\n &(if let Some(endpoint) = &endpoint {\n\n format!(\"Set external endpoint to {}?\", endpoint)\n\n } else {\n\n \"Unset external endpoint to enable automatic endpoint discovery?\".to_string()\n\n }),\n\n )? 
{\n\n Some(endpoint)\n\n } else {\n\n None\n\n },\n\n )\n\n}\n", "file_path": "shared/src/prompts.rs", "rank": 6, "score": 195364.3623317718 }, { "content": "pub fn select<'a, T: ToString>(prompt: &str, items: &'a [T]) -> Result<(usize, &'a T), io::Error> {\n\n ensure_interactive(prompt)?;\n\n let choice = Select::with_theme(&*THEME)\n\n .with_prompt(prompt)\n\n .items(items)\n\n .interact()?;\n\n Ok((choice, &items[choice]))\n\n}\n\n\n\npub enum Prefill<T> {\n\n Default(T),\n\n Editable(String),\n\n None,\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 8, "score": 188678.73110727154 }, { "content": "/// Updates the permissions of a file or directory. Returns `Ok(true)` if\n\n/// permissions had to be changed, `Ok(false)` if permissions were already\n\n/// correct.\n\npub fn chmod(file: &File, new_mode: u32) -> Result<bool, io::Error> {\n\n let metadata = file.metadata()?;\n\n let mut permissions = metadata.permissions();\n\n let mode = permissions.mode() & 0o777;\n\n let updated = if mode != new_mode {\n\n permissions.set_mode(new_mode);\n\n file.set_permissions(permissions)?;\n\n true\n\n } else {\n\n false\n\n };\n\n\n\n Ok(updated)\n\n}\n", "file_path": "shared/src/lib.rs", "rank": 9, "score": 187660.72747193798 }, { "content": "pub fn infra_peer_contents(name: &str, ip_str: &str) -> Result<PeerContents, Error> {\n\n peer_contents(name, ip_str, INFRA_CIDR_ID, false)\n\n}\n\n\n", "file_path": "server/src/test.rs", "rank": 10, "score": 187113.8224697446 }, { "content": "pub fn user_peer_contents(name: &str, ip_str: &str) -> Result<PeerContents, Error> {\n\n peer_contents(name, ip_str, USER_CIDR_ID, false)\n\n}\n", "file_path": "server/src/test.rs", "rank": 11, "score": 187113.8224697446 }, { "content": "pub fn admin_peer_contents(name: &str, ip_str: &str) -> Result<PeerContents, Error> {\n\n peer_contents(name, ip_str, ADMIN_CIDR_ID, true)\n\n}\n\n\n", "file_path": "server/src/test.rs", "rank": 12, "score": 187113.8224697446 }, { "content": "pub fn 
developer_peer_contents(name: &str, ip_str: &str) -> Result<PeerContents, Error> {\n\n peer_contents(name, ip_str, DEVELOPER_CIDR_ID, false)\n\n}\n\n\n", "file_path": "server/src/test.rs", "rank": 13, "score": 187113.8224697446 }, { "content": "/// Presents a selection and confirmation of eligible peers for either disabling or enabling,\n\n/// and returns back the ID of the selected peer.\n\npub fn enable_or_disable_peer(peers: &[Peer], enable: bool) -> Result<Option<Peer>, Error> {\n\n let enabled_peers: Vec<_> = peers\n\n .iter()\n\n .filter(|peer| enable && peer.is_disabled || !enable && !peer.is_disabled)\n\n .collect();\n\n\n\n let peer_selection: Vec<_> = enabled_peers\n\n .iter()\n\n .map(|peer| format!(\"{} ({})\", &peer.name, &peer.ip))\n\n .collect();\n\n let (index, _) = select(\n\n &format!(\"Peer to {}able\", if enable { \"en\" } else { \"dis\" }),\n\n &peer_selection,\n\n )?;\n\n let peer = enabled_peers[index];\n\n\n\n Ok(\n\n if confirm(&format!(\n\n \"{}able peer {}?\",\n\n if enable { \"En\" } else { \"Dis\" },\n\n peer.name.yellow()\n\n ))? 
{\n\n Some(peer.clone())\n\n } else {\n\n None\n\n },\n\n )\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 14, "score": 183971.4369399062 }, { "content": "pub fn ask_endpoint() -> Result<Endpoint, Error> {\n\n println!(\"getting external IP address.\");\n\n\n\n let external_ip = if Confirm::with_theme(&*THEME)\n\n .wait_for_newline(true)\n\n .with_prompt(\"Auto-fill public IP address (using a DNS query to 1.1.1.1)?\")\n\n .interact()?\n\n {\n\n publicip::get_any(Preference::Ipv4)\n\n } else {\n\n None\n\n };\n\n\n\n Ok(input(\n\n \"External endpoint\",\n\n match external_ip {\n\n Some(ip) => Prefill::Editable(SocketAddr::new(ip, 51820).to_string()),\n\n None => Prefill::None,\n\n },\n\n )?)\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 15, "score": 182533.6226223613 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn add_route(interface: &InterfaceName, cidr: IpNetwork) -> Result<bool, io::Error> {\n\n let real_interface = wgctrl::backends::userspace::resolve_tun(interface)?;\n\n let output = cmd(\n\n \"route\",\n\n &[\n\n \"-n\",\n\n \"add\",\n\n if cidr.is_ipv4() { \"-inet\" } else { \"-inet6\" },\n\n &cidr.to_string(),\n\n \"-interface\",\n\n &real_interface,\n\n ],\n\n )?;\n\n let stderr = String::from_utf8_lossy(&output.stderr);\n\n if !output.status.success() {\n\n Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\n\n \"failed to add route for device {} ({}): {}\",\n\n &interface, real_interface, stderr\n\n ),\n\n ))\n\n } else {\n\n Ok(!stderr.contains(\"File exists\"))\n\n }\n\n}\n\n\n\n#[cfg(target_os = \"linux\")]\n\npub use super::netlink::add_route;\n", "file_path": "shared/src/wg.rs", "rank": 16, "score": 180496.70900443036 }, { "content": "pub fn add_route(interface: &InterfaceName, cidr: IpNetwork) -> Result<bool, io::Error> {\n\n let if_index = if_nametoindex(interface)?;\n\n let (address_family, dst) = match cidr {\n\n IpNetwork::V4(network) => (AF_INET as u8, network.network().octets().to_vec()),\n\n 
IpNetwork::V6(network) => (AF_INET6 as u8, network.network().octets().to_vec()),\n\n };\n\n let message = RouteMessage {\n\n header: RouteHeader {\n\n table: RT_TABLE_MAIN,\n\n protocol: RTPROT_BOOT,\n\n scope: RT_SCOPE_LINK,\n\n kind: RTN_UNICAST,\n\n destination_prefix_length: cidr.prefix(),\n\n address_family,\n\n ..Default::default()\n\n },\n\n nlas: vec![route::Nla::Destination(dst), route::Nla::Oif(if_index)],\n\n };\n\n\n\n match netlink_call(RtnlMessage::NewRoute(message), None) {\n\n Ok(_) => Ok(true),\n\n Err(e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(false),\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "shared/src/netlink.rs", "rank": 17, "score": 180492.4617290452 }, { "content": "pub fn create_cidr(db: &Connection, name: &str, cidr_str: &str) -> Result<Cidr, Error> {\n\n let cidr = DatabaseCidr::create(\n\n db,\n\n CidrContents {\n\n name: name.to_string(),\n\n cidr: cidr_str.parse()?,\n\n parent: Some(ROOT_CIDR_ID),\n\n },\n\n )?;\n\n\n\n Ok(cidr)\n\n}\n\n\n\n//\n\n// Below are helper functions for writing tests.\n\n//\n\n\n", "file_path": "server/src/test.rs", "rank": 18, "score": 179559.95455495367 }, { "content": "pub fn input<T>(prompt: &str, prefill: Prefill<T>) -> Result<T, io::Error>\n\nwhere\n\n T: Clone + FromStr + Display,\n\n T::Err: Display + Debug,\n\n{\n\n ensure_interactive(prompt)?;\n\n let mut input = Input::with_theme(&*THEME);\n\n match prefill {\n\n Prefill::Default(value) => input.default(value),\n\n Prefill::Editable(value) => input.with_initial_text(value),\n\n _ => &mut input,\n\n }\n\n .with_prompt(prompt)\n\n .interact()\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 19, "score": 177554.27605429594 }, { "content": "#[cfg(target_os = \"macos\")]\n\nfn cmd(bin: &str, args: &[&str]) -> Result<std::process::Output, io::Error> {\n\n let output = std::process::Command::new(bin).args(args).output()?;\n\n log::debug!(\"cmd: {} {}\", bin, args.join(\" \"));\n\n log::debug!(\"status: {:?}\", 
output.status.code());\n\n log::trace!(\"stdout: {}\", String::from_utf8_lossy(&output.stdout));\n\n log::trace!(\"stderr: {}\", String::from_utf8_lossy(&output.stderr));\n\n if output.status.success() {\n\n Ok(output)\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\n\n \"failed to run {} {} command: {}\",\n\n bin,\n\n args.join(\" \"),\n\n String::from_utf8_lossy(&output.stderr)\n\n ),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "shared/src/wg.rs", "rank": 20, "score": 164135.14916985546 }, { "content": "pub fn up(\n\n interface: &InterfaceName,\n\n private_key: &str,\n\n address: IpNetwork,\n\n listen_port: Option<u16>,\n\n peer: Option<(&str, IpAddr, SocketAddr)>,\n\n network: NetworkOpt,\n\n) -> Result<(), io::Error> {\n\n let mut device = DeviceUpdate::new();\n\n if let Some((public_key, address, endpoint)) = peer {\n\n let prefix = if address.is_ipv4() { 32 } else { 128 };\n\n let peer_config =\n\n PeerConfigBuilder::new(&wgctrl::Key::from_base64(public_key).map_err(|_| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"failed to parse base64 public key\",\n\n )\n\n })?)\n\n .add_allowed_ip(address, prefix)\n\n .set_persistent_keepalive_interval(25)\n", "file_path": "shared/src/wg.rs", "rank": 21, "score": 163720.37573566247 }, { "content": "pub fn warn_on_dangerous_mode(path: &Path) -> Result<(), io::Error> {\n\n let file = File::open(path)?;\n\n let metadata = file.metadata()?;\n\n let permissions = metadata.permissions();\n\n let mode = permissions.mode() & 0o777;\n\n\n\n if mode & 0o007 != 0 {\n\n log::warn!(\n\n \"{} is world-accessible (mode is {:#05o}). 
This is probably not what you want.\",\n\n path.to_string_lossy(),\n\n mode\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "shared/src/lib.rs", "rank": 22, "score": 162159.64572640593 }, { "content": "pub fn down(interface: &InterfaceName, backend: Backend) -> Result<(), Error> {\n\n Ok(Device::get(interface, backend)\n\n .with_str(interface.as_str_lossy())?\n\n .delete()\n\n .with_str(interface.as_str_lossy())?)\n\n}\n\n\n\n/// Add a route in the OS's routing table to get traffic flowing through this interface.\n\n/// Returns an error if the process doesn't exit successfully, otherwise returns\n\n/// true if the route was changed, false if the route already exists.\n", "file_path": "shared/src/wg.rs", "rank": 23, "score": 162091.30469461766 }, { "content": "/// Bring up a prompt to create a new peer. Returns the peer request.\n\npub fn add_peer(\n\n peers: &[Peer],\n\n cidr_tree: &CidrTree,\n\n args: &AddPeerOpts,\n\n) -> Result<Option<(PeerContents, KeyPair, String, File)>, Error> {\n\n let leaves = cidr_tree.leaves();\n\n\n\n let cidr = if let Some(ref parent_name) = args.cidr {\n\n leaves\n\n .iter()\n\n .find(|cidr| &cidr.name == parent_name)\n\n .ok_or_else(|| anyhow!(\"No eligible CIDR with that name exists.\"))?\n\n } else {\n\n choose_cidr(&leaves[..], \"Eligible CIDRs for peer\")?\n\n };\n\n\n\n let mut available_ip = None;\n\n let candidate_ips = cidr.iter().filter(|ip| cidr.is_assignable(*ip));\n\n for ip in candidate_ips {\n\n if !peers.iter().any(|peer| peer.ip == ip) {\n", "file_path": "shared/src/prompts.rs", "rank": 25, "score": 155149.9111916363 }, { "content": "/// Bring up a prompt to create a new peer. 
Returns the peer request.\n\npub fn rename_peer(\n\n peers: &[Peer],\n\n args: &RenamePeerOpts,\n\n) -> Result<Option<(PeerContents, Hostname)>, Error> {\n\n let eligible_peers = peers\n\n .iter()\n\n .filter(|p| &*p.name != \"innernet-server\")\n\n .collect::<Vec<_>>();\n\n let old_peer = if let Some(ref name) = args.name {\n\n eligible_peers\n\n .into_iter()\n\n .find(|p| &p.name == name)\n\n .ok_or_else(|| anyhow!(\"Peer '{}' does not exist\", name))?\n\n .clone()\n\n } else {\n\n let (peer_index, _) = select(\n\n \"Peer to rename\",\n\n &eligible_peers\n\n .iter()\n\n .map(|ep| ep.name.clone())\n", "file_path": "shared/src/prompts.rs", "rank": 26, "score": 155149.9111916363 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn set_up(interface: &InterfaceName, mtu: u32) -> Result<(), io::Error> {\n\n let real_interface = wgctrl::backends::userspace::resolve_tun(interface)?;\n\n cmd(\"ifconfig\", &[&real_interface, \"mtu\", &mtu.to_string()])?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(target_os = \"linux\")]\n\npub use super::netlink::set_addr;\n\n\n\n#[cfg(target_os = \"linux\")]\n\npub use super::netlink::set_up;\n\n\n", "file_path": "shared/src/wg.rs", "rank": 27, "score": 152054.3757473114 }, { "content": "pub fn set_up(interface: &InterfaceName, mtu: u32) -> Result<(), io::Error> {\n\n let index = if_nametoindex(interface)?;\n\n let message = LinkMessage {\n\n header: LinkHeader {\n\n index,\n\n flags: IFF_UP,\n\n ..Default::default()\n\n },\n\n nlas: vec![link::nlas::Nla::Mtu(mtu)],\n\n };\n\n netlink_call(RtnlMessage::SetLink(message), None)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "shared/src/netlink.rs", "rank": 28, "score": 152050.12847192626 }, { "content": "pub fn add_association(cidrs: &[Cidr]) -> Result<Option<(&Cidr, &Cidr)>, Error> {\n\n let cidr1 = choose_cidr(cidrs, \"First CIDR\")?;\n\n let cidr2 = choose_cidr(cidrs, \"Second CIDR\")?;\n\n\n\n Ok(\n\n if confirm(&format!(\n\n \"Add association: {} <=> {}?\",\n\n cidr1.name.yellow().bold(),\n\n 
cidr2.name.yellow().bold()\n\n ))? {\n\n Some((cidr1, cidr2))\n\n } else {\n\n None\n\n },\n\n )\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 29, "score": 152050.12847192626 }, { "content": "/// Confirm and write a innernet invitation file after a peer has been created.\n\npub fn write_peer_invitation(\n\n target_file: (&mut File, &str),\n\n network_name: &InterfaceName,\n\n peer: &Peer,\n\n server_peer: &Peer,\n\n root_cidr: &Cidr,\n\n keypair: KeyPair,\n\n server_api_addr: &SocketAddr,\n\n) -> Result<(), Error> {\n\n let peer_invitation = InterfaceConfig {\n\n interface: InterfaceInfo {\n\n network_name: network_name.to_string(),\n\n private_key: keypair.private.to_base64(),\n\n address: IpNetwork::new(peer.ip, root_cidr.prefix())?,\n\n listen_port: None,\n\n },\n\n server: ServerInfo {\n\n external_endpoint: server_peer\n\n .endpoint\n\n .clone()\n", "file_path": "shared/src/prompts.rs", "rank": 30, "score": 151361.67525662176 }, { "content": "pub fn set_listen_port(\n\n interface: &InterfaceName,\n\n listen_port: Option<u16>,\n\n backend: Backend,\n\n) -> Result<(), Error> {\n\n let mut device = DeviceUpdate::new();\n\n if let Some(listen_port) = listen_port {\n\n device = device.set_listen_port(listen_port);\n\n } else {\n\n device = device.randomize_listen_port();\n\n }\n\n device.apply(interface, backend)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "shared/src/wg.rs", "rank": 31, "score": 151357.54741878776 }, { "content": "pub fn set_listen_port(\n\n interface: &InterfaceInfo,\n\n unset: bool,\n\n) -> Result<Option<Option<u16>>, Error> {\n\n let listen_port = (!unset)\n\n .then(|| {\n\n input(\n\n \"Listen port\",\n\n Prefill::Default(interface.listen_port.unwrap_or(51820)),\n\n )\n\n })\n\n .transpose()?;\n\n\n\n let mut confirmation = Confirm::with_theme(&*THEME);\n\n confirmation\n\n .wait_for_newline(true)\n\n .with_prompt(\n\n &(if let Some(port) = &listen_port {\n\n format!(\"Set listen port to {}?\", port)\n\n } else {\n", "file_path": 
"shared/src/prompts.rs", "rank": 32, "score": 151357.54741878773 }, { "content": "pub fn delete_association<'a>(\n\n associations: &'a [Association],\n\n cidrs: &'a [Cidr],\n\n) -> Result<Option<&'a Association>, Error> {\n\n let association = choose_association(associations, cidrs)?;\n\n\n\n Ok(\n\n if confirm(&format!(\"Delete association #{}?\", association.id))? {\n\n Some(association)\n\n } else {\n\n None\n\n },\n\n )\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 33, "score": 149722.7818627081 }, { "content": "pub fn choose_association<'a>(\n\n associations: &'a [Association],\n\n cidrs: &'a [Cidr],\n\n) -> Result<&'a Association, Error> {\n\n let names: Vec<_> = associations\n\n .iter()\n\n .map(|association| {\n\n format!(\n\n \"{}: {} <=> {}\",\n\n association.id,\n\n &cidrs\n\n .iter()\n\n .find(|c| c.id == association.cidr_id_1)\n\n .unwrap()\n\n .name,\n\n &cidrs\n\n .iter()\n\n .find(|c| c.id == association.cidr_id_2)\n\n .unwrap()\n\n .name\n\n )\n\n })\n\n .collect();\n\n let (index, _) = select(\"Association\", &names)?;\n\n\n\n Ok(&associations[index])\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 34, "score": 149722.7818627081 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn set_addr(interface: &InterfaceName, addr: IpNetwork) -> Result<(), io::Error> {\n\n let real_interface = wgctrl::backends::userspace::resolve_tun(interface)?;\n\n\n\n if addr.is_ipv4() {\n\n cmd(\n\n \"ifconfig\",\n\n &[\n\n &real_interface,\n\n \"inet\",\n\n &addr.to_string(),\n\n &addr.ip().to_string(),\n\n \"alias\",\n\n ],\n\n )\n\n .map(|_output| ())\n\n } else {\n\n cmd(\n\n \"ifconfig\",\n\n &[&real_interface, \"inet6\", &addr.to_string(), \"alias\"],\n\n )\n\n .map(|_output| ())\n\n }\n\n}\n\n\n", "file_path": "shared/src/wg.rs", "rank": 35, "score": 145991.1904081544 }, { "content": "pub fn set_addr(interface: &InterfaceName, addr: IpNetwork) -> Result<(), io::Error> {\n\n let index = if_nametoindex(interface)?;\n\n let (family, nlas) 
= match addr {\n\n IpNetwork::V4(network) => {\n\n let addr_bytes = network.ip().octets().to_vec();\n\n (\n\n AF_INET as u8,\n\n vec![\n\n address::Nla::Local(addr_bytes.clone()),\n\n address::Nla::Address(addr_bytes),\n\n ],\n\n )\n\n },\n\n IpNetwork::V6(network) => (\n\n AF_INET6 as u8,\n\n vec![address::Nla::Address(network.ip().octets().to_vec())],\n\n ),\n\n };\n\n let message = AddressMessage {\n\n header: AddressHeader {\n", "file_path": "shared/src/netlink.rs", "rank": 36, "score": 145986.94313276926 }, { "content": "pub fn ensure_dirs_exist(dirs: &[&Path]) -> Result<(), WrappedIoError> {\n\n for dir in dirs {\n\n match fs::create_dir(dir).with_path(dir) {\n\n Err(e) if e.kind() != io::ErrorKind::AlreadyExists => {\n\n return Err(e);\n\n },\n\n _ => {\n\n warn_on_dangerous_mode(dir).with_path(dir)?;\n\n },\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "shared/src/lib.rs", "rank": 37, "score": 142724.8164433746 }, { "content": "/// Bring up a prompt to create a new CIDR. Returns the peer request.\n\npub fn add_cidr(cidrs: &[Cidr], request: &AddCidrOpts) -> Result<Option<CidrContents>, Error> {\n\n let parent_cidr = if let Some(ref parent_name) = request.parent {\n\n cidrs\n\n .iter()\n\n .find(|cidr| &cidr.name == parent_name)\n\n .ok_or_else(|| anyhow!(\"No parent CIDR with that name exists.\"))?\n\n } else {\n\n choose_cidr(cidrs, \"Parent CIDR\")?\n\n };\n\n\n\n let name = if let Some(ref name) = request.name {\n\n name.clone()\n\n } else {\n\n input(\"Name\", Prefill::None)?\n\n };\n\n\n\n let cidr = if let Some(cidr) = request.cidr {\n\n cidr\n\n } else {\n\n input(\"CIDR\", Prefill::None)?\n", "file_path": "shared/src/prompts.rs", "rank": 39, "score": 137789.4680155851 }, { "content": "fn print_peer(peer: &PeerState, short: bool, level: usize) {\n\n let pad = level * 2;\n\n let PeerState { peer, info } = peer;\n\n if short {\n\n let connected = PeerDiff::peer_recently_connected(info);\n\n\n\n println_pad!(\n\n pad,\n\n \"| {} {}: {} ({}{}…)\",\n\n 
if connected {\n\n \"◉\".bold()\n\n } else {\n\n \"◯\".dimmed()\n\n },\n\n peer.ip.to_string().yellow().bold(),\n\n peer.name.yellow(),\n\n if info.is_none() { \"you, \" } else { \"\" },\n\n &peer.public_key[..6].dimmed(),\n\n );\n\n } else {\n", "file_path": "client/src/main.rs", "rank": 40, "score": 137546.2822893819 }, { "content": "fn list_cidrs(interface: &InterfaceName, tree: bool) -> Result<(), Error> {\n\n let data_store = DataStore::open(interface)?;\n\n if tree {\n\n let cidr_tree = CidrTree::new(data_store.cidrs());\n\n colored::control::set_override(false);\n\n print_tree(&cidr_tree, &[], 0);\n\n colored::control::unset_override();\n\n } else {\n\n for cidr in data_store.cidrs() {\n\n println!(\"{} {}\", cidr.cidr, cidr.name);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 41, "score": 136534.5468438266 }, { "content": "fn print_interface(device_info: &Device, short: bool) -> Result<(), Error> {\n\n if short {\n\n let listen_port_str = device_info\n\n .listen_port\n\n .map(|p| format!(\"(:{}) \", p))\n\n .unwrap_or_default();\n\n println!(\n\n \"{} {}\",\n\n device_info.name.to_string().green().bold(),\n\n listen_port_str.dimmed(),\n\n );\n\n } else {\n\n println!(\n\n \"{}: {}\",\n\n \"network\".green().bold(),\n\n device_info.name.to_string().green(),\n\n );\n\n if let Some(listen_port) = device_info.listen_port {\n\n println!(\" {}: {}\", \"listening port\".bold(), listen_port);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 42, "score": 136534.5468438266 }, { "content": "pub fn json_status_response<F: Serialize>(\n\n form: F,\n\n status: StatusCode,\n\n) -> Result<Response<Body>, ServerError> {\n\n let json = serde_json::to_string(&form)?;\n\n Ok(Response::builder()\n\n .status(status)\n\n .header(header::CONTENT_TYPE, \"application/json\")\n\n .body(Body::from(json))?)\n\n}\n\n\n", "file_path": "server/src/util.rs", "rank": 43, "score": 135469.33823522145 }, { "content": "/// Bring up 
a prompt to delete a CIDR. Returns the peer request.\n\npub fn delete_cidr(cidrs: &[Cidr], peers: &[Peer], request: &DeleteCidrOpts) -> Result<i64, Error> {\n\n let eligible_cidrs: Vec<_> = cidrs\n\n .iter()\n\n .filter(|cidr| {\n\n !peers.iter().any(|peer| peer.contents.cidr_id == cidr.id) &&\n\n !cidrs.iter().any(\n\n |cidr2| matches!(cidr2.contents.parent, Some(parent_id) if parent_id == cidr.id)\n\n )\n\n })\n\n .collect();\n\n let cidr = if let Some(ref name) = request.name {\n\n cidrs\n\n .iter()\n\n .find(|cidr| &cidr.name == name)\n\n .ok_or_else(|| anyhow!(\"CIDR {} doesn't exist or isn't eligible for deletion\", name))?\n\n } else {\n\n select(\"Delete CIDR\", &eligible_cidrs)?.1\n\n };\n\n\n\n if request.yes || confirm(&format!(\"Delete CIDR \\\"{}\\\"?\", cidr.name))? {\n\n Ok(cidr.id)\n\n } else {\n\n Err(anyhow!(\"Canceled\"))\n\n }\n\n}\n\n\n", "file_path": "shared/src/prompts.rs", "rank": 44, "score": 135288.73710650922 }, { "content": "fn enable_or_disable_peer(interface: &InterfaceName, enable: bool) -> Result<(), Error> {\n\n let InterfaceConfig { server, .. } = InterfaceConfig::from_interface(interface)?;\n\n let api = Api::new(&server);\n\n\n\n log::info!(\"Fetching peers.\");\n\n let peers: Vec<Peer> = api.http(\"GET\", \"/admin/peers\")?;\n\n\n\n if let Some(peer) = prompts::enable_or_disable_peer(&peers[..], enable)? {\n\n let Peer { id, mut contents } = peer;\n\n contents.is_disabled = !enable;\n\n api.http_form(\"PUT\", &format!(\"/admin/peers/{}\", id), contents)?;\n\n } else {\n\n log::info!(\"exiting without disabling peer.\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 45, "score": 134027.28104016412 }, { "content": "pub fn enumerate() -> Result<Vec<InterfaceName>, io::Error> {\n\n use std::ffi::OsStr;\n\n\n\n let mut interfaces = vec![];\n\n for entry in fs::read_dir(get_base_folder()?)? 
{\n\n let path = entry?.path();\n\n if path.extension() == Some(OsStr::new(\"name\")) {\n\n let stem = path\n\n .file_stem()\n\n .and_then(|stem| stem.to_str())\n\n .and_then(|name| name.parse::<InterfaceName>().ok())\n\n .filter(|iface| open_socket(iface).is_ok());\n\n if let Some(iface) = stem {\n\n interfaces.push(iface);\n\n }\n\n }\n\n }\n\n\n\n Ok(interfaces)\n\n}\n\n\n", "file_path": "wgctrl-rs/src/backends/userspace.rs", "rank": 46, "score": 133126.8299829739 }, { "content": "pub fn enumerate() -> Result<Vec<InterfaceName>, io::Error> {\n\n let base = unsafe { wgctrl_sys::wg_list_device_names() };\n\n\n\n if base.is_null() {\n\n return Err(io::Error::last_os_error());\n\n }\n\n\n\n let mut current = base;\n\n let mut result = Vec::new();\n\n\n\n loop {\n\n let next_dev = unsafe { CStr::from_ptr(current).to_bytes() };\n\n\n\n let len = next_dev.len();\n\n\n\n if len == 0 {\n\n break;\n\n }\n\n\n\n current = unsafe { current.add(len + 1) };\n", "file_path": "wgctrl-rs/src/backends/kernel.rs", "rank": 47, "score": 133126.8299829739 }, { "content": "pub fn resolve_tun(name: &InterfaceName) -> io::Result<String> {\n\n let namefile = get_namefile(name)?;\n\n Ok(fs::read_to_string(namefile)\n\n .map_err(|_| io::Error::new(io::ErrorKind::NotFound, \"WireGuard name file can't be read\"))?\n\n .trim()\n\n .to_string())\n\n}\n\n\n", "file_path": "wgctrl-rs/src/backends/userspace.rs", "rank": 48, "score": 131710.9202898635 }, { "content": "pub fn auto_migrate(conn: &rusqlite::Connection) -> Result<(), rusqlite::Error> {\n\n let old_version: usize = conn.pragma_query_value(None, \"user_version\", |r| r.get(0))?;\n\n\n\n if old_version < INVITE_EXPIRATION_VERSION {\n\n conn.execute(\n\n \"ALTER TABLE peers ADD COLUMN invite_expires INTEGER\",\n\n params![],\n\n )?;\n\n }\n\n\n\n conn.pragma_update(None, \"user_version\", &CURRENT_VERSION)?;\n\n if old_version != CURRENT_VERSION {\n\n log::info!(\n\n \"migrated db version from {} to {}\",\n\n old_version,\n\n 
CURRENT_VERSION\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "server/src/db/mod.rs", "rank": 49, "score": 129513.30590081935 }, { "content": "pub fn json_response<F: Serialize>(form: F) -> Result<Response<Body>, ServerError> {\n\n let json = serde_json::to_string(&form)?;\n\n Ok(Response::builder()\n\n .status(StatusCode::OK)\n\n .header(header::CONTENT_TYPE, \"application/json\")\n\n .body(Body::from(json))?)\n\n}\n\n\n", "file_path": "server/src/util.rs", "rank": 50, "score": 128231.43181416328 }, { "content": "pub fn init_wizard(conf: &ServerConfig, opts: InitializeOpts) -> Result<(), Error> {\n\n let theme = ColorfulTheme::default();\n\n\n\n shared::ensure_dirs_exist(&[conf.config_dir(), conf.database_dir()]).map_err(|_| {\n\n anyhow!(\n\n \"Failed to create config and database directories {}\",\n\n \"(are you not running as root?)\".bold()\n\n )\n\n })?;\n\n printdoc!(\n\n \"\\nTime to setup your innernet network.\n\n\n\n Your network name can be any hostname-valid string, i.e. \\\"evilcorp\\\", and\n\n your network CIDR should be in the RFC1918 IPv4 (10/8, 172.16/12, or 192.168/16), \n\n or RFC4193 IPv6 (fd00::/8) ranges.\n\n\n\n The external endpoint specified is a <host>:<port> string that is the address clients\n\n will connect to. It's up to you to forward/open ports in your routers/firewalls\n\n as needed.\n\n\n", "file_path": "server/src/initialize.rs", "rank": 51, "score": 127167.1634295673 }, { "content": "pub fn get_by_name(name: &InterfaceName) -> Result<Device, io::Error> {\n\n let mut sock = open_socket(name)?;\n\n sock.write_all(b\"get=1\\n\\n\")?;\n\n let mut reader = BufReader::new(sock);\n\n let mut buf = String::new();\n\n\n\n let mut parser = ConfigParser::new(name);\n\n\n\n loop {\n\n match reader.read_line(&mut buf)? 
{\n\n 0 | 1 if buf == \"\\n\" => break,\n\n _ => {\n\n parser.add_line(buf.trim_end())?;\n\n buf.clear();\n\n },\n\n };\n\n }\n\n\n\n Ok(parser.into())\n\n}\n\n\n", "file_path": "wgctrl-rs/src/backends/userspace.rs", "rank": 52, "score": 124958.69155397998 }, { "content": "pub fn get_by_name(name: &InterfaceName) -> Result<Device, io::Error> {\n\n let mut device: *mut wgctrl_sys::wg_device = ptr::null_mut();\n\n\n\n let result = unsafe {\n\n wgctrl_sys::wg_get_device(\n\n (&mut device) as *mut _ as *mut *mut wgctrl_sys::wg_device,\n\n name.as_ptr(),\n\n )\n\n };\n\n\n\n let result = if result == 0 {\n\n Ok(Device::from(unsafe { &*device }))\n\n } else {\n\n Err(io::Error::last_os_error())\n\n };\n\n\n\n unsafe { wgctrl_sys::wg_free_device(device) };\n\n\n\n result\n\n}\n\n\n", "file_path": "wgctrl-rs/src/backends/kernel.rs", "rank": 53, "score": 124958.69155397998 }, { "content": "pub fn peer_contents(\n\n name: &str,\n\n ip_str: &str,\n\n cidr_id: i64,\n\n is_admin: bool,\n\n) -> Result<PeerContents, Error> {\n\n let public_key = KeyPair::generate().public;\n\n\n\n Ok(PeerContents {\n\n name: name.parse().map_err(|e: &str| anyhow!(e))?,\n\n ip: ip_str.parse()?,\n\n cidr_id,\n\n public_key: public_key.to_base64(),\n\n is_admin,\n\n endpoint: None,\n\n persistent_keepalive_interval: None,\n\n is_disabled: false,\n\n is_redeemed: true,\n\n invite_expires: None,\n\n })\n\n}\n\n\n", "file_path": "server/src/test.rs", "rank": 54, "score": 122266.8043672491 }, { "content": "fn if_nametoindex(interface: &InterfaceName) -> Result<u32, io::Error> {\n\n match unsafe { libc::if_nametoindex(interface.as_ptr()) } {\n\n 0 => Err(io::Error::new(\n\n io::ErrorKind::NotFound,\n\n format!(\"couldn't find interface '{}'.\", interface),\n\n )),\n\n index => Ok(index),\n\n }\n\n}\n\n\n", "file_path": "shared/src/netlink.rs", "rank": 55, "score": 119196.73508373288 }, { "content": "pub trait IoErrorContext<T> {\n\n fn with_path<P: AsRef<Path>>(self, path: P) -> Result<T, 
WrappedIoError>;\n\n fn with_str<S: Into<String>>(self, context: S) -> Result<T, WrappedIoError>;\n\n}\n\n\n\nimpl<T> IoErrorContext<T> for Result<T, std::io::Error> {\n\n fn with_path<P: AsRef<Path>>(self, path: P) -> Result<T, WrappedIoError> {\n\n self.with_str(path.as_ref().to_string_lossy())\n\n }\n\n\n\n fn with_str<S: Into<String>>(self, context: S) -> Result<T, WrappedIoError> {\n\n self.map_err(|e| WrappedIoError {\n\n io_error: e,\n\n context: context.into(),\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct WrappedIoError {\n", "file_path": "shared/src/types.rs", "rank": 56, "score": 118068.03476909216 }, { "content": "/// Following the rough logic of wg-quick(8), use the wireguard-go userspace\n\n/// implementation by default, but allow for an environment variable to choose\n\n/// a different implementation.\n\n///\n\n/// wgctrl-rs will look for WG_USERSPACE_IMPLEMENTATION first, but will also\n\n/// respect the WG_QUICK_USERSPACE_IMPLEMENTATION choice if the former isn't\n\n/// available.\n\nfn get_userspace_implementation() -> String {\n\n std::env::var(\"WG_USERSPACE_IMPLEMENTATION\")\n\n .or_else(|_| std::env::var(\"WG_QUICK_USERSPACE_IMPLEMENTATION\"))\n\n .unwrap_or_else(|_| \"wireguard-go\".to_string())\n\n}\n\n\n", "file_path": "wgctrl-rs/src/backends/userspace.rs", "rank": 57, "score": 116628.56376564885 }, { "content": "pub fn status_response(status: StatusCode) -> Result<Response<Body>, ServerError> {\n\n Ok(Response::builder().status(status).body(Body::empty())?)\n\n}\n", "file_path": "server/src/util.rs", "rank": 58, "score": 109540.55855393904 }, { "content": "/// DNS wants a random-ish ID to be generated per request.\n\nfn get_id() -> Result<[u8; 2], Error> {\n\n let mut id = [0u8; 2];\n\n File::open(\"/dev/urandom\")?.read_exact(&mut id)?;\n\n Ok(id)\n\n}\n\n\n", "file_path": "publicip/src/lib.rs", "rank": 59, "score": 103820.42450672231 }, { "content": "fn run(opt: Opts) -> Result<(), Error> {\n\n let command = 
opt.command.unwrap_or(Command::Show {\n\n short: false,\n\n tree: false,\n\n interface: None,\n\n });\n\n\n\n match command {\n\n Command::Install {\n\n invite,\n\n hosts,\n\n opts,\n\n } => install(&invite, hosts.into(), opts, opt.network)?,\n\n Command::Show {\n\n short,\n\n tree,\n\n interface,\n\n } => show(short, tree, interface, opt.network)?,\n\n Command::Fetch { interface, hosts } => fetch(&interface, false, hosts.into(), opt.network)?,\n\n Command::Up {\n", "file_path": "client/src/main.rs", "rank": 60, "score": 103820.42450672231 }, { "content": "fn delete_association(interface: &InterfaceName) -> Result<(), Error> {\n\n let InterfaceConfig { server, .. } = InterfaceConfig::from_interface(interface)?;\n\n let api = Api::new(&server);\n\n\n\n log::info!(\"Fetching CIDRs\");\n\n let cidrs: Vec<Cidr> = api.http(\"GET\", \"/admin/cidrs\")?;\n\n log::info!(\"Fetching associations\");\n\n let associations: Vec<Association> = api.http(\"GET\", \"/admin/associations\")?;\n\n\n\n if let Some(association) = prompts::delete_association(&associations[..], &cidrs[..])? {\n\n api.http(\"DELETE\", &format!(\"/admin/associations/{}\", association.id))?;\n\n } else {\n\n log::info!(\"exiting without adding association.\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 61, "score": 99617.04100972664 }, { "content": "fn list_associations(interface: &InterfaceName) -> Result<(), Error> {\n\n let InterfaceConfig { server, .. 
} = InterfaceConfig::from_interface(interface)?;\n\n let api = Api::new(&server);\n\n\n\n log::info!(\"Fetching CIDRs\");\n\n let cidrs: Vec<Cidr> = api.http(\"GET\", \"/admin/cidrs\")?;\n\n log::info!(\"Fetching associations\");\n\n let associations: Vec<Association> = api.http(\"GET\", \"/admin/associations\")?;\n\n\n\n for association in associations {\n\n println!(\n\n \"{}: {} <=> {}\",\n\n association.id,\n\n &cidrs\n\n .iter()\n\n .find(|c| c.id == association.cidr_id_1)\n\n .unwrap()\n\n .name\n\n .yellow(),\n\n &cidrs\n", "file_path": "client/src/main.rs", "rank": 62, "score": 99617.04100972664 }, { "content": "fn netlink_call(\n\n message: RtnlMessage,\n\n flags: Option<u16>,\n\n) -> Result<NetlinkMessage<RtnlMessage>, io::Error> {\n\n let mut req = NetlinkMessage::from(message);\n\n req.header.flags = flags.unwrap_or(NLM_F_REQUEST | NLM_F_ACK | NLM_F_EXCL | NLM_F_CREATE);\n\n req.finalize();\n\n let mut buf = [0; 4096];\n\n req.serialize(&mut buf);\n\n let len = req.buffer_len();\n\n\n\n log::debug!(\"netlink request: {:?}\", req);\n\n let socket = Socket::new(NETLINK_ROUTE).unwrap();\n\n let kernel_addr = SocketAddr::new(0, 0);\n\n socket.connect(&kernel_addr)?;\n\n let n_sent = socket.send(&buf[..len], 0).unwrap();\n\n if n_sent != len {\n\n return Err(io::Error::new(\n\n io::ErrorKind::UnexpectedEof,\n\n \"failed to send netlink request\",\n", "file_path": "shared/src/netlink.rs", "rank": 63, "score": 99100.14608238876 }, { "content": "pub fn get_any(preference: Preference) -> Option<IpAddr> {\n\n let (v4, v6) = get_both();\n\n let (v4, v6) = (v4.map(IpAddr::from), v6.map(IpAddr::from));\n\n match preference {\n\n Preference::Ipv4 => v4.or(v6),\n\n Preference::Ipv6 => v6.or(v4),\n\n }\n\n}\n\n\n", "file_path": "publicip/src/lib.rs", "rank": 64, "score": 96350.37057322389 }, { "content": "pub fn get_both() -> (Option<Ipv4Addr>, Option<Ipv6Addr>) {\n\n let ipv4 = Request::start(CLOUDFLARE_IPV4).ok();\n\n let ipv6 = 
Request::start(CLOUDFLARE_IPV6).ok();\n\n (\n\n ipv4.and_then(|req| req.read_response().ok()),\n\n ipv6.and_then(|req| req.read_response().ok()),\n\n )\n\n}\n\n\n", "file_path": "publicip/src/lib.rs", "rank": 65, "score": 94457.21473912173 }, { "content": "pub fn delete_interface(iface: &InterfaceName) -> io::Result<()> {\n\n let result = unsafe { wgctrl_sys::wg_del_device(iface.as_ptr()) };\n\n\n\n if result == 0 {\n\n Ok(())\n\n } else {\n\n Err(io::Error::last_os_error())\n\n }\n\n}\n\n\n\n/// Represents a WireGuard encryption key.\n\n///\n\n/// WireGuard makes no meaningful distinction between public,\n\n/// private and preshared keys - any sequence of 32 bytes\n\n/// can be used as either of those.\n\n///\n\n/// This means that you need to be careful when working with\n\n/// `Key`s, especially ones created from external data.\n\n#[cfg(target_os = \"linux\")]\n\n#[derive(PartialEq, Eq, Clone)]\n", "file_path": "wgctrl-rs/src/backends/kernel.rs", "rank": 66, "score": 91024.9468252185 }, { "content": "pub fn delete_interface(name: &InterfaceName) -> io::Result<()> {\n\n fs::remove_file(get_socketfile(name)?).ok();\n\n fs::remove_file(get_namefile(name)?).ok();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "wgctrl-rs/src/backends/userspace.rs", "rank": 67, "score": 91024.9468252185 }, { "content": "fn uninstall(interface: &InterfaceName, network: NetworkOpt) -> Result<(), Error> {\n\n if Confirm::with_theme(&*prompts::THEME)\n\n .with_prompt(&format!(\n\n \"Permanently delete network \\\"{}\\\"?\",\n\n interface.as_str_lossy().yellow()\n\n ))\n\n .default(false)\n\n .interact()?\n\n {\n\n log::info!(\"bringing down interface (if up).\");\n\n wg::down(interface, network.backend).ok();\n\n let config = InterfaceConfig::get_path(interface);\n\n let data = DataStore::get_path(interface);\n\n std::fs::remove_file(&config)\n\n .with_path(&config)\n\n .map_err(|e| log::warn!(\"{}\", e.to_string().yellow()))\n\n .ok();\n\n std::fs::remove_file(&data)\n\n .with_path(&data)\n\n 
.map_err(|e| log::warn!(\"{}\", e.to_string().yellow()))\n\n .ok();\n\n log::info!(\n\n \"network {} is uninstalled.\",\n\n interface.as_str_lossy().yellow()\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 68, "score": 90008.58687916971 }, { "content": "fn print_tree(cidr: &CidrTree, peers: &[PeerState], level: usize) {\n\n println_pad!(\n\n level * 2,\n\n \"{} {}\",\n\n cidr.cidr.to_string().bold().blue(),\n\n cidr.name.blue(),\n\n );\n\n\n\n let mut children: Vec<_> = cidr.children().collect();\n\n children.sort();\n\n children\n\n .iter()\n\n .for_each(|child| print_tree(child, peers, level + 1));\n\n\n\n for peer in peers.iter().filter(|p| p.peer.cidr_id == cidr.id) {\n\n print_peer(peer, true, level);\n\n }\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 69, "score": 89347.30170734163 }, { "content": "fn rename_peer(interface: &InterfaceName, opts: RenamePeerOpts) -> Result<(), Error> {\n\n let InterfaceConfig { server, .. } = InterfaceConfig::from_interface(interface)?;\n\n let api = Api::new(&server);\n\n\n\n log::info!(\"Fetching peers\");\n\n let peers: Vec<Peer> = api.http(\"GET\", \"/admin/peers\")?;\n\n\n\n if let Some((peer_request, old_name)) = prompts::rename_peer(&peers, &opts)? {\n\n log::info!(\"Renaming peer...\");\n\n\n\n let id = peers\n\n .iter()\n\n .filter(|p| p.name == old_name)\n\n .map(|p| p.id)\n\n .next()\n\n .ok_or_else(|| anyhow!(\"Peer not found.\"))?;\n\n\n\n let _ = api.http_form(\"PUT\", &format!(\"/admin/peers/{}\", id), peer_request)?;\n\n log::info!(\"Peer renamed.\");\n\n } else {\n\n log::info!(\"exited without renaming peer.\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 70, "score": 86788.93582537337 }, { "content": "fn add_peer(interface: &InterfaceName, opts: AddPeerOpts) -> Result<(), Error> {\n\n let InterfaceConfig { server, .. 
} = InterfaceConfig::from_interface(interface)?;\n\n let api = Api::new(&server);\n\n\n\n log::info!(\"Fetching CIDRs\");\n\n let cidrs: Vec<Cidr> = api.http(\"GET\", \"/admin/cidrs\")?;\n\n log::info!(\"Fetching peers\");\n\n let peers: Vec<Peer> = api.http(\"GET\", \"/admin/peers\")?;\n\n let cidr_tree = CidrTree::new(&cidrs[..]);\n\n\n\n if let Some(result) = prompts::add_peer(&peers, &cidr_tree, &opts)? {\n\n let (peer_request, keypair, target_path, mut target_file) = result;\n\n log::info!(\"Creating peer...\");\n\n let peer: Peer = api.http_form(\"POST\", \"/admin/peers\", peer_request)?;\n\n let server_peer = peers.iter().find(|p| p.id == 1).unwrap();\n\n prompts::write_peer_invitation(\n\n (&mut target_file, &target_path),\n\n interface,\n\n &peer,\n\n server_peer,\n", "file_path": "client/src/main.rs", "rank": 71, "score": 86788.93582537337 }, { "content": "fn delete_cidr(interface: &InterfaceName, opts: DeleteCidrOpts) -> Result<(), Error> {\n\n let InterfaceConfig { server, .. } = InterfaceConfig::from_interface(interface)?;\n\n println!(\"Fetching eligible CIDRs\");\n\n let api = Api::new(&server);\n\n let cidrs: Vec<Cidr> = api.http(\"GET\", \"/admin/cidrs\")?;\n\n let peers: Vec<Peer> = api.http(\"GET\", \"/admin/peers\")?;\n\n\n\n let cidr_id = prompts::delete_cidr(&cidrs, &peers, &opts)?;\n\n\n\n println!(\"Deleting CIDR...\");\n\n let _ = api.http(\"DELETE\", &*format!(\"/admin/cidrs/{}\", cidr_id))?;\n\n\n\n println!(\"CIDR deleted.\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 72, "score": 86788.93582537337 }, { "content": "fn add_cidr(interface: &InterfaceName, opts: AddCidrOpts) -> Result<(), Error> {\n\n let InterfaceConfig { server, .. } = InterfaceConfig::from_interface(interface)?;\n\n log::info!(\"Fetching CIDRs\");\n\n let api = Api::new(&server);\n\n let cidrs: Vec<Cidr> = api.http(\"GET\", \"/admin/cidrs\")?;\n\n\n\n if let Some(cidr_request) = prompts::add_cidr(&cidrs, &opts)? 
{\n\n log::info!(\"Creating CIDR...\");\n\n let cidr: Cidr = api.http_form(\"POST\", \"/admin/cidrs\", cidr_request)?;\n\n\n\n eprintdoc!(\n\n \"\n\n CIDR \\\"{cidr_name}\\\" added.\n\n\n\n Right now, peers within {cidr_name} can only see peers in the same CIDR\n\n , and in the special \\\"infra\\\" CIDR that includes the innernet server peer.\n\n\n\n You'll need to add more associations for peers in diffent CIDRs to communicate.\n\n \",\n\n cidr_name = cidr.name.bold()\n\n );\n\n } else {\n\n log::info!(\"exited without creating CIDR.\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 73, "score": 86788.93582537337 }, { "content": "fn add_association(interface: &InterfaceName, opts: AddAssociationOpts) -> Result<(), Error> {\n\n let InterfaceConfig { server, .. } = InterfaceConfig::from_interface(interface)?;\n\n let api = Api::new(&server);\n\n\n\n log::info!(\"Fetching CIDRs\");\n\n let cidrs: Vec<Cidr> = api.http(\"GET\", \"/admin/cidrs\")?;\n\n\n\n let association = if let (Some(ref cidr1), Some(ref cidr2)) = (opts.cidr1, opts.cidr2) {\n\n let cidr1 = cidrs\n\n .iter()\n\n .find(|c| &c.name == cidr1)\n\n .ok_or_else(|| anyhow!(\"can't find cidr '{}'\", cidr1))?;\n\n let cidr2 = cidrs\n\n .iter()\n\n .find(|c| &c.name == cidr2)\n\n .ok_or_else(|| anyhow!(\"can't find cidr '{}'\", cidr2))?;\n\n (cidr1, cidr2)\n\n } else if let Some((cidr1, cidr2)) = prompts::add_association(&cidrs[..])? 
{\n\n (cidr1, cidr2)\n\n } else {\n", "file_path": "client/src/main.rs", "rank": 74, "score": 86788.93582537337 }, { "content": "/// Inject the collected endpoints from the WG interface into a list of peers.\n\n/// This is essentially what adds NAT holepunching functionality.\n\npub fn inject_endpoints(session: &Session, peers: &mut Vec<Peer>) {\n\n for mut peer in peers {\n\n if peer.contents.endpoint.is_none() {\n\n if let Some(endpoint) = session.context.endpoints.read().get(&peer.public_key) {\n\n peer.contents.endpoint = Some(endpoint.to_owned().into());\n\n }\n\n }\n\n }\n\n}\n", "file_path": "server/src/api/mod.rs", "rank": 75, "score": 85895.46191659072 }, { "content": "fn populate_database(conn: &Connection, db_init_data: DbInitData) -> Result<(), Error> {\n\n const SERVER_NAME: &str = \"innernet-server\";\n\n\n\n let root_cidr = DatabaseCidr::create(\n\n conn,\n\n CidrContents {\n\n name: db_init_data.network_name.clone(),\n\n cidr: db_init_data.network_cidr,\n\n parent: None,\n\n },\n\n )\n\n .map_err(|_| anyhow!(\"failed to create root CIDR\"))?;\n\n\n\n let server_cidr = DatabaseCidr::create(\n\n conn,\n\n CidrContents {\n\n name: SERVER_NAME.into(),\n\n cidr: db_init_data.server_cidr,\n\n parent: Some(root_cidr.id),\n\n },\n", "file_path": "server/src/initialize.rs", "rank": 76, "score": 85320.80606626422 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn get_listener(addr: SocketAddr, interface: &InterfaceName) -> Result<TcpListener, Error> {\n\n let listener = TcpListener::bind(&addr)?;\n\n listener.set_nonblocking(true)?;\n\n let sock = socket2::Socket::from(listener);\n\n sock.bind_device(Some(interface.as_str_lossy().as_bytes()))?;\n\n Ok(sock.into())\n\n}\n\n\n\n/// BSD-likes do seem to bind to an interface when binding to an IP,\n\n/// according to the internet, but we may want to explicitly use\n\n/// IP_BOUND_IF in the future regardless. 
This isn't currently in\n\n/// the socket2 crate however, so we aren't currently using it.\n\n///\n\n/// See https://github.com/tonarino/innernet/issues/26 for more details.\n", "file_path": "server/src/main.rs", "rank": 77, "score": 82932.6736472859 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\nfn get_listener(addr: SocketAddr, _interface: &InterfaceName) -> Result<TcpListener, Error> {\n\n let listener = TcpListener::bind(&addr)?;\n\n listener.set_nonblocking(true)?;\n\n Ok(listener)\n\n}\n\n\n\npub(crate) async fn hyper_service(\n\n req: Request<Body>,\n\n context: Context,\n\n remote_addr: SocketAddr,\n\n) -> Result<Response<Body>, http::Error> {\n\n // Break the path into components.\n\n let components: VecDeque<_> = req\n\n .uri()\n\n .path()\n\n .trim_start_matches('/')\n\n .split('/')\n\n .map(String::from)\n\n .collect();\n\n\n", "file_path": "server/src/main.rs", "rank": 78, "score": 82932.6736472859 }, { "content": "pub fn apply(builder: &DeviceUpdate, iface: &InterfaceName) -> io::Result<()> {\n\n // If we can't open a configuration socket to an existing interface, try starting it.\n\n let mut sock = match open_socket(iface) {\n\n Err(_) => {\n\n fs::create_dir_all(VAR_RUN_PATH)?;\n\n // Clear out any old namefiles if they didn't lead to a connected socket.\n\n let _ = fs::remove_file(get_namefile(iface)?);\n\n start_userspace_wireguard(iface)?;\n\n std::thread::sleep(Duration::from_millis(100));\n\n open_socket(iface)\n\n .map_err(|e| io::Error::new(e.kind(), format!(\"failed to open socket ({})\", e)))?\n\n },\n\n Ok(sock) => sock,\n\n };\n\n\n\n let mut request = String::from(\"set=1\\n\");\n\n\n\n if let Some(Key(k)) = builder.private_key {\n\n request.push_str(&format!(\"private_key={}\\n\", hex::encode(k)));\n\n }\n", "file_path": "wgctrl-rs/src/backends/userspace.rs", "rank": 79, "score": 82866.00190883808 }, { "content": "pub fn apply(builder: &DeviceUpdate, iface: &InterfaceName) -> io::Result<()> {\n\n let (first_peer, last_peer) = 
encode_peers(&builder.peers);\n\n\n\n let result = unsafe { wgctrl_sys::wg_add_device(iface.as_ptr()) };\n\n match result {\n\n 0 | -17 => {},\n\n _ => return Err(io::Error::last_os_error()),\n\n };\n\n\n\n let mut wg_device = Box::new(wgctrl_sys::wg_device {\n\n name: iface.into_inner(),\n\n ifindex: 0,\n\n public_key: wgctrl_sys::wg_key::default(),\n\n private_key: wgctrl_sys::wg_key::default(),\n\n fwmark: 0,\n\n listen_port: 0,\n\n first_peer,\n\n last_peer,\n\n flags: wgdf(0),\n\n });\n", "file_path": "wgctrl-rs/src/backends/kernel.rs", "rank": 80, "score": 82866.00190883808 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"innernet\", about, global_settings(&[AppSettings::ColoredHelp, AppSettings::DeriveDisplayOrder, AppSettings::VersionlessSubcommands, AppSettings::UnifiedHelpMessage]))]\n\nstruct Opts {\n\n #[structopt(subcommand)]\n\n command: Option<Command>,\n\n\n\n /// Verbose output, use -vv for even higher verbositude.\n\n #[structopt(short, long, parse(from_occurrences))]\n\n verbose: u64,\n\n\n\n #[structopt(flatten)]\n\n network: NetworkOpt,\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 81, "score": 69924.9220138886 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"innernet-server\", about, global_settings(&[AppSettings::ColoredHelp, AppSettings::DeriveDisplayOrder, AppSettings::VersionlessSubcommands, AppSettings::UnifiedHelpMessage]))]\n\nstruct Opt {\n\n #[structopt(subcommand)]\n\n command: Command,\n\n\n\n #[structopt(flatten)]\n\n network: NetworkOpt,\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 82, "score": 69924.82703728249 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct HostsOpt {\n\n /// The path to write hosts to.\n\n #[structopt(long = \"hosts-path\", default_value = \"/etc/hosts\")]\n\n hosts_path: PathBuf,\n\n\n\n /// Don't write to any hosts files.\n\n #[structopt(long = \"no-write-hosts\", conflicts_with = \"hosts-path\")]\n\n no_write_hosts: bool,\n\n}\n\n\n\nimpl 
From<HostsOpt> for Option<PathBuf> {\n\n fn from(opt: HostsOpt) -> Self {\n\n (!opt.no_write_hosts).then(|| opt.hosts_path)\n\n }\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 83, "score": 68398.0914519054 }, { "content": "fn up(\n\n interface: &InterfaceName,\n\n loop_interval: Option<Duration>,\n\n hosts_path: Option<PathBuf>,\n\n routing: NetworkOpt,\n\n) -> Result<(), Error> {\n\n loop {\n\n fetch(interface, true, hosts_path.clone(), routing)?;\n\n match loop_interval {\n\n Some(interval) => thread::sleep(interval),\n\n None => break,\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 84, "score": 67941.16739845088 }, { "content": "struct DbInitData {\n\n network_name: String,\n\n network_cidr: IpNetwork,\n\n server_cidr: IpNetwork,\n\n our_ip: IpAddr,\n\n public_key_base64: String,\n\n endpoint: Endpoint,\n\n}\n\n\n", "file_path": "server/src/initialize.rs", "rank": 85, "score": 66994.00938472254 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\nfn main() {}\n", "file_path": "wgctrl-sys/build.rs", "rank": 86, "score": 66333.73826950145 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn main() {\n\n linux::build_bindings();\n\n linux::build_library();\n\n}\n\n\n", "file_path": "wgctrl-sys/build.rs", "rank": 87, "score": 66333.73826950145 }, { "content": "fn main() {\n\n let opt = Opts::from_args();\n\n util::init_logger(opt.verbose);\n\n\n\n if let Err(e) = run(opt) {\n\n println!();\n\n log::error!(\"{}\\n\", e);\n\n if let Some(e) = e.downcast_ref::<WrappedIoError>() {\n\n util::permissions_helptext(e);\n\n }\n\n if let Some(e) = e.downcast_ref::<io::Error>() {\n\n util::permissions_helptext(e);\n\n }\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 88, "score": 66329.49099411629 }, { "content": "fn show(\n\n short: bool,\n\n tree: bool,\n\n interface: Option<Interface>,\n\n network: NetworkOpt,\n\n) -> Result<(), Error> {\n\n let interfaces = interface.map_or_else(\n\n 
|| Device::list(network.backend),\n\n |interface| Ok(vec![*interface]),\n\n )?;\n\n\n\n let devices = interfaces\n\n .into_iter()\n\n .filter_map(|name| {\n\n match DataStore::open(&name) {\n\n Ok(store) => {\n\n let device = Device::get(&name, network.backend).with_str(name.as_str_lossy());\n\n Some(device.map(|device| (device, store)))\n\n },\n\n // Skip WireGuard interfaces that aren't managed by innernet.\n", "file_path": "client/src/main.rs", "rank": 89, "score": 66329.49099411629 }, { "content": "fn uninstall(\n\n interface: &InterfaceName,\n\n conf: &ServerConfig,\n\n network: NetworkOpt,\n\n) -> Result<(), Error> {\n\n if Confirm::with_theme(&*prompts::THEME)\n\n .with_prompt(&format!(\n\n \"Permanently delete network \\\"{}\\\"?\",\n\n interface.as_str_lossy().yellow()\n\n ))\n\n .default(false)\n\n .interact()?\n\n {\n\n println!(\"{} bringing down interface (if up).\", \"[*]\".dimmed());\n\n wg::down(interface, network.backend).ok();\n\n let config = conf.config_path(interface);\n\n let data = conf.database_path(interface);\n\n std::fs::remove_file(&config)\n\n .with_path(&config)\n\n .map_err(|e| println!(\"[!] {}\", e.to_string().yellow()))\n", "file_path": "server/src/main.rs", "rank": 90, "score": 66329.49099411629 }, { "content": "fn fetch(\n\n interface: &InterfaceName,\n\n bring_up_interface: bool,\n\n hosts_path: Option<PathBuf>,\n\n network: NetworkOpt,\n\n) -> Result<(), Error> {\n\n let config = InterfaceConfig::from_interface(interface)?;\n\n let interface_up = match Device::list(network.backend) {\n\n Ok(interfaces) => interfaces.iter().any(|name| name == interface),\n\n _ => false,\n\n };\n\n\n\n if !interface_up {\n\n if !bring_up_interface {\n\n bail!(\n\n \"Interface is not up. 
Use 'innernet up {}' instead\",\n\n interface\n\n );\n\n }\n\n\n", "file_path": "client/src/main.rs", "rank": 91, "score": 66329.49099411629 }, { "content": "fn install(\n\n invite: &Path,\n\n hosts_file: Option<PathBuf>,\n\n opts: InstallOpts,\n\n network: NetworkOpt,\n\n) -> Result<(), Error> {\n\n shared::ensure_dirs_exist(&[*CLIENT_CONFIG_DIR])?;\n\n let config = InterfaceConfig::from_file(invite)?;\n\n\n\n let iface = if opts.default_name {\n\n config.interface.network_name.clone()\n\n } else if let Some(ref iface) = opts.name {\n\n iface.clone()\n\n } else {\n\n Input::with_theme(&*prompts::THEME)\n\n .with_prompt(\"Interface name\")\n\n .default(config.interface.network_name.clone())\n\n .interact()?\n\n };\n\n\n", "file_path": "client/src/main.rs", "rank": 92, "score": 66329.49099411629 }, { "content": "struct ConfigParser {\n\n device_info: Device,\n\n current_peer: Option<PeerInfo>,\n\n}\n\n\n\nimpl From<ConfigParser> for Device {\n\n fn from(parser: ConfigParser) -> Self {\n\n parser.device_info\n\n }\n\n}\n\n\n\nimpl ConfigParser {\n\n /// Returns `None` if an invalid device name was provided.\n\n fn new(name: &InterfaceName) -> Self {\n\n let device_info = Device {\n\n name: *name,\n\n public_key: None,\n\n private_key: None,\n\n fwmark: None,\n\n listen_port: None,\n", "file_path": "wgctrl-rs/src/backends/userspace.rs", "rank": 93, "score": 65701.5869571278 }, { "content": "fn rename_peer(\n\n interface: &InterfaceName,\n\n conf: &ServerConfig,\n\n opts: RenamePeerOpts,\n\n) -> Result<(), Error> {\n\n let conn = open_database_connection(interface, conf)?;\n\n let peers = DatabasePeer::list(&conn)?\n\n .into_iter()\n\n .map(|dp| dp.inner)\n\n .collect::<Vec<_>>();\n\n\n\n if let Some((peer_request, old_name)) = shared::prompts::rename_peer(&peers, &opts)? 
{\n\n let mut db_peer = DatabasePeer::list(&conn)?\n\n .into_iter()\n\n .find(|p| p.name == old_name)\n\n .ok_or_else(|| anyhow!(\"Peer not found.\"))?;\n\n let _peer = db_peer.update(&conn, peer_request)?;\n\n } else {\n\n println!(\"exited without creating peer.\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 94, "score": 64851.65573882387 }, { "content": "fn add_peer(\n\n interface: &InterfaceName,\n\n conf: &ServerConfig,\n\n opts: AddPeerOpts,\n\n network: NetworkOpt,\n\n) -> Result<(), Error> {\n\n let config = ConfigFile::from_file(conf.config_path(interface))?;\n\n let conn = open_database_connection(interface, conf)?;\n\n let peers = DatabasePeer::list(&conn)?\n\n .into_iter()\n\n .map(|dp| dp.inner)\n\n .collect::<Vec<_>>();\n\n let cidrs = DatabaseCidr::list(&conn)?;\n\n let cidr_tree = CidrTree::new(&cidrs[..]);\n\n\n\n if let Some(result) = shared::prompts::add_peer(&peers, &cidr_tree, &opts)? {\n\n let (peer_request, keypair, target_path, mut target_file) = result;\n\n let peer = DatabasePeer::create(&conn, peer_request)?;\n\n if cfg!(not(test)) && Device::get(interface, network.backend).is_ok() {\n\n // Update the current WireGuard interface with the new peers.\n", "file_path": "server/src/main.rs", "rank": 95, "score": 64851.65573882387 }, { "content": "fn add_cidr(\n\n interface: &InterfaceName,\n\n conf: &ServerConfig,\n\n opts: AddCidrOpts,\n\n) -> Result<(), Error> {\n\n let conn = open_database_connection(interface, conf)?;\n\n let cidrs = DatabaseCidr::list(&conn)?;\n\n if let Some(cidr_request) = shared::prompts::add_cidr(&cidrs, &opts)? 
{\n\n let cidr = DatabaseCidr::create(&conn, cidr_request)?;\n\n printdoc!(\n\n \"\n\n CIDR \\\"{cidr_name}\\\" added.\n\n\n\n Right now, peers within {cidr_name} can only see peers in the same CIDR, and in\n\n the special \\\"innernet-server\\\" CIDR that includes the innernet server peer.\n\n\n\n You'll need to add more associations for peers in diffent CIDRs to communicate.\n\n \",\n\n cidr_name = cidr.name.bold()\n\n );\n\n } else {\n\n println!(\"exited without creating CIDR.\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 96, "score": 64851.65573882387 }, { "content": "fn redeem_invite(\n\n iface: &InterfaceName,\n\n mut config: InterfaceConfig,\n\n target_conf: PathBuf,\n\n network: NetworkOpt,\n\n) -> Result<(), Error> {\n\n log::info!(\"bringing up the interface.\");\n\n let resolved_endpoint = config\n\n .server\n\n .external_endpoint\n\n .resolve()\n\n .with_str(config.server.external_endpoint.to_string())?;\n\n wg::up(\n\n iface,\n\n &config.interface.private_key,\n\n config.interface.address,\n\n None,\n\n Some((\n\n &config.server.public_key,\n\n config.server.internal_endpoint.ip(),\n", "file_path": "client/src/main.rs", "rank": 97, "score": 64851.65573882387 }, { "content": "fn override_endpoint(\n\n interface: &InterfaceName,\n\n unset: bool,\n\n network: NetworkOpt,\n\n) -> Result<(), Error> {\n\n let config = InterfaceConfig::from_interface(interface)?;\n\n if !unset && config.interface.listen_port.is_none() {\n\n println!(\n\n \"{}: you need to set a listen port for your interface first.\",\n\n \"note\".bold().yellow()\n\n );\n\n set_listen_port(interface, unset, network)?;\n\n }\n\n\n\n if let Some(endpoint) = prompts::override_endpoint(unset)? 
{\n\n log::info!(\"Updating endpoint.\");\n\n Api::new(&config.server).http_form(\n\n \"PUT\",\n\n \"/user/endpoint\",\n\n EndpointContents::from(endpoint),\n\n )?;\n\n } else {\n\n log::info!(\"exiting without overriding endpoint.\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "client/src/main.rs", "rank": 98, "score": 64851.65573882387 }, { "content": "fn delete_cidr(\n\n interface: &InterfaceName,\n\n conf: &ServerConfig,\n\n args: DeleteCidrOpts,\n\n) -> Result<(), Error> {\n\n println!(\"Fetching eligible CIDRs\");\n\n let conn = open_database_connection(interface, conf)?;\n\n let cidrs = DatabaseCidr::list(&conn)?;\n\n let peers = DatabasePeer::list(&conn)?\n\n .into_iter()\n\n .map(|dp| dp.inner)\n\n .collect::<Vec<_>>();\n\n\n\n let cidr_id = prompts::delete_cidr(&cidrs, &peers, &args)?;\n\n\n\n println!(\"Deleting CIDR...\");\n\n let _ = DatabaseCidr::delete(&conn, cidr_id)?;\n\n\n\n println!(\"CIDR deleted.\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 99, "score": 64851.65573882387 } ]
Rust
qor-os/src/trap/context.rs
CarterTS/Qor
046616dc06179c158788c9003371441bc8a919d9
use core::usize; use super::TrapFrame; #[derive(Debug, Clone, Copy)] pub enum InterruptType { UserSoftwareInterrupt, SupervisorSoftwareInterrupt, MachineSoftwareInterrupt, UserTimeInterrupt, SupervisorTimerInterrupt, MachineTimerInterrupt, UserExternalInterrupt, SupervisorExternalInterrupt, MachineExternalInterrupt, InstructionAddressMisaligned, InstructionAccessFault, IllegalInstruction, Breakpoint, LoadAddressMisaligned, LoadAccessFault, StoreAddressMisaligned, StoreAccessFault, UserEnvironmentCall, SupervisorEnvironmentCall, MachineEnvironmentCall, InstructionPageFault, LoadPageFault, StorePageFault, UnknownSync(usize), UnknownAsync(usize) } pub struct InterruptContext { epc: usize, tval: usize, cause: InterruptType, hart: usize, status: usize, frame: *mut TrapFrame, async_trap: bool } impl InterruptContext { pub fn new(epc: usize, tval: usize, cause: usize, hart: usize, status: usize, frame: &'static mut TrapFrame) -> Self { let async_trap = cause >> 63 & 1 == 1 ; let interrupt_type = match (async_trap, cause & 0xfff) { (true, 0) => InterruptType::UserSoftwareInterrupt, (true, 1) => InterruptType::SupervisorSoftwareInterrupt, (true, 3) => InterruptType::MachineSoftwareInterrupt, (true, 4) => InterruptType::UserTimeInterrupt, (true, 5) => InterruptType::SupervisorTimerInterrupt, (true, 7) => InterruptType::MachineTimerInterrupt, (true, 8) => InterruptType::UserExternalInterrupt, (true, 9) => InterruptType::SupervisorExternalInterrupt, (true, 11) => InterruptType::MachineExternalInterrupt, (false, 0) => InterruptType::InstructionAddressMisaligned, (false, 1) => InterruptType::InstructionAccessFault, (false, 2) => InterruptType::IllegalInstruction, (false, 3) => InterruptType::Breakpoint, (false, 4) => InterruptType::LoadAddressMisaligned, (false, 5) => InterruptType::LoadAccessFault, (false, 6) => InterruptType::StoreAddressMisaligned, (false, 7) => InterruptType::StoreAccessFault, (false, 8) => InterruptType::UserEnvironmentCall, (false, 9) => 
InterruptType::SupervisorEnvironmentCall, (false, 11) => InterruptType::MachineEnvironmentCall, (false, 12) => InterruptType::InstructionPageFault, (false, 13) => InterruptType::LoadPageFault, (false, 15) => InterruptType::StorePageFault, (false, default) => InterruptType::UnknownSync(default), (true, default) => InterruptType::UnknownAsync(default), }; Self { epc, tval, cause: interrupt_type, hart, status, frame, async_trap } } pub fn instruction_address(&self) -> usize { self.epc } pub fn get_associated_value(&self) -> usize { self.tval } pub fn get_cause(&self) -> InterruptType { self.cause } pub fn get_hart(&self) -> usize { self.hart } pub fn get_status(&self) -> usize { self.status } pub fn get_frame(&self) -> *mut TrapFrame { self.frame } pub fn get_frame_mut(&self) -> &mut TrapFrame { unsafe { self.frame.as_mut() }.unwrap() } pub fn is_async(&self) -> bool { self.async_trap } } impl core::fmt::Display for InterruptContext { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { writeln!(f, "Interrupt:")?; writeln!(f, " Cause: {:?}", self.cause)?; writeln!(f, " Instruction: 0x{:x}", self.epc)?; writeln!(f, " MTVAL: 0x{:x}", self.tval)?; writeln!(f, " HART: 0x{:x}", self.hart)?; writeln!(f, " Status: 0x{:x}", self.status)?; writeln!(f, " Frame Ptr: 0x{:x}", self.frame as *const TrapFrame as usize)?; Ok(()) } }
use core::usize; use super::TrapFrame; #[derive(Debug, Clone, Copy)] pub enum InterruptType { UserSoftwareInterrupt, SupervisorSoftwareInterrupt, MachineSoftwareInterrupt, UserTimeInterrupt, SupervisorTimerInterrupt, MachineTimerInterrupt, UserExternalInterrupt, SupervisorExternalInterrupt, MachineExternalInterrupt, InstructionAddressMisaligned, InstructionAccessFault, IllegalInstruction, Breakpoint, LoadAddressMisaligned, LoadAccessFault, StoreAddressMisaligned, StoreAccessFault, UserEnvironmentCall, SupervisorEnvironmentCall, MachineEnvironmentCall, InstructionPageFault, LoadPageF
:InstructionAddressMisaligned, (false, 1) => InterruptType::InstructionAccessFault, (false, 2) => InterruptType::IllegalInstruction, (false, 3) => InterruptType::Breakpoint, (false, 4) => InterruptType::LoadAddressMisaligned, (false, 5) => InterruptType::LoadAccessFault, (false, 6) => InterruptType::StoreAddressMisaligned, (false, 7) => InterruptType::StoreAccessFault, (false, 8) => InterruptType::UserEnvironmentCall, (false, 9) => InterruptType::SupervisorEnvironmentCall, (false, 11) => InterruptType::MachineEnvironmentCall, (false, 12) => InterruptType::InstructionPageFault, (false, 13) => InterruptType::LoadPageFault, (false, 15) => InterruptType::StorePageFault, (false, default) => InterruptType::UnknownSync(default), (true, default) => InterruptType::UnknownAsync(default), }; Self { epc, tval, cause: interrupt_type, hart, status, frame, async_trap } } pub fn instruction_address(&self) -> usize { self.epc } pub fn get_associated_value(&self) -> usize { self.tval } pub fn get_cause(&self) -> InterruptType { self.cause } pub fn get_hart(&self) -> usize { self.hart } pub fn get_status(&self) -> usize { self.status } pub fn get_frame(&self) -> *mut TrapFrame { self.frame } pub fn get_frame_mut(&self) -> &mut TrapFrame { unsafe { self.frame.as_mut() }.unwrap() } pub fn is_async(&self) -> bool { self.async_trap } } impl core::fmt::Display for InterruptContext { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { writeln!(f, "Interrupt:")?; writeln!(f, " Cause: {:?}", self.cause)?; writeln!(f, " Instruction: 0x{:x}", self.epc)?; writeln!(f, " MTVAL: 0x{:x}", self.tval)?; writeln!(f, " HART: 0x{:x}", self.hart)?; writeln!(f, " Status: 0x{:x}", self.status)?; writeln!(f, " Frame Ptr: 0x{:x}", self.frame as *const TrapFrame as usize)?; Ok(()) } }
ault, StorePageFault, UnknownSync(usize), UnknownAsync(usize) } pub struct InterruptContext { epc: usize, tval: usize, cause: InterruptType, hart: usize, status: usize, frame: *mut TrapFrame, async_trap: bool } impl InterruptContext { pub fn new(epc: usize, tval: usize, cause: usize, hart: usize, status: usize, frame: &'static mut TrapFrame) -> Self { let async_trap = cause >> 63 & 1 == 1 ; let interrupt_type = match (async_trap, cause & 0xfff) { (true, 0) => InterruptType::UserSoftwareInterrupt, (true, 1) => InterruptType::SupervisorSoftwareInterrupt, (true, 3) => InterruptType::MachineSoftwareInterrupt, (true, 4) => InterruptType::UserTimeInterrupt, (true, 5) => InterruptType::SupervisorTimerInterrupt, (true, 7) => InterruptType::MachineTimerInterrupt, (true, 8) => InterruptType::UserExternalInterrupt, (true, 9) => InterruptType::SupervisorExternalInterrupt, (true, 11) => InterruptType::MachineExternalInterrupt, (false, 0) => InterruptType:
random
[ { "content": "#[test]\n\npub fn test_path_iterator()\n\n{\n\n use libutils::paths::OwnedPath;\n\n\n\n let path0 = OwnedPath::new(\"/usr/bin/ls\");\n\n let path1 = OwnedPath::new(\"bin/ls\");\n\n let path2 = OwnedPath::new(\"/\");\n\n let path3 = OwnedPath::new(\"./../../home/\");\n\n\n\n assert_eq!(path0.iter().collect::<Vec<_>>(), vec![\"usr\", \"bin\", \"ls\"]);\n\n assert_eq!(path1.iter().collect::<Vec<_>>(), vec![\"bin\", \"ls\"]);\n\n assert_eq!(path2.iter().collect::<Vec<_>>(), Vec::<&str>::new());\n\n assert_eq!(path3.iter().collect::<Vec<_>>(), vec![\".\", \"..\", \"..\", \"home\"]);\n\n\n\n assert_eq!((&path0).iter().collect::<Vec<_>>(), vec![\"usr\", \"bin\", \"ls\"]);\n\n assert_eq!((&path1).iter().collect::<Vec<_>>(), vec![\"bin\", \"ls\"]);\n\n assert_eq!((&path2).iter().collect::<Vec<_>>(), Vec::<&str>::new());\n\n assert_eq!((&path3).iter().collect::<Vec<_>>(), vec![\".\", \"..\", \"..\", \"home\"]);\n\n}\n\n\n\n/// Test the Path Canonicalization\n", "file_path": "libutils/tests/paths.rs", "rank": 0, "score": 69934.95116026339 }, { "content": "/// Generic Filesystem Trait\n\npub trait Filesystem\n\n{\n\n /// Initialize the filesystem on the current disk\n\n fn init(&mut self) -> FilesystemResult<()>;\n\n\n\n /// Sync the filesystem with the current disk\n\n fn sync(&mut self) -> FilesystemResult<()>;\n\n\n\n /// Set the mount_id of the filesystem\n\n fn set_mount_id(&mut self, mount_id: usize, vfs: &'static mut crate::fs::vfs::FilesystemInterface);\n\n\n\n /// Get the index of the root directory of the filesystem\n\n fn get_root_index(&mut self) -> FilesystemResult<FilesystemIndex>;\n\n\n\n /// Convert a path to an inode\n\n fn path_to_inode(&mut self, path: PathBuffer) -> FilesystemResult<FilesystemIndex>;\n\n\n\n /// Convert an inode to a path\n\n fn inode_to_path(&mut self, inode: FilesystemIndex) -> FilesystemResult<PathBuffer>;\n\n\n", "file_path": "qor-os/src/fs/fstrait.rs", "rank": 1, "score": 69934.95116026339 }, { "content": 
"#[test]\n\npub fn test_path_canonicalization()\n\n{\n\n use libutils::paths::OwnedPath;\n\n\n\n let mut path0 = OwnedPath::new(\"/usr/bin/ls\");\n\n let mut path1 = OwnedPath::new(\"bin/ls\");\n\n let mut path2 = OwnedPath::new(\"/\");\n\n let mut path3 = OwnedPath::new(\"./../../home/\");\n\n\n\n path0.canonicalize(&OwnedPath::new(\"/home/name\"));\n\n path1.canonicalize(&OwnedPath::new(\"/usr/\"));\n\n path2.canonicalize(&OwnedPath::new(\"/usr/bin/\"));\n\n path3.canonicalize(&OwnedPath::new(\"/usr/bin\"));\n\n\n\n assert_eq!(path0.as_str(), \"/usr/bin/ls\");\n\n assert_eq!(path1.as_str(), \"/usr/bin/ls\");\n\n assert_eq!(path2.as_str(), \"/\");\n\n assert_eq!(path3.as_str(), \"/usr/bin/./../../home/\");\n\n}\n\n\n\n/// Test Path Splitting\n", "file_path": "libutils/tests/paths.rs", "rank": 2, "score": 69934.95116026339 }, { "content": "#[test]\n\npub fn test_path_splitting()\n\n{\n\n use libutils::paths::OwnedPath;\n\n\n\n let path0 = OwnedPath::new(\"/usr/bin/ls\");\n\n let path1 = OwnedPath::new(\"bin/ls\");\n\n let path2 = OwnedPath::new(\"/\");\n\n let path3 = OwnedPath::new(\"./../../home/\");\n\n\n\n assert_eq!(path0.split_last(), (OwnedPath::new(\"/usr/bin/\"), \"ls\"));\n\n assert_eq!(path1.split_last(), (OwnedPath::new(\"bin/\"), \"ls\"));\n\n assert_eq!(path2.split_last(), (OwnedPath::new(\"\"), \"\"));\n\n assert_eq!(path3.split_last(), (OwnedPath::new(\"./../../\"), \"home\"));\n\n}", "file_path": "libutils/tests/paths.rs", "rank": 3, "score": 69934.95116026339 }, { "content": "#[cfg(test)]\n\npub trait TestFunction\n\n{\n\n fn run(&self) -> ();\n\n}\n\n\n\n// Implement testable \n\n#[cfg(test)]\n\nimpl<T: Fn()> TestFunction for T\n\n{\n\n fn run(&self)\n\n {\n\n crate::kprint!(\"Running Test {}......\\t\", core::any::type_name::<T>());\n\n self();\n\n crate::kprintln!(\"\\x1b[32m[OK]\\x1b[m\");\n\n }\n\n}\n\n\n\n/// Test Runner\n", "file_path": "qor-os/src/test.rs", "rank": 4, "score": 69934.95116026339 }, { "content": "/// Wait until an interrupt 
is recieved\n\npub fn wait_for_int() -> !\n\n{\n\n unsafe { asm_wait_for_int() }\n\n}", "file_path": "qor-os/src/process/scheduler.rs", "rank": 5, "score": 68790.5369707583 }, { "content": "/// Dump on error\n\npub fn dump_on_error()\n\n{\n\n kerrorln!(\"Error Dump:\");\n\n}\n\n\n", "file_path": "qor-os/src/trap/handler.rs", "rank": 6, "score": 68790.5369707583 }, { "content": "/// Switch to the next process\n\npub fn switch_process()\n\n{\n\n let schedule = process::scheduler::schedule_next();\n\n\n\n // Prepare the timer for the next tick\n\n unsafe { drivers::TIMER_DRIVER.trigger() }\n\n\n\n process::scheduler::schedule_jump(schedule);\n\n}\n\n\n", "file_path": "qor-os/src/trap/handler.rs", "rank": 7, "score": 68790.5369707583 }, { "content": "/// init process\n\npub fn init_proc()\n\n{\n\n loop\n\n {\n\n /*\n\n unsafe \n\n {\n\n asm!(\"li a7, 61\");\n\n asm!(\"li a0, 0\");\n\n asm!(\"ecall\");\n\n }*/\n\n }\n\n}", "file_path": "qor-os/src/process/init.rs", "rank": 8, "score": 68790.5369707583 }, { "content": "/// File Descriptor Trait\n\npub trait FileDescriptor\n\n{\n\n /// Close the file descriptor\n\n fn close(&mut self, fs: &mut fs::vfs::FilesystemInterface);\n\n\n\n /// Write to the descriptor\n\n fn write(&mut self, fs: &mut fs::vfs::FilesystemInterface, buffer: *mut u8, count: usize) -> usize;\n\n\n\n /// Read from the descriptor\n\n fn read(&mut self, fs: &mut fs::vfs::FilesystemInterface, buffer: *mut u8, count: usize) -> usize;\n\n\n\n /// Get the inode of the entry\n\n fn get_inode(&mut self) -> Option<FilesystemIndex>;\n\n\n\n /// Seek to the given location in the descriptor\n\n fn seek(&mut self, offset: usize, _mode: SeekMode) -> usize\n\n {\n\n offset\n\n }\n\n\n", "file_path": "qor-os/src/process/descriptor.rs", "rank": 9, "score": 68790.5369707583 }, { "content": "/// Initialize the PLIC Driver (enable the UART receive interrupt)\n\npub fn init_plic_driver()\n\n{\n\n unsafe { 
drivers::PLIC_DRIVER.set_threshold(drivers::plic::PLICPriority::Disable) };\n\n unsafe { drivers::PLIC_DRIVER.enable(drivers::plic::PLICInterrupt(10)) };\n\n unsafe { drivers::PLIC_DRIVER.set_priority(drivers::plic::PLICInterrupt(10), \n\n drivers::plic::PLICPriority::Priority1) };\n\n}\n\n\n", "file_path": "qor-os/src/drivers/mod.rs", "rank": 10, "score": 67708.99472872022 }, { "content": "/// Initialize the UART Driver\n\npub fn init_uart_driver()\n\n{\n\n // Safety: This is safe as far as a race will lead to overlapping print outs\n\n unsafe { UART_DRIVER.init() };\n\n}\n\n\n", "file_path": "qor-os/src/drivers/mod.rs", "rank": 11, "score": 67708.99472872022 }, { "content": "/// Initialize a process manager\n\npub fn init_process_manager()\n\n{\n\n unsafe \n\n {\n\n GLOBAL_PROC_MANAGER = Some(ProcessManager::new());\n\n }\n\n\n\n // Add the init process\n\n let process = super::process::Process::from_fn_ptr(super::init::init_proc);\n\n add_process(process);\n\n}\n\n\n", "file_path": "qor-os/src/process/scheduler.rs", "rank": 12, "score": 67708.99472872022 }, { "content": "pub trait TeletypeDevice\n\n{\n\n fn tty_read_byte(&mut self) -> Option<u8>;\n\n fn tty_write_byte(&mut self, byte: u8);\n\n fn tty_push_byte(&mut self, byte: u8);\n\n fn tty_pop_byte(&mut self) -> Option<u8>;\n\n fn tty_close(&mut self);\n\n\n\n fn get_tty_settings(&self) -> TeletypeSettings;\n\n fn set_tty_settings(&mut self, settings: TeletypeSettings);\n\n\n\n fn bytes_available(&self) -> bool;\n\n\n\n fn backspace(&mut self) -> bool;\n\n\n\n fn handle_input(&mut self, byte: u8) -> bool\n\n {\n\n let settings = self.get_tty_settings();\n\n\n\n if settings.local_flags & IEXTEN > 0 && self.get_preserve_next_state() && (settings.input_flags & IXON == 0 || !self.get_paused_state())\n", "file_path": "qor-os/src/fs/devfs/tty.rs", "rank": 13, "score": 67708.99472872022 }, { "content": "/// Identity map the kernel\n\npub fn identity_map_kernel()\n\n{\n\n use mmu::PageTableEntryFlags;\n\n\n\n // 
Allocate a new page table\n\n let page_table = mmu::PageTable::allocate();\n\n\n\n // Identity map the segments from the linker script\n\n page_table.identity_map(lds::text_start(), lds::text_end(), PageTableEntryFlags::readable() | PageTableEntryFlags::executable());\n\n page_table.identity_map(lds::rodata_start(), lds::rodata_end(), PageTableEntryFlags::readable() | PageTableEntryFlags::executable());\n\n page_table.identity_map(lds::data_start(), lds::data_end(), PageTableEntryFlags::readable() | PageTableEntryFlags::writable());\n\n page_table.identity_map(lds::bss_start(), lds::bss_end(), PageTableEntryFlags::readable() | PageTableEntryFlags::writable());\n\n page_table.identity_map(lds::stack_start(), lds::stack_end(), PageTableEntryFlags::readable() | PageTableEntryFlags::writable());\n\n page_table.identity_map(lds::heap_start(), lds::heap_end(), PageTableEntryFlags::readable() | PageTableEntryFlags::writable());\n\n\n\n // Map the RTC MMIO\n\n page_table.identity_map(0x10_1000, 0x10_1fff, PageTableEntryFlags::readable() | PageTableEntryFlags::writable());\n\n\n\n // Map the CLINT MMIO\n\n page_table.identity_map(0x200_0000, 0x200_b000, PageTableEntryFlags::readable() | PageTableEntryFlags::writable());\n", "file_path": "qor-os/src/mem/mod.rs", "rank": 14, "score": 67708.99472872022 }, { "content": "pub fn debug_print_layout()\n\n{\n\n let ptr = KERNEL_HEAP_POINTER.load(core::sync::atomic::Ordering::SeqCst);\n\n unsafe { ptr.as_mut().unwrap().display_node_list() };\n\n}\n\n\n\n/// Allocation error handler\n", "file_path": "qor-os/src/mem/alloc.rs", "rank": 15, "score": 67708.99472872022 }, { "content": "/// Initialize the trap frame into mscratch\n\npub fn init_trap_frame()\n\n{\n\n // Initialize the trap frame\n\n let trap_frame = TrapFrame::new(2);\n\n\n\n // Allocate the stack frame on the kernel heap\n\n let addr = Box::leak(Box::new(trap_frame)) as *mut TrapFrame as usize;\n\n\n\n // Write the stack frame into the mscratch register\n\n 
riscv::register::mscratch::write(addr);\n\n}", "file_path": "qor-os/src/trap/mod.rs", "rank": 16, "score": 67708.99472872022 }, { "content": "/// Byte Interface Trait\n\n/// Allows for the reading and writing of bytes to and from the given interface\n\npub trait ByteInterface\n\n{\n\n /// Read a byte from the interface\n\n fn read_byte(&mut self) -> Option<u8>;\n\n\n\n /// Write a byte to the interface\n\n fn write_byte(&mut self, data: u8);\n\n\n\n /// Flush the interface\n\n fn flush(&mut self) {}\n\n}\n\n\n", "file_path": "qor-os/src/drivers/generic/mod.rs", "rank": 17, "score": 67708.99472872022 }, { "content": "/// Buffer Interface Trait\n\n/// Allows reading and writing to and from a buffer in memory\n\npub trait BufferInterface\n\n{\n\n /// Read a byte\n\n fn read_byte(&mut self, offset: usize) -> Option<u8>;\n\n\n\n /// Write a byte\n\n fn write_byte(&mut self, offset: usize, data: u8);\n\n\n\n /// Get the size of the buffer\n\n fn get_size(&self) -> usize;\n\n\n\n /// Flush the memory (send an update to wherever it is pointing)\n\n fn flush(&mut self);\n\n\n\n /// Attempt to access an underlying buffer for mmap\n\n fn get_buffer(&self) -> Option<*mut u8>;\n\n}", "file_path": "qor-os/src/drivers/generic/mod.rs", "rank": 18, "score": 67708.99472872022 }, { "content": "/// Initialize the kernel page allocator\n\npub fn init_kernel_page_allocator()\n\n{\n\n unsafe { GLOBAL_KERNEL_PAGE_ALLOCATOR = page::PageMap::initialize(lds::heap_start(), (lds::heap_end() - lds::heap_start()) / PAGE_SIZE) };\n\n}\n\n\n", "file_path": "qor-os/src/mem/mod.rs", "rank": 19, "score": 66685.28185061872 }, { "content": "/// Initialize the VirtIO interrupts\n\npub fn init_virtio_interrupts()\n\n{\n\n use crate::drivers;\n\n\n\n for i in 1..9\n\n {\n\n unsafe { drivers::PLIC_DRIVER.enable(drivers::plic::PLICInterrupt(i)) };\n\n unsafe { drivers::PLIC_DRIVER.set_priority(drivers::plic::PLICInterrupt(i), \n\n drivers::plic::PLICPriority::Priority1) };\n\n }\n\n}", "file_path": 
"qor-os/src/drivers/virtio/mod.rs", "rank": 20, "score": 66685.28185061872 }, { "content": "#[test_case]\n\npub fn test_page_offset_extraction()\n\n{\n\n assert_eq!(VirtualAddress(0x46F45E).page_offset(), 0x45E);\n\n assert_eq!(VirtualAddress(0xE).page_offset(), 0xE);\n\n assert_eq!(VirtualAddress(0xFFFFFFFFFF).page_offset(), 0xFFF);\n\n assert_eq!(VirtualAddress(0x0).page_offset(), 0);\n\n\n\n let mut value = VirtualAddress(0xFFFFFFFFFFFFFFFF);\n\n value.set_page_offset(0x9A8);\n\n assert_eq!(value.page_offset(), 0x9A8);\n\n}\n\n\n\n/// Test MMU Helpers - Virtual Page Number\n", "file_path": "qor-os/src/mem/mmu.rs", "rank": 21, "score": 66685.28185061872 }, { "content": "/// Initialize the located VirtIO devices\n\npub fn initialize_virtio_devices()\n\n{\n\n let mut devices = DeviceCollection::new();\n\n\n\n for (i, dev_type) in unsafe {VIRTIO_DEVICES}.iter().enumerate()\n\n {\n\n if let Some(dev_type) = dev_type\n\n {\n\n let name = format!(\"{:?}\", dev_type);\n\n kprint!(\" Initializing (Device {}) {:15}.......... 
\", i, name);\n\n\n\n let mut driver = get_driver_at_index(i).unwrap();\n\n\n\n match dev_type\n\n {\n\n VirtIODeviceType::BlockDevice => \n\n {\n\n match driver.init_driver(!(1 << 5))\n\n {\n\n Err(e) =>\n", "file_path": "qor-os/src/drivers/virtio/discovery.rs", "rank": 22, "score": 66685.28185061872 }, { "content": "#[test_case]\n\npub fn test_mapping_virtual_address_translation()\n\n{\n\n let table = mem::mmu::PageTable::allocate();\n\n\n\n table.map(0x3F_0000_0000, 0x1_0000_0000, PageTableEntryFlags::readable() | PageTableEntryFlags::writable() | PageTableEntryFlags::executable(), 0);\n\n\n\n \n\n let root_ppn = table as *mut mem::mmu::PageTable as usize >> 12;\n\n let satp_val = 8 << 60 | root_ppn;\n\n riscv::register::satp::write(satp_val);\n\n\n\n let ptr = 0x3F_0000_0123;\n\n let next_ptr = table.virt_to_phys(ptr).unwrap();\n\n\n\n assert_eq!(next_ptr, 0x1_0000_0123);\n\n\n\n table.drop_table();\n\n}", "file_path": "qor-os/src/mem/mmu.rs", "rank": 23, "score": 65714.88095906477 }, { "content": "#[test_case]\n\npub fn test_virtual_page_number_extraction()\n\n{\n\n assert_eq!(VirtualAddress(0xA6B46CF45E).virtual_page_number(0), 0xCF);\n\n assert_eq!(VirtualAddress(0xA6B46CF45E).virtual_page_number(1), 0x1A3);\n\n assert_eq!(VirtualAddress(0xA6B46CF45E).virtual_page_number(2), 0x9A);\n\n\n\n let mut value = VirtualAddress(0xFFFFFFFFFFFFFFFF);\n\n value.set_virtual_page_number(0, 0x1A8);\n\n assert_eq!(value.virtual_page_number(0), 0x1A8);\n\n\n\n value.set_virtual_page_number(1, 0x8D);\n\n assert_eq!(value.virtual_page_number(1), 0x8D);\n\n\n\n value.set_virtual_page_number(2, 0x1A8);\n\n assert_eq!(value.virtual_page_number(2), 0x1A8);\n\n\n\n assert_eq!(value.virtual_page_number(1), 0x8D);\n\n assert_eq!(value.virtual_page_number(0), 0x1A8);\n\n}\n\n\n\n/*\n\n ============================= Tests for Mapper =============================\n\n*/\n\n\n\n/// Test MMU - Mapping and Virtual Address Translation\n", "file_path": "qor-os/src/mem/mmu.rs", "rank": 24, 
"score": 65714.88095906477 }, { "content": "#[test_case]\n\npub fn test_kernel_page_allocator_no_overwrite()\n\n{\n\n // Pages to test\n\n let page_count = 256;\n\n\n\n // The first address in the allocator\n\n let first = super::kpzalloc(1, \"Test\").unwrap();\n\n\n\n // Allocate every page\n\n for _ in 0..(page_count - 1)\n\n {\n\n super::kpzalloc(1, \"Test\").unwrap();\n\n }\n\n\n\n // Go over every page\n\n for i in 0..page_count\n\n {\n\n let this_ptr = first + i * super::PAGE_SIZE;\n\n\n\n // Overwrite this page with 0xFF\n", "file_path": "qor-os/src/mem/test.rs", "rank": 25, "score": 65714.88095906477 }, { "content": "#[test_case]\n\npub fn test_kernel_byte_allocator_multiple()\n\n{\n\n // Initialize a small global allocator\n\n super::alloc::init_kernel_global_allocator(2);\n\n\n\n let mut v = Vec::new();\n\n\n\n for _ in 0..16\n\n {\n\n v.push(Box::leak(Box::new(42usize)) as *mut usize);\n\n }\n\n\n\n for ptr in &v\n\n {\n\n unsafe { ptr.write(5); }\n\n }\n\n\n\n for ptr in v\n\n {\n\n unsafe { Box::from_raw(ptr) };\n\n }\n\n}", "file_path": "qor-os/src/mem/test.rs", "rank": 26, "score": 65714.88095906477 }, { "content": "/// Probe the VirtIO address space, discovering and logging any devices found,\n\n/// will initialize the `VIRTIO_DEVICES` array\n\npub fn probe_virtio_address_space()\n\n{\n\n assert!(VIRT_IO_START >= VIRT_IO_END);\n\n kdebugln!(VirtIO, \"Probing VirtIO Devices\");\n\n\n\n // The VirtIO Devices are laid out from VIRT_IO_END to VIRT_IO_START, at\n\n // VIRT_IO_STEP intervals, with indexing starting at 1\n\n\n\n for base_addr in (VIRT_IO_END..=VIRT_IO_START).step_by(VIRT_IO_STEP)\n\n {\n\n let index = virtio_address_to_index(base_addr);\n\n kdebug!(VirtIO, \"Probing VirtIO Device {} at 0x{:x}..........\", index, base_addr);\n\n\n\n // Safety: This is directly from the VIRT_IO_START and VIRT_IO_END\n\n // values\n\n let dev = unsafe { VirtIOHelper::new(base_addr) };\n\n\n\n let magic = dev.read_field(Field::MagicValue);\n\n let id = 
dev.read_field(Field::DeviceId);\n\n\n", "file_path": "qor-os/src/drivers/virtio/discovery.rs", "rank": 27, "score": 65714.88095906477 }, { "content": "#[test_case]\n\npub fn test_kernel_page_allocator_zalloc()\n\n{\n\n // Pages to test\n\n let page_count = 4096;\n\n\n\n // The first address in the allocator\n\n let first = super::kpzalloc(1, \"Test\").unwrap();\n\n\n\n // Ensure the first page is zero allocated\n\n if unsafe { (first as *mut [u8; super::PAGE_SIZE]).read() } != [0; super::PAGE_SIZE]\n\n {\n\n panic!(\"Page 0x{:x} is not zero initialized\", first);\n\n }\n\n\n\n // Allocate every page\n\n for _ in 0..(page_count - 1)\n\n {\n\n let ptr = super::kpzalloc(1, \"Test\").unwrap();\n\n\n\n // Ensure the pages are zero allocated\n", "file_path": "qor-os/src/mem/test.rs", "rank": 28, "score": 65714.88095906477 }, { "content": "#[test_case]\n\npub fn test_kernel_page_allocator_allocate_all()\n\n{\n\n // Pages to test\n\n let page_count = 4096;\n\n\n\n // The first address in the allocator\n\n let first = super::kpalloc(1, \"Test\").unwrap();\n\n\n\n // Allocate every page\n\n for _ in 0..(page_count - 1)\n\n {\n\n super::kpalloc(1, \"Test\").unwrap();\n\n }\n\n\n\n // Free every page\n\n for i in 0..page_count\n\n {\n\n super::kpfree(first + super::PAGE_SIZE * i, 1).unwrap();\n\n }\n\n\n\n // Assert that all of the pages are free\n\n assert_eq!(super::allocated_kernel_pages(), 0);\n\n}\n\n\n\n/// Test Kernel Page Grained Allocator - Ensure Zero Alloc\n", "file_path": "qor-os/src/mem/test.rs", "rank": 29, "score": 65714.88095906477 }, { "content": "#[test_case]\n\npub fn test_kernel_byte_allocator_simple()\n\n{\n\n // Initialize a small global allocator\n\n super::alloc::init_kernel_global_allocator(2);\n\n\n\n // Attempt to allocate a box\n\n let b = Box::leak(Box::new(42usize)) as *mut usize;\n\n\n\n unsafe { b.write_volatile(24); }\n\n\n\n // Attempt to free the box\n\n unsafe { Box::from_raw(b) };\n\n}\n\n\n\n/// Test Kernel Byte Grained Allocator - Test 
Multiple Allocation\n", "file_path": "qor-os/src/mem/test.rs", "rank": 30, "score": 65714.88095906477 }, { "content": "/// Get the data start address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn data_start() -> usize\n\n{\n\n\tunsafe { DATA_START }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 31, "score": 64394.1273125026 }, { "content": "/// Get the heap start address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn heap_start() -> usize\n\n{\n\n\tunsafe { HEAP_START }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 32, "score": 64394.1273125026 }, { "content": "/// Get the data end address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn data_end() -> usize\n\n{\n\n\tunsafe { DATA_END }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 33, "score": 64394.1273125026 }, { "content": "/// Get the text end address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn text_end() -> usize\n\n{\n\n\tunsafe { TEXT_END }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 34, "score": 64394.1273125026 }, { "content": "/// Get the text start address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn text_start() -> usize\n\n{\n\n\tunsafe { TEXT_START }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 35, "score": 64394.1273125026 }, { "content": "/// Get the stack end address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn stack_end() -> usize\n\n{\n\n\tunsafe { KERNEL_STACK_END }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 36, 
"score": 64394.1273125026 }, { "content": "/// Get the rodata start address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn rodata_start() -> usize\n\n{\n\n\tunsafe { RODATA_START }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 37, "score": 64394.1273125026 }, { "content": "/// Get the heap end address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn heap_end() -> usize\n\n{\n\n\tunsafe { HEAP_END }\n\n}\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 38, "score": 64394.1273125026 }, { "content": "/// Get the bss start address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn bss_start() -> usize\n\n{\n\n\tunsafe { BSS_START }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 39, "score": 64394.1273125026 }, { "content": "/// Get the stack start address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn stack_start() -> usize\n\n{\n\n\tunsafe { KERNEL_STACK_START }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 40, "score": 64394.1273125026 }, { "content": "/// Get the bss end address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn bss_end() -> usize\n\n{\n\n\tunsafe { BSS_END }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 41, "score": 64394.1273125026 }, { "content": "/// Get the rodata end address as a usize\n\n/// Safety: Because this value should have been read properly from the linker\n\n/// script, this is safe\n\npub fn rodata_end() -> usize\n\n{\n\n\tunsafe { RODATA_END }\n\n}\n\n\n", "file_path": "qor-os/src/mem/lds.rs", "rank": 42, "score": 64394.1273125026 }, { "content": "/// Get the number of allocated 
pages on the kernel heap\n\npub fn allocated_kernel_pages() -> usize\n\n{\n\n // Ensure the global kernel page allocator was initialized\n\n if unsafe { GLOBAL_KERNEL_PAGE_ALLOCATOR.is_null() }\n\n {\n\n panic!(\"Cannot get the number of allocated kernel pages because the allocator is not initialized\");\n\n }\n\n \n\n // Safety: The above ensured it was initialized, and the only method of\n\n // initialization is through the proper initializer\n\n unsafe\n\n {\n\n // Panic Safety: This is safe because a null would have been caught\n\n // above\n\n GLOBAL_KERNEL_PAGE_ALLOCATOR.as_ref().unwrap().total_alloc_pages()\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/mem/mod.rs", "rank": 43, "score": 63370.41443440109 }, { "content": "/// Get the number of allocated pages on the kernel heap\n\npub fn total_kernel_pages() -> usize\n\n{\n\n // Ensure the global kernel page allocator was initialized\n\n if unsafe { GLOBAL_KERNEL_PAGE_ALLOCATOR.is_null() }\n\n {\n\n panic!(\"Cannot get the total number of kernel pages because the allocator is not initialized\");\n\n }\n\n \n\n // Safety: The above ensured it was initialized, and the only method of\n\n // initialization is through the proper initializer\n\n unsafe\n\n {\n\n // Panic Safety: This is safe because a null would have been caught\n\n // above\n\n GLOBAL_KERNEL_PAGE_ALLOCATOR.as_ref().unwrap().total_pages()\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/mem/mod.rs", "rank": 44, "score": 63370.41443440109 }, { "content": "/// Check if the graphics driver is loaded\n\npub fn is_graphics_driver_loaded() -> bool\n\n{\n\n unsafe { &GLOBAL_GRAPHICS_DRIVER }.is_some()\n\n}", "file_path": "qor-os/src/drivers/gpu/mod.rs", "rank": 45, "score": 62400.01354284714 }, { "content": "/// Initialize the graphics driver\n\npub fn init_graphics_driver() -> bool\n\n{\n\n // Test the GPU\n\n if let Some(raw_driver) = super::virtio::get_gpu_driver(0)\n\n {\n\n let mut driver = GenericGraphics::new(raw_driver);\n\n\n\n driver.init();\n\n\n\n 
driver.force_update();\n\n\n\n *unsafe { &mut GLOBAL_GRAPHICS_DRIVER } = Some(driver);\n\n\n\n true\n\n }\n\n else\n\n {\n\n kerrorln!(\"Unable to find GPU driver, /dev/fb0 and /dev/disp will not be available\");\n\n false\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/drivers/gpu/mod.rs", "rank": 46, "score": 62400.01354284714 }, { "content": "/// Add a process to the global process manager\n\npub fn add_process(proc: Process)\n\n{\n\n unsafe \n\n {\n\n GLOBAL_PROC_MANAGER.as_mut().unwrap().add_process(proc);\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/process/scheduler.rs", "rank": 47, "score": 60726.954184666996 }, { "content": "/// Initialize the Timer Driver (set the given frequency)\n\npub fn init_timer_driver(freq: usize)\n\n{\n\n kdebugln!(Initialization, \"Setting timer frequency to {}Hz\", freq);\n\n\n\n unsafe\n\n {\n\n drivers::TIMER_DRIVER.set_frequency(freq);\n\n drivers::TIMER_DRIVER.trigger();\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/drivers/mod.rs", "rank": 48, "score": 59756.553293113044 }, { "content": "/// Interrupt handler for all VirtIO interrupts\n\npub fn handle_interrupt(_interrupt: u32)\n\n{\n\n}\n\n\n", "file_path": "qor-os/src/drivers/virtio/mod.rs", "rank": 49, "score": 59756.553293113044 }, { "content": "/// Return all available device directories for the system\n\npub fn get_device_directories() -> Vec<DeviceDirectories>\n\n{\n\n let mut result: Vec<DeviceDirectories> = Vec::new();\n\n\n\n result.push(DeviceDirectories::PseudoTerminalSecondaries);\n\n\n\n result\n\n}\n\n\n", "file_path": "qor-os/src/fs/devfs/devices.rs", "rank": 50, "score": 57959.854030683135 }, { "content": "/// Return all available device files for the system\n\npub fn get_device_files() -> Vec<DeviceFile>\n\n{\n\n let mut result: Vec<DeviceFile> = Vec::new();\n\n \n\n // Only add graphics devices if the graphics driver is loaded\n\n if drivers::gpu::is_graphics_driver_loaded()\n\n {\n\n // /dev/disp : Text mode for the frame buffer\n\n result.push(\n\n 
DeviceFile::new(\n\n \"disp\",\n\n Box::new(\n\n |inode| Box::new(\n\n ByteInterfaceDescriptor::new(drivers::gpu::get_global_graphics_driver(), inode)\n\n )),\n\n Box::new( |_| usize::MAX)\n\n ));\n\n\n\n // /dev/fb0 : Raw frame buffer access\n\n result.push(\n", "file_path": "qor-os/src/fs/devfs/devices.rs", "rank": 51, "score": 57959.854030683135 }, { "content": "/// Initialize the global kernel allocator\n\npub fn init_kernel_global_allocator(page_count: usize)\n\n{\n\n kdebugln!(ByteMemoryAllocation, \"Initialize the Kernel Global Allocator with {} KBs\", page_count * super::PAGE_SIZE / 1024);\n\n\n\n // Insert a new allocation header\n\n KERNEL_HEAP_POINTER.store(AllocationHeader::new(page_count, 16).unwrap(), core::sync::atomic::Ordering::SeqCst);\n\n}\n\n\n", "file_path": "qor-os/src/mem/alloc.rs", "rank": 52, "score": 57959.854030683135 }, { "content": "pub fn ansi_to_ega(c: u8) -> u8\n\n{\n\n match c\n\n {\n\n 1 => 4,\n\n 3 => 6,\n\n 4 => 1,\n\n 6 => 3,\n\n _ => \n\n {\n\n 8 + ansi_to_ega(c & 7)\n\n }\n\n }\n\n}\n\n\n\n/// Text Mode Cell\n\n#[derive(Debug, Clone, Copy)]\n\npub struct TextModeCell\n\n{\n\n c: u8,\n", "file_path": "qor-os/src/drivers/gpu/driver.rs", "rank": 53, "score": 57599.27401513616 }, { "content": "#[cfg(test)]\n\npub fn test_runner(tests: &[&dyn TestFunction]) \n\n{\n\n kprintln!(\"Running {} Tests\", tests.len());\n\n\n\n for test in tests\n\n {\n\n test.run();\n\n }\n\n\n\n kprintln!(\"Testing Complete\");\n\n}", "file_path": "qor-os/src/test.rs", "rank": 54, "score": 57599.27401513616 }, { "content": "/// Schedule the next process\n\npub fn schedule_next() -> (usize, usize, usize)\n\n{\n\n unsafe \n\n {\n\n GLOBAL_PROC_MANAGER.as_mut().unwrap().schedule_process()\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/process/scheduler.rs", "rank": 55, "score": 57599.27401513616 }, { "content": "/// Get the open pseudo terminal indexes\n\npub fn get_open_pseudo_terminal_indexes() -> Vec<usize>\n\n{\n\n 
vec![]\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Clone, Copy)]\n\npub struct TeletypeSettings\n\n{\n\n pub input_flags: u32,\n\n pub output_flags: u32,\n\n pub control_flags: u32,\n\n pub local_flags: u32,\n\n\n\n pub line_discipline: u8,\n\n pub control_characters: [u8; 32],\n\n \n\n pub input_speed: u32,\n\n pub output_speed: u32\n\n}\n", "file_path": "qor-os/src/fs/devfs/tty.rs", "rank": 56, "score": 57126.59495953888 }, { "content": "/// Create a new pipe pair\n\npub fn new_pipe() -> (ReadPipeDescriptor, WritePipeDescriptor)\n\n{\n\n let buffer = utils::ByteRingBuffer::new();\n\n let wrapped_buffer = \n\n alloc::sync::Arc::new(\n\n core::cell::RefCell::new(\n\n buffer));\n\n (\n\n ReadPipeDescriptor { buffer: wrapped_buffer.clone() },\n\n WritePipeDescriptor { buffer: wrapped_buffer.clone() }\n\n )\n\n}", "file_path": "qor-os/src/process/pipe.rs", "rank": 57, "score": 57126.59495953888 }, { "content": "#[alloc_error_handler]\n\npub fn alloc_error(l: core::alloc::Layout) -> ! {\n\n\tpanic!(\n\n\t \"Allocator failed to allocate {} bytes with {}-byte alignment.\",\n\n\t l.size(),\n\n\t l.align()\n\n\t);\n\n}\n\n\n\n// Assign a new global allocator\n\n#[global_allocator]\n\nstatic GA: GlobalAllocator = GlobalAllocator {};", "file_path": "qor-os/src/mem/alloc.rs", "rank": 58, "score": 55805.344711500125 }, { "content": "/// Interrupt Handler\n\npub fn interrupt_handler(interrupt_context: InterruptContext) -> usize\n\n{\n\n kdebugln!(Interrupts, \"{}\", interrupt_context);\n\n\n\n // Check if there is a process running\n\n if let Some(proc) = process::scheduler::current_process()\n\n {\n\n if (interrupt_context.get_status() >> 11) & 3 == 0\n\n {\n\n proc.program_counter = interrupt_context.instruction_address();\n\n }\n\n }\n\n\n\n match interrupt_context.get_cause()\n\n {\n\n InterruptType::MachineExternalInterrupt =>\n\n {\n\n // Get the next external interrupt\n\n if let Some(interrupt) = unsafe { drivers::PLIC_DRIVER.next_interrupt() }\n\n {\n", "file_path": 
"qor-os/src/trap/handler.rs", "rank": 59, "score": 55802.57475270625 }, { "content": "/// Jump into the process\n\npub fn schedule_jump(data: (usize, usize, usize)) -> !\n\n{\n\n unsafe { switch_to_user(data.0, data.1, data.2) }\n\n}\n\n\n\nextern \"C\"\n\n{\n\n pub fn asm_wait_for_int() -> !;\n\n}\n\n\n", "file_path": "qor-os/src/process/scheduler.rs", "rank": 60, "score": 54884.19699250882 }, { "content": "/// Replace a running process\n\npub fn replace_process(pid: PID, proc: Process)\n\n{\n\n unsafe \n\n {\n\n GLOBAL_PROC_MANAGER.as_mut().unwrap().replace_process(pid, proc);\n\n }\n\n}\n\n\n\n\n", "file_path": "qor-os/src/process/scheduler.rs", "rank": 61, "score": 54884.19699250882 }, { "content": "/// sigreturn Syscall\n\npub fn syscall_sigreturn(proc: &mut super::Process)\n\n{\n\n proc.return_from_signal();\n\n}", "file_path": "qor-os/src/syscalls/sigreturn.rs", "rank": 62, "score": 54884.19699250882 }, { "content": "/// Get a reference to the global graphics driver\n\npub fn get_global_graphics_driver() -> &'static mut GenericGraphics\n\n{\n\n if let Some(reference) = unsafe { &mut GLOBAL_GRAPHICS_DRIVER }\n\n {\n\n reference\n\n }\n\n else\n\n {\n\n panic!(\"Cannot access uninitialized graphics driver\");\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/drivers/gpu/mod.rs", "rank": 63, "score": 54175.35701573006 }, { "content": "/// Get a reference to the init process\n\npub fn get_init_process() -> Option<&'static Box<Process>>\n\n{\n\n unsafe \n\n {\n\n GLOBAL_PROC_MANAGER.as_mut().unwrap().get_process_by_pid(0)\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/process/scheduler.rs", "rank": 64, "score": 54008.645449070216 }, { "content": "/// Seperate a path into a path and the last item (path, name)\n\npub fn separate_path_last(path: &str) -> (String, String)\n\n{\n\n if let Some((name, path_items)) = path.split(\"/\").collect::<Vec<_>>().split_last()\n\n {\n\n (path_items.join(\"/\") + \"/\", name.to_string())\n\n }\n\n else\n\n {\n\n (String::new(), 
String::new())\n\n }\n\n}", "file_path": "qor-os/src/utils/mod.rs", "rank": 65, "score": 54008.645449070216 }, { "content": "/// Get a reference to the process manager\n\npub fn get_process_manager() -> Option<&'static mut ProcessManager>\n\n{\n\n if let Some(data) = unsafe { &mut GLOBAL_PROC_MANAGER }\n\n {\n\n Some(data)\n\n }\n\n else\n\n {\n\n None\n\n }\n\n}\n\n\n\nextern \"C\"\n\n{\n\n pub fn switch_to_user(frame: usize, pc: usize, satp: usize) -> !;\n\n}\n\n\n", "file_path": "qor-os/src/process/scheduler.rs", "rank": 66, "score": 53175.38637792596 }, { "content": "/// Get the UART driver\n\npub fn get_uart_driver() -> &'static mut uart::UARTDriver\n\n{\n\n unsafe { &mut UART_DRIVER }\n\n}", "file_path": "qor-os/src/drivers/mod.rs", "rank": 67, "score": 53175.38637792596 }, { "content": "/// Get a reference to the vfs interface\n\npub fn get_vfs_reference() -> Option<&'static mut FilesystemInterface>\n\n{\n\n let ptr = VFS_INTERFACE.load(core::sync::atomic::Ordering::SeqCst);\n\n\n\n unsafe { ptr.as_mut() }\n\n}\n\n \n\n/// Virtual Filesystem Interface\n\npub struct FilesystemInterface\n\n{\n\n mounts: Vec<Option<Box<dyn Filesystem>>>,\n\n root: Option<usize>,\n\n pub index: BTreeMap<OwnedPath, FilesystemIndex>,\n\n indexed: BTreeMap<FilesystemIndex, OwnedPath>\n\n}\n\n\n\nimpl FilesystemInterface\n\n{\n\n /// Create a new Filesystem Interface\n\n pub fn new() -> &'static mut Self\n", "file_path": "qor-os/src/fs/vfs.rs", "rank": 68, "score": 53175.38637792596 }, { "content": "/// sync Syscall\n\npub fn syscall_sync(proc: &mut super::Process) -> usize\n\n{\n\n kdebugln!(Syscalls, \"PID {} requests fs sync\", proc.pid);\n\n\n\n use fs::fstrait::Filesystem;\n\n fs::vfs::get_vfs_reference().unwrap().sync().unwrap();\n\n\n\n 0\n\n}", "file_path": "qor-os/src/syscalls/sync.rs", "rank": 69, "score": 52493.398023125555 }, { "content": "/// Fork Syscall\n\npub fn syscall_fork(proc: &mut super::Process) -> usize\n\n{\n\n // Get the forked process\n\n let forked = 
proc.forked();\n\n\n\n let pid = forked.pid;\n\n\n\n process::scheduler::add_process(forked);\n\n\n\n pid as usize\n\n}", "file_path": "qor-os/src/syscalls/fork.rs", "rank": 70, "score": 52493.398023125555 }, { "content": "/// getpid Syscall\n\npub fn syscall_getpid(proc: &mut super::Process) -> usize\n\n{\n\n proc.pid as usize\n\n}", "file_path": "qor-os/src/syscalls/getpid.rs", "rank": 71, "score": 52493.398023125555 }, { "content": "/// Get the current process\n\npub fn current_process() -> Option<&'static mut Box<Process>>\n\n{\n\n unsafe \n\n {\n\n GLOBAL_PROC_MANAGER.as_mut().unwrap().currently_running_mut()\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/process/scheduler.rs", "rank": 72, "score": 52493.398023125555 }, { "content": "/// Pause Syscall\n\npub fn syscall_pause(proc: &mut super::Process) -> usize\n\n{\n\n proc.state = process::process::ProcessState::Waiting(process::process::WaitMode::ForSignal);\n\n proc.program_counter += 4;\n\n\n\n let schedule = process::scheduler::schedule_next();\n\n process::scheduler::schedule_jump(schedule);\n\n}", "file_path": "qor-os/src/syscalls/pause.rs", "rank": 73, "score": 52493.398023125555 }, { "content": "/// Dump a region of memory\n\npub fn mem_dump(address: *mut u8, size: usize)\n\n{\n\n let start_row = address as usize & !15;\n\n let num_rows = (size + (start_row & 15)) / 16;\n\n\n\n for row in 0..num_rows\n\n {\n\n kprint!(\" {:x}\\t\", start_row / 16 + row);\n\n\n\n let row: &[u8; 16] = unsafe{ (start_row as *mut [u8; 16]).add(row).as_ref().unwrap() };\n\n\n\n for i in 0..16\n\n {\n\n kprint!(\"{:02X} \", row[i]);\n\n\n\n if i == 7\n\n {\n\n kprint!(\" \");\n\n }\n\n }\n", "file_path": "qor-os/src/utils/memdump.rs", "rank": 74, "score": 52493.398023125555 }, { "content": "/// Get the driver at the given index\n\npub fn get_driver_at_index(index: usize) -> Option<VirtIODeviceDriver>\n\n{\n\n unsafe { VIRTIO_DEVICES[index] }.map(\n\n |dt| \n\n VirtIODeviceDriver::new(dt, \n\n unsafe { 
VirtIOHelper::new(virtio_index_to_address(index)) }))\n\n}\n\n\n\nconst FMT_ERROR: &'static str = \"\\x1B[31m\";\n\nconst FMT_WARN: &'static str = \"\\x1B[33m\";\n\nconst FMT_OK: &'static str = \"\\x1B[32m\";\n\nconst FMT_CLEAR: &'static str = \"\\x1B[0m\";\n\n\n", "file_path": "qor-os/src/drivers/virtio/discovery.rs", "rank": 75, "score": 50900.790966603374 }, { "content": "/// Get a mutable reference to the init process\n\npub fn get_init_process_mut() -> Option<&'static mut Box<Process>>\n\n{\n\n unsafe \n\n {\n\n GLOBAL_PROC_MANAGER.as_mut().unwrap().get_process_by_pid_mut(0)\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/process/scheduler.rs", "rank": 76, "score": 50866.18028614936 }, { "content": "/// Exit Syscall\n\npub fn syscall_exit(proc: &mut super::Process, value: usize)\n\n{\n\n kdebugln!(Syscalls, \"Exiting Process PID {} with value: {}\", proc.pid, value);\n\n\n\n proc.kill(value);\n\n\n\n let schedule = process::scheduler::schedule_next();\n\n process::scheduler::schedule_jump(schedule);\n\n}", "file_path": "qor-os/src/syscalls/exit.rs", "rank": 77, "score": 50363.31520450137 }, { "content": "/// External Interrupt Handler\n\npub fn external_interrupt_handler(interrupt: PLICInterrupt, _interrupt_context: &super::InterruptContext)\n\n{\n\n kdebugln!(Interrupts, \"ExtInt{}\", interrupt.0);\n\n\n\n match interrupt\n\n {\n\n PLICInterrupt(1) => drivers::virtio::handle_interrupt(1),\n\n PLICInterrupt(2) => drivers::virtio::handle_interrupt(2),\n\n PLICInterrupt(3) => drivers::virtio::handle_interrupt(3),\n\n PLICInterrupt(4) => drivers::virtio::handle_interrupt(4),\n\n PLICInterrupt(5) => drivers::virtio::handle_interrupt(5),\n\n PLICInterrupt(6) => drivers::virtio::handle_interrupt(6),\n\n PLICInterrupt(7) => drivers::virtio::handle_interrupt(7),\n\n PLICInterrupt(8) => drivers::virtio::handle_interrupt(8),\n\n PLICInterrupt(10) => \n\n {\n\n // Temporary handler to make sure the UART port is read\n\n // use drivers::generic::ByteInterface;\n\n // let c = 
unsafe { drivers::UART_DRIVER.read_byte()};\n\n\n", "file_path": "qor-os/src/trap/extint.rs", "rank": 78, "score": 49385.543540658706 }, { "content": "/// Close Syscall\n\npub fn syscall_close(proc: &mut super::Process, fd: usize) -> usize\n\n{\n\n proc.close(fd)\n\n}", "file_path": "qor-os/src/syscalls/close.rs", "rank": 79, "score": 48446.90282269295 }, { "content": "/// Wait Syscall\n\npub fn syscall_wait(proc: &mut super::Process, ptr: usize) -> usize\n\n{\n\n let status = \n\n if ptr != 0\n\n {\n\n proc.map_mem(ptr).unwrap() as *mut u32\n\n }\n\n else\n\n {\n\n 0 as *mut u32\n\n };\n\n\n\n proc.state = process::process::ProcessState::Waiting(process::process::WaitMode::ForChild);\n\n proc.data.return_code_listener = unsafe { status.as_mut() };\n\n proc.program_counter += 4;\n\n\n\n let schedule = process::scheduler::schedule_next();\n\n process::scheduler::schedule_jump(schedule);\n\n}", "file_path": "qor-os/src/syscalls/wait.rs", "rank": 80, "score": 48446.90282269295 }, { "content": "/// Pipe Syscall\n\npub fn syscall_pipe(proc: &mut super::Process, fds: usize) -> usize\n\n{\n\n let buffer = proc.map_mem(fds).unwrap() as *mut u32;\n\n\n\n let (read, write) = proc.pipe();\n\n \n\n unsafe\n\n {\n\n buffer.add(0).write(read as u32);\n\n buffer.add(1).write(write as u32);\n\n }\n\n\n\n 0\n\n}", "file_path": "qor-os/src/syscalls/pipe.rs", "rank": 81, "score": 48446.90282269295 }, { "content": "/// Dup Syscall\n\npub fn syscall_dup(proc: &mut super::Process, old_fd: usize)-> usize\n\n{\n\n kdebugln!(Syscalls, \"Duplicating FD {}on Process PID {}\", old_fd, proc.pid);\n\n\n\n proc.dup(old_fd, None)\n\n}\n\n\n", "file_path": "qor-os/src/syscalls/dup.rs", "rank": 82, "score": 47689.5282324795 }, { "content": "/// chdir Syscall\n\npub fn syscall_chdir(proc: &mut super::Process, path_ptr: usize) -> usize\n\n{\n\n let path_ptr = proc.map_mem(path_ptr).unwrap() as *mut u8;\n\n let mut path = String::new();\n\n\n\n let mut i = 0; \n\n\n\n loop\n\n {\n\n let v = unsafe { 
path_ptr.add(i).read() } as char;\n\n\n\n if v == '\\x00' { break; }\n\n\n\n path.push(v);\n\n\n\n i += 1;\n\n }\n\n\n\n let path = OwnedPath::new(path).canonicalized(&proc.data.cwd);\n\n \n", "file_path": "qor-os/src/syscalls/chdir.rs", "rank": 83, "score": 47689.5282324795 }, { "content": "/// Free consecutive pages from the kernel\n\npub fn kpfree(addr: usize, count: usize) -> Result<(), page::KernelPageAllocationError>\n\n{\n\n kdebugln!(MemoryAllocation, \"kpfree(0x{:x}, {})\", addr, count);\n\n\n\n \n\n // Ensure the global kernel page allocator was initialized\n\n if unsafe { GLOBAL_KERNEL_PAGE_ALLOCATOR.is_null() }\n\n {\n\n panic!(\"Cannot use kpalloc before the global kernel page allocator is initialized\");\n\n }\n\n \n\n // Safety: The above ensured it was initialized, and the only method of\n\n // initialization is through the proper initializer\n\n unsafe\n\n {\n\n // Panic Safety: This is safe because a null would have been caught\n\n // above\n\n GLOBAL_KERNEL_PAGE_ALLOCATOR.as_mut().unwrap().free_pages(addr, count)\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/mem/mod.rs", "rank": 84, "score": 46966.2660772023 }, { "content": "/// Pause setpgid\n\npub fn syscall_setpgid(proc: &mut super::Process, pid: usize, pgid: usize) -> usize\n\n{\n\n if proc.pid == pid as PID\n\n {\n\n proc.data.process_group_id = pgid as PID;\n\n return 0;\n\n }\n\n\n\n usize::MAX\n\n}", "file_path": "qor-os/src/syscalls/setpgid.rs", "rank": 85, "score": 45120.46777542367 }, { "content": "/// Kill Syscall\n\npub fn syscall_kill(proc: &mut super::Process, pid: usize, signal: usize) -> usize\n\n{\n\n // Convert the signal to the kernel's representation\n\n let sig_type = match signal\n\n {\n\n 2 => SignalType::SIGINT,\n\n 9 => SignalType::SIGKILL,\n\n 15 => SignalType::SIGTERM,\n\n _ => { kwarnln!(\"Unknown signal {}\", signal); return errno::EINVAL }\n\n };\n\n\n\n kdebugln!(Syscalls, \"PID {} Sending Signal {:?} to PID {}\", proc.pid, sig_type, pid);\n\n\n\n if pid != 0\n\n 
{\n\n if process::scheduler::get_process_manager().as_mut().unwrap().send_signal(\n\n pid as u16, \n\n POSIXSignal::new(pid as u16, proc.pid, sig_type)).is_err()\n\n {\n\n errno::ESRCH // Bad pid\n", "file_path": "qor-os/src/syscalls/kill.rs", "rank": 86, "score": 45120.46777542367 }, { "content": "/// Nanosleep Syscall\n\npub fn syscall_nanosleep(proc: &mut super::Process, time: usize, _remaining: usize) -> usize\n\n{\n\n let time: &'static mut IncomingTime = map_ptr(proc, time);\n\n let kernel_duration = KernelTime::nanoseconds(time.seconds * 1_000_000_000 + time.nano_seconds);\n\n let current = unsafe { &drivers::TIMER_DRIVER }.time();\n\n\n\n proc.state = ProcessState::Sleeping { wake_time: current + kernel_duration };\n\n\n\n proc.program_counter += 4;\n\n\n\n let schedule = process::scheduler::schedule_next();\n\n process::scheduler::schedule_jump(schedule);\n\n}", "file_path": "qor-os/src/syscalls/nanosleep.rs", "rank": 87, "score": 45120.46777542367 }, { "content": "/// Get the block driver with the given index\n\npub fn get_block_driver(index: usize) -> Option<&'static mut drivers::block::BlockDriver>\n\n{\n\n if let Some(collection) = unsafe { &mut VIRTIO_DEVICE_COLLECTION }\n\n {\n\n if let Some(driver) = collection.block_devices.get_mut(index)\n\n {\n\n Some(driver)\n\n }\n\n else\n\n {\n\n None\n\n }\n\n }\n\n else\n\n {\n\n None\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/drivers/virtio/mod.rs", "rank": 88, "score": 44632.22099344115 }, { "content": "/// Get the gpu driver with the given index\n\npub fn get_gpu_driver(index: usize) -> Option<&'static mut drivers::gpu::GPUDriver>\n\n{\n\n if let Some(collection) = unsafe { &mut VIRTIO_DEVICE_COLLECTION }\n\n {\n\n if let Some(driver) = collection.gpu_devices.get_mut(index)\n\n {\n\n Some(driver)\n\n }\n\n else\n\n {\n\n None\n\n }\n\n }\n\n else\n\n {\n\n None\n\n }\n\n}\n\n\n", "file_path": "qor-os/src/drivers/virtio/mod.rs", "rank": 89, "score": 44632.22099344115 }, { "content": "/// Getcwd 
Syscall\n\npub fn syscall_getcwd(proc: &mut super::Process, buffer_ptr: usize, size: usize) -> usize\n\n{\n\n let buffer = proc.map_mem(buffer_ptr).unwrap() as *mut u8;\n\n\n\n let mut i = 0;\n\n for c in proc.data.cwd.as_str().as_bytes()\n\n {\n\n if i == size\n\n {\n\n break;\n\n }\n\n else\n\n {\n\n unsafe \n\n {\n\n buffer.add(i).write(*c);\n\n }\n\n }\n\n\n\n i += 1;\n\n }\n\n\n\n i\n\n}", "file_path": "qor-os/src/syscalls/getcwd.rs", "rank": 90, "score": 44429.06415801587 }, { "content": "/// munmap Syscall\n\npub fn syscall_munmap(proc: &mut super::Process, start_ptr: usize, length: usize) -> usize\n\n{\n\n proc.unmap(start_ptr, length)\n\n}", "file_path": "qor-os/src/syscalls/munmap.rs", "rank": 91, "score": 44429.06415801587 }, { "content": "/// mkdir Syscall\n\npub fn syscall_mkdir(proc: &mut super::Process, path_ptr: usize, _mode: usize) -> usize\n\n{\n\n let path_ptr = proc.map_mem(path_ptr).unwrap() as *mut u8;\n\n let mut path = String::new();\n\n\n\n let mut i = 0;\n\n\n\n loop\n\n {\n\n let v = unsafe { path_ptr.add(i).read() } as char;\n\n\n\n if v == '\\x00' { break; }\n\n\n\n path.push(v);\n\n\n\n i += 1;\n\n }\n\n\n\n // Expand the path\n\n let mut expanded = OwnedPath::new(path);\n", "file_path": "qor-os/src/syscalls/mkdir.rs", "rank": 92, "score": 44429.06415801587 }, { "content": "/// Dup2 Syscall\n\npub fn syscall_dup2(proc: &mut super::Process, old_fd: usize, new_fd: usize)-> usize\n\n{\n\n kdebugln!(Syscalls, \"Duplicating FD {} to {} on Process PID {}\", old_fd, new_fd, proc.pid);\n\n\n\n proc.dup(old_fd, Some(new_fd))\n\n}", "file_path": "qor-os/src/syscalls/dup.rs", "rank": 93, "score": 43767.459463602485 }, { "content": "/// Allocate consecutive pages from the kernel to zero\n\npub fn kpzalloc(count: usize, reason: &'static str) -> Result<usize, page::KernelPageAllocationError>\n\n{\n\n // Allocate the pages\n\n let ptr = kpalloc(count, reason)? 
as *mut [u8; 4096];\n\n\n\n // Write zeros to each page\n\n for i in 0..count\n\n {\n\n // Safety: The kernel will throw an error if it cannot find valid memory\n\n unsafe \n\n {\n\n ptr.add(i).write([0; PAGE_SIZE]);\n\n }\n\n }\n\n\n\n Ok(ptr as usize)\n\n}\n\n\n", "file_path": "qor-os/src/mem/mod.rs", "rank": 94, "score": 43767.459463602485 }, { "content": "/// Allocate consecutive pages from the kernel\n\npub fn kpalloc(count: usize, reason: &'static str) -> Result<usize, page::KernelPageAllocationError>\n\n{\n\n kdebug!(MemoryAllocation, \"`{}`: kpalloc({}) -> \", reason, count);\n\n\n\n // Ensure the global kernel page allocator was initialized\n\n if unsafe { GLOBAL_KERNEL_PAGE_ALLOCATOR.is_null() }\n\n {\n\n panic!(\"Cannot use kpalloc before the global kernel page allocator is initialized\");\n\n }\n\n \n\n // Safety: The above ensured it was initialized, and the only method of\n\n // initialization is through the proper initializer\n\n // Panic Safety: This is safe because a null would have been caught\n\n // above\n\n let result = unsafe { GLOBAL_KERNEL_PAGE_ALLOCATOR.as_mut() }.unwrap().alloc_pages(count);\n\n \n\n if let Ok(ptr) = result\n\n {\n\n kdebugln!(MemoryAllocation, \"0x{:x}\", ptr);\n\n }\n\n else\n\n {\n\n kdebugln!(MemoryAllocation, \"{:?}\", result);\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "qor-os/src/mem/mod.rs", "rank": 95, "score": 43767.459463602485 }, { "content": "/// sigaction Syscall\n\npub fn syscall_sigaction(proc: &mut super::Process, signal: usize, new_ptr: usize, old_ptr: usize)\n\n{\n\n let new_ref = unsafe { (proc.map_mem(new_ptr).unwrap() as *mut SignalAction).as_mut() };\n\n let _old_ref = unsafe { (proc.map_mem(old_ptr).unwrap() as *mut SignalAction).as_mut() };\n\n\n\n if let Some(new) = new_ref\n\n {\n\n let sig = SignalType::number_to_signal(signal);\n\n\n\n kdebugln!(Signals, \"sigaction from PID {}: On Signal {:?}\", proc.pid, sig);\n\n\n\n if new.flags & 1 > 0\n\n {\n\n // Set the handler as a function\n\n 
proc.data.signal_map.insert(sig, SignalDisposition::Handler(new.action_fn_ptr));\n\n }\n\n else\n\n {\n\n // Set the handler as a dispoisiton\n\n match new.handler_value\n", "file_path": "qor-os/src/syscalls/sigaction.rs", "rank": 96, "score": 42365.76618568347 }, { "content": "/// Write Syscall\n\npub fn syscall_write(proc: &mut super::Process, fd: usize, buffer: usize, count: usize) -> usize\n\n{\n\n let ptr = proc.map_mem(buffer).unwrap() as *mut u8;\n\n\n\n proc.write(fd, ptr, count)\n\n}", "file_path": "qor-os/src/syscalls/write.rs", "rank": 97, "score": 42312.83640033974 }, { "content": "/// Ioctl Syscall\n\npub fn syscall_ioctl(proc: &mut super::Process, fd: usize, cmd: usize, args: usize) -> usize\n\n{\n\n let structured_command = \n\n match cmd\n\n {\n\n /* /include/uapi/linux/fb.h - Line 14\n\n #define FBIOGET_VSCREENINFO\t0x4600\n\n #define FBIOPUT_VSCREENINFO\t0x4601\n\n #define FBIOGET_FSCREENINFO\t0x4602\n\n #define FBIOGETCMAP\t 0x4604\n\n #define FBIOPUTCMAP 0x4605\n\n #define FBIOPAN_DISPLAY\t\t0x4606\n\n */\n\n // Framebuffer\n\n 0x4600 =>\n\n {\n\n IOControlCommand::FrameBufferGetVariableInfo{ response: map_ptr(proc, args) }\n\n },\n\n 0x4601 =>\n\n {\n", "file_path": "qor-os/src/syscalls/ioctl.rs", "rank": 98, "score": 42312.83640033974 }, { "content": "/// lseek Syscall\n\npub fn syscall_lseek(proc: &mut super::Process, fd: usize, offset: usize, mode: usize) -> usize\n\n{\n\n proc.seek(fd, offset, mode)\n\n}", "file_path": "qor-os/src/syscalls/lseek.rs", "rank": 99, "score": 42312.83640033974 } ]
Rust
elasticsearch/src/error.rs
yaanhyy/elasticsearch-rs
740c3ebd41b391f954e9cf008209b39d89a75231
/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /* Error type based on the error type from es-rs: * * Copyright 2015-2018 Ben Ashford * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ use crate::http::{transport::BuildError, StatusCode}; use std::{error, fmt, io}; #[derive(Debug)] pub struct Error { kind: Kind, } #[cfg(feature = "tokio-feature")] #[derive(Debug)] enum Kind { Build(BuildError), Lib(String), Http(reqwest::Error), Io(io::Error), Json(serde_json::error::Error), } impl From<io::Error> for Error { fn from(err: io::Error) -> Error { Error { kind: Kind::Io(err), } } } #[cfg(feature = "tokio-feature")] impl From<reqwest::Error> for Error { fn from(err: reqwest::Error) -> Error { Error { kind: Kind::Http(err), } } } impl From<serde_json::error::Error> for Error { fn from(err: serde_json::error::Error) -> Error { Error { kind: Kind::Json(err), } } } impl From<url::ParseError> for Error { fn from(err: url::ParseError) -> Error { Error { kind: Kind::Lib(err.to_string()), } } } impl From<BuildError> for Error { fn from(err: BuildError) -> Error { Error { kind: Kind::Build(err), } } } pub(crate) fn lib(err: impl Into<String>) -> Error { Error { kind: Kind::Lib(err.into()), } } impl Error { pub fn status_code(&self) -> Option<StatusCode> { match &self.kind { Kind::Http(err) => err.status(), _ => None, } } pub fn is_timeout(&self) -> bool { match &self.kind { Kind::Http(err) => err.is_timeout(), _ => false, } } pub fn is_json(&self) -> bool { match &self.kind { Kind::Json(_) => true, _ => false, } } } impl error::Error for Error { fn source(&self) -> Option<&(dyn error::Error + 'static)> { match &self.kind { Kind::Build(err) => Some(err), Kind::Lib(_) => None, Kind::Http(err) => Some(err), Kind::Io(err) => Some(err), Kind::Json(err) => Some(err), } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match &self.kind { Kind::Build(err) => err.fmt(f), Kind::Lib(err) => err.fmt(f), Kind::Http(err) => err.fmt(f), Kind::Io(err) => err.fmt(f), Kind::Json(err) => err.fmt(f), } } }
/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for addition
type based on the error type from es-rs: * * Copyright 2015-2018 Ben Ashford * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use crate::http::{transport::BuildError, StatusCode}; use std::{error, fmt, io}; #[derive(Debug)] pub struct Error { kind: Kind, } #[cfg(feature = "tokio-feature")] #[derive(Debug)] enum Kind { Build(BuildError), Lib(String), Http(reqwest::Error), Io(io::Error), Json(serde_json::error::Error), } impl From<io::Error> for Error { fn from(err: io::Error) -> Error { Error { kind: Kind::Io(err), } } } #[cfg(feature = "tokio-feature")] impl From<reqwest::Error> for Error { fn from(err: reqwest::Error) -> Error { Error { kind: Kind::Http(err), } } } impl From<serde_json::error::Error> for Error { fn from(err: serde_json::error::Error) -> Error { Error { kind: Kind::Json(err), } } } impl From<url::ParseError> for Error { fn from(err: url::ParseError) -> Error { Error { kind: Kind::Lib(err.to_string()), } } } impl From<BuildError> for Error { fn from(err: BuildError) -> Error { Error { kind: Kind::Build(err), } } } pub(crate) fn lib(err: impl Into<String>) -> Error { Error { kind: Kind::Lib(err.into()), } } impl Error { pub fn status_code(&self) -> Option<StatusCode> { match &self.kind { Kind::Http(err) => err.status(), _ => None, } } pub fn is_timeout(&self) -> bool { match &self.kind { Kind::Http(err) => err.is_timeout(), _ => false, } } pub fn is_json(&self) -> bool { match &self.kind { Kind::Json(_) => true, _ => false, } } } impl error::Error for Error { fn 
source(&self) -> Option<&(dyn error::Error + 'static)> { match &self.kind { Kind::Build(err) => Some(err), Kind::Lib(_) => None, Kind::Http(err) => Some(err), Kind::Io(err) => Some(err), Kind::Json(err) => Some(err), } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match &self.kind { Kind::Build(err) => err.fmt(f), Kind::Lib(err) => err.fmt(f), Kind::Http(err) => err.fmt(f), Kind::Io(err) => err.fmt(f), Kind::Json(err) => err.fmt(f), } } }
al information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /* Error
random
[ { "content": "pub fn create_default() -> Elasticsearch {\n\n create_for_url(cluster_addr().as_str())\n\n}\n\n\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 0, "score": 61105.408873425535 }, { "content": "fn create_client() -> Result<Elasticsearch, Error> {\n\n fn cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n }\n\n\n\n /// Determines if Fiddler.exe proxy process is running\n\n fn running_proxy() -> bool {\n\n let system = sysinfo::System::new();\n\n !system.get_process_by_name(\"Fiddler\").is_empty()\n\n }\n\n\n\n let mut url = Url::parse(cluster_addr().as_ref()).unwrap();\n\n\n\n // if the url is https and specifies a username and password, remove from the url and set credentials\n\n let credentials = if url.scheme() == \"https\" {\n\n let username = if !url.username().is_empty() {\n\n let u = url.username().to_string();\n", "file_path": "elasticsearch/examples/cat_indices.rs", "rank": 1, "score": 58013.31196316265 }, { "content": "fn create_client() -> Result<Elasticsearch, Error> {\n\n fn cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n }\n\n\n\n /// Determines if Fiddler.exe proxy process is running\n\n fn running_proxy() -> bool {\n\n let system = sysinfo::System::new();\n\n !system.get_process_by_name(\"Fiddler\").is_empty()\n\n }\n\n\n\n let mut url = Url::parse(cluster_addr().as_ref()).unwrap();\n\n\n\n // if the url is https and specifies a username and password, remove from the url and set credentials\n\n let credentials = if url.scheme() == \"https\" {\n\n let username = if !url.username().is_empty() {\n\n let u = url.username().to_string();\n", "file_path": "elasticsearch/examples/search_questions/main.rs", "rank": 2, "score": 57310.472480705794 }, { "content": "fn create_client() -> Result<Elasticsearch, Error> {\n\n fn 
cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n }\n\n\n\n /// Determines if Fiddler.exe proxy process is running\n\n fn running_proxy() -> bool {\n\n let system = sysinfo::System::new();\n\n !system.get_process_by_name(\"Fiddler\").is_empty()\n\n }\n\n\n\n let mut url = Url::parse(cluster_addr().as_ref()).unwrap();\n\n\n\n // if the url is https and specifies a username and password, remove from the url and set credentials\n\n let credentials = if url.scheme() == \"https\" {\n\n let username = if !url.username().is_empty() {\n\n let u = url.username().to_string();\n", "file_path": "elasticsearch/examples/index_questions_answers/main.rs", "rank": 3, "score": 56630.80691698594 }, { "content": "fn create_client() -> Result<Elasticsearch, Error> {\n\n fn cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n }\n\n\n\n /// Determines if Fiddler.exe proxy process is running\n\n fn running_proxy() -> bool {\n\n let system = sysinfo::System::new();\n\n !system.get_process_by_name(\"Fiddler\").is_empty()\n\n }\n\n\n\n let mut url = Url::parse(cluster_addr().as_ref()).unwrap();\n\n\n\n // if the url is https and specifies a username and password, remove from the url and set credentials\n\n let credentials = if url.scheme() == \"https\" {\n\n let username = if !url.username().is_empty() {\n\n let u = url.username().to_string();\n", "file_path": "elasticsearch/examples/search_questions_answers/main.rs", "rank": 4, "score": 56630.80691698594 }, { "content": "pub fn create_for_url(url: &str) -> Elasticsearch {\n\n let builder = create_builder(url);\n\n create(builder)\n\n}\n\n\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 5, "score": 55483.00794663407 }, { "content": "pub fn create(mut builder: TransportBuilder) -> Elasticsearch {\n\n if running_proxy() {\n\n let 
proxy_url = Url::parse(\"http://localhost:8888\").unwrap();\n\n builder = builder.proxy(proxy_url, None, None);\n\n }\n\n\n\n let transport = builder.build().unwrap();\n\n Elasticsearch::new(transport)\n\n}\n\n\n\n/// index some documents into a posts index. If the posts index already exists, do nothing.\n\n///\n\n/// As an async fn, this can end up running multiple times concurrently, and indexing documents\n\n/// several times. In this instance, this is fine.\n\n///\n\n/// TODO: This is a temporary measure until https://github.com/elastic/elasticsearch-rs/issues/19 is implemented.\n\npub async fn index_documents(client: &Elasticsearch) -> Result<Response, Error> {\n\n let index = \"posts\";\n\n let exists_response = client\n\n .indices()\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 6, "score": 53348.793028803884 }, { "content": "/// Gets the client to use in tests\n\npub fn get() -> &'static Elasticsearch {\n\n GLOBAL_CLIENT.deref()\n\n}\n\n\n\n/// Reads the response from Elasticsearch, returning the method, status code, text response,\n\n/// and the response parsed from json or yaml\n\npub async fn read_response(\n\n response: Response,\n\n) -> Result<(Method, StatusCode, String, Value), failure::Error> {\n\n let is_json = response.content_type().starts_with(\"application/json\");\n\n let is_yaml = response.content_type().starts_with(\"application/yaml\");\n\n let method = response.method();\n\n let status_code = response.status_code();\n\n let text = response.text().await?;\n\n let json = if is_json && !text.is_empty() {\n\n serde_json::from_str::<Value>(text.as_ref())?\n\n } else if is_yaml && !text.is_empty() {\n\n serde_yaml::from_str::<Value>(text.as_ref())?\n\n } else {\n\n Value::Null\n", "file_path": "yaml_test_runner/tests/common/client.rs", "rank": 7, "score": 47666.679793898904 }, { "content": "/// Merge some generated content into an existing file. 
Content is fetch using the `get_content`\n\n/// function to accommodate for various generation strategies or content sources.\n\n///\n\n/// Generated sections in the file are delimited by start and end markers. The start marker also\n\n/// indicates the name of the section to be merged, allowing a file to contain several generated\n\n/// sections:\n\n///\n\n/// ```\n\n/// // This is a regular section in the file\n\n///\n\n/// // GENERATED-BEGIN:foo\n\n/// // this part will be replaced by the contents of the \"foo\" section\n\n/// // GENERATED-END\n\n///\n\n/// // Another regular section\n\n///\n\n/// // GENERATED-BEGIN:bar\n\n/// // this part will be replaced by the contents of the \"bar\" section\n\n/// // GENERATED-END\n\n///\n\n/// // End of file\n\n/// ```\n\npub fn merge_file(\n\n mut get_content: impl FnMut(&str) -> Option<String>,\n\n dir: &Path,\n\n file_name: &str,\n\n tracker: &mut GeneratedFiles,\n\n) -> Result<(), failure::Error> {\n\n let mut path = dir.to_owned();\n\n path.push(PathBuf::from_slash(file_name));\n\n\n\n let mut in_generated_section = false;\n\n let mut output = String::with_capacity(1024);\n\n\n\n let file = File::open(&path)?;\n\n\n\n for (line_no, line) in BufReader::new(file).lines().enumerate() {\n\n let line = line?;\n\n\n\n if let Some(captures) = START_REGEX.captures(&line) {\n\n if in_generated_section {\n\n return Err(failure::format_err!(\n", "file_path": "api_generator/src/generator/output.rs", "rank": 8, "score": 45541.49463545723 }, { "content": "/// deserializes an ApiEndpoint from a file\n\nfn endpoint_from_file<R>(\n\n name: String,\n\n reader: &mut R,\n\n) -> Result<(String, ApiEndpoint), failure::Error>\n\nwhere\n\n R: Read,\n\n{\n\n // deserialize the map from the reader\n\n let endpoint: BTreeMap<String, ApiEndpoint> =\n\n serde_json::from_reader(reader).map_err(|e| super::error::ParseError {\n\n message: format!(\"Failed to parse {} because: {}\", name, e),\n\n })?;\n\n\n\n // get the first (and only) endpoint name 
and endpoint body\n\n let mut first_endpoint = endpoint.into_iter().next().unwrap();\n\n first_endpoint.1.full_name = Some(first_endpoint.0.clone());\n\n\n\n // sort the HTTP methods so that we can easily pattern match on them later\n\n for path in first_endpoint.1.url.paths.iter_mut() {\n\n path.methods.sort();\n\n }\n\n\n\n Ok(first_endpoint)\n\n}\n\n\n", "file_path": "api_generator/src/generator/mod.rs", "rank": 9, "score": 45540.66379552235 }, { "content": "/// Writes the input to the specified file, preceded by a header comment indicating generated code\n\npub fn write_file(\n\n input: String,\n\n docs: Option<&PathBuf>,\n\n dir: &PathBuf,\n\n file_name: &str,\n\n tracker: &mut GeneratedFiles,\n\n) -> Result<(), failure::Error> {\n\n let mut path = dir.clone();\n\n path.push(PathBuf::from_slash(file_name));\n\n\n\n let mut file = File::create(&path)?;\n\n file.write_all(\n\n b\"/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. 
licenses this file to you under\n\n * the Apache License, Version 2.0 (the \\\"License\\\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n", "file_path": "api_generator/src/generator/output.rs", "rank": 10, "score": 45540.44162979717 }, { "content": "fn write_test_file(\n\n download_dir: &PathBuf,\n\n suite_dir: &str,\n\n mut entry: Entry<GzDecoder<Response>>,\n\n) -> Result<(), failure::Error> {\n\n let path = entry.path()?;\n\n\n\n let mut dir = {\n\n let mut dir = download_dir.clone();\n\n dir.push(suite_dir);\n\n let parent = path.parent().unwrap().file_name().unwrap();\n\n dir.push(parent);\n\n dir\n\n };\n\n\n\n fs::create_dir_all(&dir)?;\n\n dir.push(path.file_name().unwrap());\n\n let mut file = File::create(&dir)?;\n\n io::copy(&mut entry, &mut file)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "yaml_test_runner/src/github.rs", "rank": 11, "score": 45534.59461226386 }, { "content": "fn write_test_file(\n\n test: YamlTests,\n\n relative_path: &Path,\n\n generated_dir: &PathBuf,\n\n) -> Result<(), failure::Error> {\n\n if test.skip_test(\"*\") {\n\n info!(\n\n r#\"skipping all tests in {} because it's included in skip.yml\"#,\n\n test.path,\n\n );\n\n return Ok(());\n\n }\n\n\n\n let mut path = test_file_path(relative_path)?;\n\n path = generated_dir.join(path);\n\n path.set_extension(\"rs\");\n\n\n\n fs::create_dir_all(&path.parent().unwrap())?;\n\n let mut file = File::create(&path)?;\n\n file.write_all(\n", "file_path": "yaml_test_runner/src/generator.rs", "rank": 12, "score": 45534.59461226386 }, { "content": "fn write_spec_file(\n\n download_dir: &PathBuf,\n\n mut entry: Entry<GzDecoder<Response>>,\n\n) -> Result<(), failure::Error> {\n\n let path = entry.path()?;\n\n let mut dir = download_dir.clone();\n\n dir.push(path.file_name().unwrap());\n\n let mut file = File::create(&dir)?;\n\n io::copy(&mut entry, &mut file)?;\n\n\n\n Ok(())\n\n}\n", "file_path": 
"api_generator/src/rest_spec/mod.rs", "rank": 13, "score": 44334.32646622222 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\nextern crate rustc_version;\n\nuse rustc_version::{version_meta, Channel};\n\n\n", "file_path": "elasticsearch/build.rs", "rank": 14, "score": 39791.845513921464 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. 
See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/ingest.rs", "rank": 15, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/indices.rs", "rank": 16, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. 
licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/ssl.rs", "rank": 17, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/xpack.rs", "rank": 18, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. 
See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/ml.rs", "rank": 19, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/nodes.rs", "rank": 20, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. 
under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/enrich.rs", "rank": 21, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. 
See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/sql.rs", "rank": 22, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/ilm.rs", "rank": 23, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. 
licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/tasks.rs", "rank": 24, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/transform.rs", "rank": 25, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. 
See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/slm.rs", "rank": 26, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/watcher.rs", "rank": 27, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. 
under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/license.rs", "rank": 28, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. 
See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/security.rs", "rank": 29, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/ccr.rs", "rank": 30, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. 
licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/graph.rs", "rank": 31, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/snapshot.rs", "rank": 32, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. 
See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/cat.rs", "rank": 33, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/migration.rs", "rank": 34, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. 
under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n\n\n// -----------------------------------------------\n", "file_path": "elasticsearch/src/cluster.rs", "rank": 35, "score": 38548.75173421724 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n//! These tests require a cluster configured with Security. One can be spun up using the\n\n//! 
.ci/run-elasticsearch.sh script as follows:\n", "file_path": "elasticsearch/tests/cert.rs", "rank": 36, "score": 38548.47006544079 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n//! Authentication components\n\n\n", "file_path": "elasticsearch/src/auth.rs", "rank": 37, "score": 38548.43120202463 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n//! 
Certificate components\n\n\n", "file_path": "elasticsearch/src/cert.rs", "rank": 38, "score": 38548.43120202463 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n//! API parameters\n\n\n", "file_path": "elasticsearch/src/params.rs", "rank": 39, "score": 38548.43120202463 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n//! 
Official Rust client for [Elasticsearch](https://www.elastic.co/products/elasticsearch)\n\n//!\n", "file_path": "elasticsearch/src/lib.rs", "rank": 40, "score": 38548.23428053496 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\npub mod common;\n\nuse common::*;\n", "file_path": "elasticsearch/tests/error.rs", "rank": 42, "score": 38547.95977172924 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. 
See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\npub mod common;\n\nuse common::*;\n", "file_path": "elasticsearch/tests/client.rs", "rank": 43, "score": 38547.95977172924 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\npub mod common;\n\nuse common::*;\n", "file_path": "elasticsearch/tests/auth.rs", "rank": 44, "score": 38547.95977172924 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. 
See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\nuse crate::{\n\n http::{headers::HeaderMap, request::Body, response::Response, transport::Transport, Method},\n", "file_path": "elasticsearch/src/client.rs", "rank": 45, "score": 38546.75477638327 }, { "content": "//! `Elasticsearch` is an official Rust client for Elasticsearch, providing an efficient asynchronous\n\n//! client for all stable Elasticsearch APIs that's easy to use.\n\n//!\n\n//! # Versions and Compatibility\n\n//!\n\n//! | Rust client | Elasticsearch |\n\n//! |-------------|---------------|\n\n//! | 7.x | 7.x |\n\n//!\n\n//! A major version of the client is compatible with the same major version of Elasticsearch.\n\n//! Since Elasticsearch is developed following [Semantic Versioning](https://semver.org/) principles,\n\n//! Any minor/patch version of the client can be used against any minor/patch version of Elasticsearch\n\n//! **within the same major version lineage**. For example,\n\n//!\n\n//! - A `7.5.0` client can be used against `7.0.0` Elasticsearch\n\n//! - A `7.4.0` client can be used against `7.5.1` Elasticsearch\n\n//!\n\n//! In the former case, a 7.5.0 client may contain additional API functions that are not available\n\n//! in 7.0.0 Elasticsearch. In this case, these APIs cannot be used, but for any APIs available in\n\n//! 
Elasticsearch, the respective API functions on the client will be compatible.\n", "file_path": "elasticsearch/src/lib.rs", "rank": 47, "score": 38517.00033941545 }, { "content": " ) -> IndicesGetDataStream<'a, 'b> {\n\n IndicesGetDataStream::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Get Field Mapping API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-get-field-mapping.html)\\n\\nReturns mapping for one or more fields.\"]\n\n pub fn get_field_mapping<'b>(\n\n &'a self,\n\n parts: IndicesGetFieldMappingParts<'b>,\n\n ) -> IndicesGetFieldMapping<'a, 'b> {\n\n IndicesGetFieldMapping::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Get Mapping API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-get-mapping.html)\\n\\nReturns mappings for one or more indices.\"]\n\n pub fn get_mapping<'b>(\n\n &'a self,\n\n parts: IndicesGetMappingParts<'b>,\n\n ) -> IndicesGetMapping<'a, 'b> {\n\n IndicesGetMapping::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Get Settings API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-get-settings.html)\\n\\nReturns settings for one or more indices.\"]\n\n pub fn get_settings<'b>(\n\n &'a self,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 48, "score": 38516.90716429531 }, { "content": " pub fn preview_transform<'b>(&'a self) -> TransformPreviewTransform<'a, 'b, ()> {\n\n TransformPreviewTransform::new(self.transport())\n\n }\n\n #[doc = \"[Transform Put Transform API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/put-transform.html)\\n\\nInstantiates a transform.\"]\n\n pub fn put_transform<'b>(\n\n &'a self,\n\n parts: TransformPutTransformParts<'b>,\n\n ) -> TransformPutTransform<'a, 'b, ()> {\n\n TransformPutTransform::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Transform Start Transform API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/start-transform.html)\\n\\nStarts one or more 
transforms.\"]\n\n pub fn start_transform<'b>(\n\n &'a self,\n\n parts: TransformStartTransformParts<'b>,\n\n ) -> TransformStartTransform<'a, 'b, ()> {\n\n TransformStartTransform::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Transform Stop Transform API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/stop-transform.html)\\n\\nStops one or more transforms.\"]\n\n pub fn stop_transform<'b>(\n\n &'a self,\n", "file_path": "elasticsearch/src/transform.rs", "rank": 49, "score": 38516.49457475033 }, { "content": "///\n\n/// ## Full validation\n\n///\n\n/// With Elasticsearch running at `https://example.com`, configured to use a certificate generated\n\n/// with your own Certificate Authority (CA), and where the certificate contains a CommonName (CN)\n\n/// or Subject Alternative Name (SAN) that matches the hostname of Elasticsearch\n\n#[cfg_attr(\n\n any(feature = \"native-tls\", feature = \"rustls-tls\"),\n\n doc = r##\"\n\n```rust,no_run\n\n# use elasticsearch::{\n\n# auth::Credentials,\n\n# cert::{Certificate,CertificateValidation},\n\n# Error, Elasticsearch,\n\n# http::transport::{TransportBuilder,SingleNodeConnectionPool},\n\n# };\n\n# use std::fs::File;\n\n# use std::io::Read;\n\n# use url::Url;\n\n# async fn doc() -> Result<(), Box<dyn std::error::Error>> {\n", "file_path": "elasticsearch/src/cert.rs", "rank": 50, "score": 38516.35816810159 }, { "content": " pub fn groups(mut self, groups: bool) -> Self {\n\n self.groups = Some(groups);\n\n self\n\n }\n\n #[doc = \"Adds a HTTP header\"]\n\n pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {\n\n self.headers.insert(key, value);\n\n self\n\n }\n\n #[doc = \"Return human readable values for statistics.\"]\n\n pub fn human(mut self, human: bool) -> Self {\n\n self.human = Some(human);\n\n self\n\n }\n\n #[doc = \"Whether to report the aggregated disk usage of each one of the Lucene index files (only applies if segment stats are requested)\"]\n\n pub fn 
include_segment_file_sizes(mut self, include_segment_file_sizes: bool) -> Self {\n\n self.include_segment_file_sizes = Some(include_segment_file_sizes);\n\n self\n\n }\n\n #[doc = \"Return indices stats aggregated at index, node or shard level\"]\n", "file_path": "elasticsearch/src/nodes.rs", "rank": 51, "score": 38516.29531192893 }, { "content": " #[doc = \"[Ml Start Datafeed API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-start-datafeed.html)\\n\\nStarts one or more datafeeds.\"]\n\n pub fn start_datafeed<'b>(\n\n &'a self,\n\n parts: MlStartDatafeedParts<'b>,\n\n ) -> MlStartDatafeed<'a, 'b, ()> {\n\n MlStartDatafeed::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Stop Datafeed API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-stop-datafeed.html)\\n\\nStops one or more datafeeds.\"]\n\n pub fn stop_datafeed<'b>(\n\n &'a self,\n\n parts: MlStopDatafeedParts<'b>,\n\n ) -> MlStopDatafeed<'a, 'b, ()> {\n\n MlStopDatafeed::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Update Datafeed API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-update-datafeed.html)\\n\\nUpdates certain properties of a datafeed.\"]\n\n pub fn update_datafeed<'b>(\n\n &'a self,\n\n parts: MlUpdateDatafeedParts<'b>,\n\n ) -> MlUpdateDatafeed<'a, 'b, ()> {\n\n MlUpdateDatafeed::new(self.transport(), parts)\n", "file_path": "elasticsearch/src/ml.rs", "rank": 52, "score": 38516.29605954949 }, { "content": " pub fn exists_template<'b>(\n\n &'a self,\n\n parts: IndicesExistsTemplateParts<'b>,\n\n ) -> IndicesExistsTemplate<'a, 'b> {\n\n IndicesExistsTemplate::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Exists Type API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-types-exists.html)\\n\\nReturns information about whether a particular document type exists. 
(DEPRECATED)\"]\n\n pub fn exists_type<'b>(\n\n &'a self,\n\n parts: IndicesExistsTypeParts<'b>,\n\n ) -> IndicesExistsType<'a, 'b> {\n\n IndicesExistsType::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Flush API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-flush.html)\\n\\nPerforms the flush operation on one or more indices.\"]\n\n pub fn flush<'b>(&'a self, parts: IndicesFlushParts<'b>) -> IndicesFlush<'a, 'b, ()> {\n\n IndicesFlush::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Forcemerge API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-forcemerge.html)\\n\\nPerforms the force merge operation on one or more indices.\"]\n\n pub fn forcemerge<'b>(\n\n &'a self,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 53, "score": 38516.2717657893 }, { "content": "///\n\n/// This requires the `native-tls` feature to be enabled.\n\n///\n\n/// With Elasticsearch running at `https://example.com`, configured to use a certificate generated\n\n/// with your own Certificate Authority (CA)\n\n#[cfg_attr(\n\n feature = \"native-tls\",\n\n doc = r##\"\n\n```rust,no_run\n\n# use elasticsearch::{\n\n# auth::Credentials,\n\n# cert::{Certificate,CertificateValidation},\n\n# Error, Elasticsearch,\n\n# http::transport::{TransportBuilder,SingleNodeConnectionPool},\n\n# };\n\n# use std::fs::File;\n\n# use std::io::Read;\n\n# use url::Url;\n\n# async fn doc() -> Result<(), Box<dyn std::error::Error>> {\n\nlet url = Url::parse(\"https://example.com\")?;\n", "file_path": "elasticsearch/src/cert.rs", "rank": 54, "score": 38516.179750505806 }, { "content": "//! .await?;\n\n//!\n\n//! let successful = response.status_code().is_success();\n\n//! # Ok(())\n\n//! # }\n\n//! ```\n\n//!\n\n//! For indexing multiple documents, the bulk API is a better option, allowing multiple operations\n\n//! to be sent in one API call\n\n//!\n\n//! ```rust,no_run\n\n//! 
# use elasticsearch::{auth::Credentials, Elasticsearch, Error, IndexParts, BulkParts, http::request::JsonBody};\n\n//! # use url::Url;\n\n//! # use serde_json::{json, Value};\n\n//! # #[tokio::main]\n\n//! # async fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n//! # let client = Elasticsearch::default();\n\n//! let mut body: Vec<JsonBody<_>> = Vec::with_capacity(4);\n\n//!\n\n//! // add the first operation and document\n", "file_path": "elasticsearch/src/lib.rs", "rank": 55, "score": 38516.0603764565 }, { "content": " }\n\n #[doc = \"Adds a HTTP header\"]\n\n pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {\n\n self.headers.insert(key, value);\n\n self\n\n }\n\n #[doc = \"Return human readable values for statistics.\"]\n\n pub fn human(mut self, human: bool) -> Self {\n\n self.human = Some(human);\n\n self\n\n }\n\n #[doc = \"Whether to report the aggregated disk usage of each one of the Lucene index files (only applies if segment stats are requested)\"]\n\n pub fn include_segment_file_sizes(mut self, include_segment_file_sizes: bool) -> Self {\n\n self.include_segment_file_sizes = Some(include_segment_file_sizes);\n\n self\n\n }\n\n #[doc = \"If set to true segment stats will include stats for segments that are not currently loaded into memory\"]\n\n pub fn include_unloaded_segments(mut self, include_unloaded_segments: bool) -> Self {\n\n self.include_unloaded_segments = Some(include_unloaded_segments);\n\n self\n", "file_path": "elasticsearch/src/indices.rs", "rank": 56, "score": 38515.99093896493 }, { "content": " ) -> SecurityHasPrivileges<'a, 'b, ()> {\n\n SecurityHasPrivileges::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Security Invalidate Api Key API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-invalidate-api-key.html)\\n\\nInvalidates one or more API keys.\"]\n\n pub fn invalidate_api_key<'b>(&'a self) -> SecurityInvalidateApiKey<'a, 'b, ()> {\n\n 
SecurityInvalidateApiKey::new(self.transport())\n\n }\n\n #[doc = \"[Security Invalidate Token API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-invalidate-token.html)\\n\\nInvalidates one or more access tokens or refresh tokens.\"]\n\n pub fn invalidate_token<'b>(&'a self) -> SecurityInvalidateToken<'a, 'b, ()> {\n\n SecurityInvalidateToken::new(self.transport())\n\n }\n\n #[doc = \"[Security Put Privileges API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-put-privileges.html)\\n\\nAdds or updates application privileges.\"]\n\n pub fn put_privileges<'b>(&'a self) -> SecurityPutPrivileges<'a, 'b, ()> {\n\n SecurityPutPrivileges::new(self.transport())\n\n }\n\n #[doc = \"[Security Put Role API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-put-role.html)\\n\\nAdds and updates roles in the native realm.\"]\n\n pub fn put_role<'b>(&'a self, parts: SecurityPutRoleParts<'b>) -> SecurityPutRole<'a, 'b, ()> {\n\n SecurityPutRole::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Security Put Role Mapping API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-put-role-mapping.html)\\n\\nCreates and updates role mappings.\"]\n", "file_path": "elasticsearch/src/security.rs", "rank": 57, "score": 38515.82074774696 }, { "content": "/// **Use on production clusters is strongly discouraged**\n\n///\n\n/// ```rust,no_run\n\n/// # use elasticsearch::{\n\n/// # auth::Credentials,\n\n/// # cert::{Certificate,CertificateValidation},\n\n/// # Error, Elasticsearch,\n\n/// # http::transport::{TransportBuilder,SingleNodeConnectionPool},\n\n/// # };\n\n/// # use std::fs::File;\n\n/// # use std::io::Read;\n\n/// # use url::Url;\n\n/// # async fn doc() -> Result<(), Box<dyn std::error::Error>> {\n\n/// let url = Url::parse(\"https://example.com\")?;\n\n/// let conn_pool = SingleNodeConnectionPool::new(url);\n\n/// let transport = TransportBuilder::new(conn_pool)\n\n/// 
.cert_validation(CertificateValidation::None)\n\n/// .build()?;\n\n/// let client = Elasticsearch::new(transport);\n\n/// let _response = client.ping().send().await?;\n", "file_path": "elasticsearch/src/cert.rs", "rank": 58, "score": 38515.797039971716 }, { "content": "// This file is generated, Please do not edit it manually.\n\n// Run the following in the root of the repo to regenerate:\n\n//\n\n// cargo make generate-api\n\n// -----------------------------------------------\n\n\n\n//! Task Management APIs\n\n//!\n\n//! [Manage tasks currently executing on one or more nodes in the cluster](https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html).\n\n\n\n# ! [ allow ( unused_imports ) ]use crate::{\n\n client::Elasticsearch,\n\n error::Error,\n\n http::{\n\n headers::{HeaderMap, HeaderName, HeaderValue, ACCEPT, CONTENT_TYPE},\n\n request::{Body, JsonBody, NdBody, PARTS_ENCODED},\n\n response::Response,\n\n transport::Transport,\n\n Method,\n\n },\n", "file_path": "elasticsearch/src/tasks.rs", "rank": 59, "score": 38515.50872631368 }, { "content": "// This file is generated, Please do not edit it manually.\n\n// Run the following in the root of the repo to regenerate:\n\n//\n\n// cargo make generate-api\n\n// -----------------------------------------------\n\n\n\n//! Migration APIs\n\n//!\n\n//! [Simplify upgrading X-Pack indices from one version to another](https://www.elastic.co/guide/en/elasticsearch/reference/master/migration-api.html).\n\n\n\n# ! 
[ allow ( unused_imports ) ]use crate::{\n\n client::Elasticsearch,\n\n error::Error,\n\n http::{\n\n headers::{HeaderMap, HeaderName, HeaderValue, ACCEPT, CONTENT_TYPE},\n\n request::{Body, JsonBody, NdBody, PARTS_ENCODED},\n\n response::Response,\n\n transport::Transport,\n\n Method,\n\n },\n", "file_path": "elasticsearch/src/migration.rs", "rank": 60, "score": 38515.47307973804 }, { "content": " }\n\n #[doc = \"[Snapshot Create API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/modules-snapshots.html)\\n\\nCreates a snapshot in a repository.\"]\n\n pub fn create<'b>(&'a self, parts: SnapshotCreateParts<'b>) -> SnapshotCreate<'a, 'b, ()> {\n\n SnapshotCreate::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Snapshot Create Repository API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/modules-snapshots.html)\\n\\nCreates a repository.\"]\n\n pub fn create_repository<'b>(\n\n &'a self,\n\n parts: SnapshotCreateRepositoryParts<'b>,\n\n ) -> SnapshotCreateRepository<'a, 'b, ()> {\n\n SnapshotCreateRepository::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Snapshot Delete API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/modules-snapshots.html)\\n\\nDeletes one or more snapshots.\"]\n\n pub fn delete<'b>(&'a self, parts: SnapshotDeleteParts<'b>) -> SnapshotDelete<'a, 'b> {\n\n SnapshotDelete::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Snapshot Delete Repository API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/modules-snapshots.html)\\n\\nDeletes a repository.\"]\n\n pub fn delete_repository<'b>(\n\n &'a self,\n\n parts: SnapshotDeleteRepositoryParts<'b>,\n", "file_path": "elasticsearch/src/snapshot.rs", "rank": 61, "score": 38515.2193556017 }, { "content": " #[doc = \"[Security Disable User API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-disable-user.html)\\n\\nDisables users in the native realm.\"]\n\n pub fn disable_user<'b>(\n\n &'a self,\n\n parts: 
SecurityDisableUserParts<'b>,\n\n ) -> SecurityDisableUser<'a, 'b, ()> {\n\n SecurityDisableUser::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Security Enable User API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-enable-user.html)\\n\\nEnables users in the native realm.\"]\n\n pub fn enable_user<'b>(\n\n &'a self,\n\n parts: SecurityEnableUserParts<'b>,\n\n ) -> SecurityEnableUser<'a, 'b, ()> {\n\n SecurityEnableUser::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Security Get Api Key API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-get-api-key.html)\\n\\nRetrieves information for one or more API keys.\"]\n\n pub fn get_api_key<'b>(&'a self) -> SecurityGetApiKey<'a, 'b> {\n\n SecurityGetApiKey::new(self.transport())\n\n }\n\n #[doc = \"[Security Get Builtin Privileges API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-get-builtin-privileges.html)\\n\\nRetrieves the list of cluster privileges and index privileges that are available in this version of Elasticsearch.\"]\n\n pub fn get_builtin_privileges<'b>(&'a self) -> SecurityGetBuiltinPrivileges<'a, 'b> {\n", "file_path": "elasticsearch/src/security.rs", "rank": 62, "score": 38514.83114205712 }, { "content": " pub fn repositories<'b>(&'a self) -> CatRepositories<'a, 'b> {\n\n CatRepositories::new(self.transport())\n\n }\n\n #[doc = \"[Cat Segments API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/cat-segments.html)\\n\\nProvides low-level information about the segments in the shards of an index.\"]\n\n pub fn segments<'b>(&'a self, parts: CatSegmentsParts<'b>) -> CatSegments<'a, 'b> {\n\n CatSegments::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Cat Shards API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/cat-shards.html)\\n\\nProvides a detailed view of shard allocation on nodes.\"]\n\n pub fn shards<'b>(&'a self, parts: CatShardsParts<'b>) -> CatShards<'a, 'b> {\n\n 
CatShards::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Cat Snapshots API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/cat-snapshots.html)\\n\\nReturns all snapshots in a specific repository.\"]\n\n pub fn snapshots<'b>(&'a self, parts: CatSnapshotsParts<'b>) -> CatSnapshots<'a, 'b> {\n\n CatSnapshots::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Cat Tasks API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/tasks.html)\\n\\nReturns information about the tasks currently executing on one or more nodes in the cluster.\"]\n\n pub fn tasks<'b>(&'a self) -> CatTasks<'a, 'b> {\n\n CatTasks::new(self.transport())\n\n }\n\n #[doc = \"[Cat Templates API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/cat-templates.html)\\n\\nReturns information about existing templates.\"]\n", "file_path": "elasticsearch/src/cat.rs", "rank": 63, "score": 38514.777359174695 }, { "content": " transport: &'a Transport,\n\n}\n\nimpl<'a> Snapshot<'a> {\n\n #[doc = \"Creates a new instance of [Snapshot]\"]\n\n pub fn new(transport: &'a Transport) -> Self {\n\n Self { transport }\n\n }\n\n pub fn transport(&self) -> &Transport {\n\n self.transport\n\n }\n\n #[doc = \"[Snapshot Cleanup Repository API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/clean-up-snapshot-repo-api.html)\\n\\nRemoves stale data from repository.\"]\n\n pub fn cleanup_repository<'b>(\n\n &'a self,\n\n parts: SnapshotCleanupRepositoryParts<'b>,\n\n ) -> SnapshotCleanupRepository<'a, 'b, ()> {\n\n SnapshotCleanupRepository::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Snapshot Clone API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/modules-snapshots.html)\\n\\nClones indices from one snapshot into another snapshot in the same repository.\"]\n\n pub fn clone<'b>(&'a self, parts: SnapshotCloneParts<'b>) -> SnapshotClone<'a, 'b, ()> {\n\n SnapshotClone::new(self.transport(), parts)\n", "file_path": "elasticsearch/src/snapshot.rs", "rank": 64, 
"score": 38514.75182959407 }, { "content": " self.transport\n\n }\n\n #[doc = \"[Indices Add Block API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/index-modules-blocks.html)\\n\\nAdds a block to an index.\"]\n\n pub fn add_block<'b>(&'a self, parts: IndicesAddBlockParts<'b>) -> IndicesAddBlock<'a, 'b, ()> {\n\n IndicesAddBlock::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Analyze API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-analyze.html)\\n\\nPerforms the analysis process on a text and return the tokens breakdown of the text.\"]\n\n pub fn analyze<'b>(&'a self, parts: IndicesAnalyzeParts<'b>) -> IndicesAnalyze<'a, 'b, ()> {\n\n IndicesAnalyze::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Clear Cache API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-clearcache.html)\\n\\nClears all or specific caches for one or more indices.\"]\n\n pub fn clear_cache<'b>(\n\n &'a self,\n\n parts: IndicesClearCacheParts<'b>,\n\n ) -> IndicesClearCache<'a, 'b, ()> {\n\n IndicesClearCache::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Clone API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-clone-index.html)\\n\\nClones an index\"]\n\n pub fn clone<'b>(&'a self, parts: IndicesCloneParts<'b>) -> IndicesClone<'a, 'b, ()> {\n\n IndicesClone::new(self.transport(), parts)\n", "file_path": "elasticsearch/src/indices.rs", "rank": 65, "score": 38514.74712814116 }, { "content": " #[doc = \"[Indices Put Template API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-templates.html)\\n\\nCreates or updates an index template.\"]\n\n pub fn put_template<'b>(\n\n &'a self,\n\n parts: IndicesPutTemplateParts<'b>,\n\n ) -> IndicesPutTemplate<'a, 'b, ()> {\n\n IndicesPutTemplate::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Recovery API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-recovery.html)\\n\\nReturns information 
about ongoing index shard recoveries.\"]\n\n pub fn recovery<'b>(&'a self, parts: IndicesRecoveryParts<'b>) -> IndicesRecovery<'a, 'b> {\n\n IndicesRecovery::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Refresh API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-refresh.html)\\n\\nPerforms the refresh operation in one or more indices.\"]\n\n pub fn refresh<'b>(&'a self, parts: IndicesRefreshParts<'b>) -> IndicesRefresh<'a, 'b, ()> {\n\n IndicesRefresh::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Reload Search Analyzers API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-reload-analyzers.html)\\n\\nReloads an index's search analyzers and their resources.\"]\n\n pub fn reload_search_analyzers<'b>(\n\n &'a self,\n\n parts: IndicesReloadSearchAnalyzersParts<'b>,\n\n ) -> IndicesReloadSearchAnalyzers<'a, 'b, ()> {\n", "file_path": "elasticsearch/src/indices.rs", "rank": 66, "score": 38514.74712814116 }, { "content": " &'a self,\n\n parts: MlGetCalendarsParts<'b>,\n\n ) -> MlGetCalendars<'a, 'b, ()> {\n\n MlGetCalendars::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Categories API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-category.html)\\n\\nRetrieves anomaly detection job results for one or more categories.\"]\n\n pub fn get_categories<'b>(\n\n &'a self,\n\n parts: MlGetCategoriesParts<'b>,\n\n ) -> MlGetCategories<'a, 'b, ()> {\n\n MlGetCategories::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Datafeed Stats API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-datafeed-stats.html)\\n\\nRetrieves usage information for datafeeds.\"]\n\n pub fn get_datafeed_stats<'b>(\n\n &'a self,\n\n parts: MlGetDatafeedStatsParts<'b>,\n\n ) -> MlGetDatafeedStats<'a, 'b> {\n\n MlGetDatafeedStats::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Datafeeds 
API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-datafeed.html)\\n\\nRetrieves configuration information for datafeeds.\"]\n", "file_path": "elasticsearch/src/ml.rs", "rank": 67, "score": 38514.725533124234 }, { "content": " #[doc = \"[Watcher Get Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-get-watch.html)\\n\\nRetrieves a watch by its ID.\"]\n\n pub fn get_watch<'b>(&'a self, parts: WatcherGetWatchParts<'b>) -> WatcherGetWatch<'a, 'b> {\n\n WatcherGetWatch::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Watcher Put Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-put-watch.html)\\n\\nCreates a new watch, or updates an existing one.\"]\n\n pub fn put_watch<'b>(&'a self, parts: WatcherPutWatchParts<'b>) -> WatcherPutWatch<'a, 'b, ()> {\n\n WatcherPutWatch::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Watcher Start API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-start.html)\\n\\nStarts Watcher if it is not already running.\"]\n\n pub fn start<'b>(&'a self) -> WatcherStart<'a, 'b, ()> {\n\n WatcherStart::new(self.transport())\n\n }\n\n #[doc = \"[Watcher Stats API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-stats.html)\\n\\nRetrieves the current Watcher metrics.\"]\n\n pub fn stats<'b>(&'a self, parts: WatcherStatsParts<'b>) -> WatcherStats<'a, 'b> {\n\n WatcherStats::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Watcher Stop API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-stop.html)\\n\\nStops Watcher if it is running.\"]\n\n pub fn stop<'b>(&'a self) -> WatcherStop<'a, 'b, ()> {\n\n WatcherStop::new(self.transport())\n\n }\n\n}\n\nimpl Elasticsearch {\n\n #[doc = \"Creates a namespace client for Watcher APIs\"]\n\n pub fn watcher(&self) -> Watcher {\n\n Watcher::new(self.transport())\n\n }\n\n}\n", "file_path": "elasticsearch/src/watcher.rs", "rank": 68, "score": 
38514.67160745366 }, { "content": " }\n\n}\n\n#[doc = \"Namespace client for Machine Learning APIs\"]\n\npub struct Ml<'a> {\n\n transport: &'a Transport,\n\n}\n\nimpl<'a> Ml<'a> {\n\n #[doc = \"Creates a new instance of [Ml]\"]\n\n pub fn new(transport: &'a Transport) -> Self {\n\n Self { transport }\n\n }\n\n pub fn transport(&self) -> &Transport {\n\n self.transport\n\n }\n\n #[doc = \"[Ml Close Job API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-close-job.html)\\n\\nCloses one or more anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle.\"]\n\n pub fn close_job<'b>(&'a self, parts: MlCloseJobParts<'b>) -> MlCloseJob<'a, 'b, ()> {\n\n MlCloseJob::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Delete Calendar API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-delete-calendar.html)\\n\\nDeletes a calendar.\"]\n\n pub fn delete_calendar<'b>(\n", "file_path": "elasticsearch/src/ml.rs", "rank": 69, "score": 38514.6619429474 }, { "content": "let conn_pool = SingleNodeConnectionPool::new(url);\n\n\n\n// load the CA certificate\n\nlet mut buf = Vec::new();\n\nFile::open(\"my_ca_cert.pem\")?\n\n .read_to_end(&mut buf)?;\n\nlet cert = Certificate::from_pem(&buf)?;\n\nlet transport = TransportBuilder::new(conn_pool)\n\n .cert_validation(CertificateValidation::Certificate(cert))\n\n .build()?;\n\nlet client = Elasticsearch::new(transport);\n\nlet _response = client.ping().send().await?;\n\n# Ok(())\n\n# }\n\n```\n\n\"##\n\n)]\n\n/// ## No validation\n\n///\n\n/// No validation is performed on the certificate provided by the server.\n", "file_path": "elasticsearch/src/cert.rs", "rank": 70, "score": 38514.65745067983 }, { "content": "let url = Url::parse(\"https://example.com\")?;\n\nlet conn_pool = SingleNodeConnectionPool::new(url);\n\n\n\n// load the CA certificate\n\nlet mut buf = Vec::new();\n\nFile::open(\"my_ca_cert.pem\")?\n\n .read_to_end(&mut buf)?;\n\nlet cert = 
Certificate::from_pem(&buf)?;\n\n\n\nlet transport = TransportBuilder::new(conn_pool)\n\n .cert_validation(CertificateValidation::Full(cert))\n\n .build()?;\n\nlet client = Elasticsearch::new(transport);\n\nlet _response = client.ping().send().await?;\n\n# Ok(())\n\n# }\n\n```\n\n\"##\n\n)]\n\n/// ## Certificate validation\n", "file_path": "elasticsearch/src/cert.rs", "rank": 71, "score": 38514.57313453997 }, { "content": " parts: IndicesForcemergeParts<'b>,\n\n ) -> IndicesForcemerge<'a, 'b, ()> {\n\n IndicesForcemerge::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Freeze API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/freeze-index-api.html)\\n\\nFreezes an index. A frozen index has almost no overhead on the cluster (except for maintaining its metadata in memory) and is read-only.\"]\n\n pub fn freeze<'b>(&'a self, parts: IndicesFreezeParts<'b>) -> IndicesFreeze<'a, 'b, ()> {\n\n IndicesFreeze::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Get API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-get-index.html)\\n\\nReturns information about one or more indices.\"]\n\n pub fn get<'b>(&'a self, parts: IndicesGetParts<'b>) -> IndicesGet<'a, 'b> {\n\n IndicesGet::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Get Alias API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-aliases.html)\\n\\nReturns an alias.\"]\n\n pub fn get_alias<'b>(&'a self, parts: IndicesGetAliasParts<'b>) -> IndicesGetAlias<'a, 'b> {\n\n IndicesGetAlias::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Indices Get Data Stream API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/data-streams.html)\\n\\nReturns data streams.\"]\n\n pub fn get_data_stream<'b>(\n\n &'a self,\n\n parts: IndicesGetDataStreamParts<'b>,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 72, "score": 38514.569235584335 }, { "content": " #[doc = \"[Ml Info 
API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/get-ml-info.html)\\n\\nReturns defaults and limits used by machine learning.\"]\n\n pub fn info<'b>(&'a self) -> MlInfo<'a, 'b> {\n\n MlInfo::new(self.transport())\n\n }\n\n #[doc = \"[Ml Open Job API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-open-job.html)\\n\\nOpens one or more anomaly detection jobs.\"]\n\n pub fn open_job<'b>(&'a self, parts: MlOpenJobParts<'b>) -> MlOpenJob<'a, 'b, ()> {\n\n MlOpenJob::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Post Calendar Events API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-post-calendar-event.html)\\n\\nPosts scheduled events in a calendar.\"]\n\n pub fn post_calendar_events<'b>(\n\n &'a self,\n\n parts: MlPostCalendarEventsParts<'b>,\n\n ) -> MlPostCalendarEvents<'a, 'b, ()> {\n\n MlPostCalendarEvents::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Post Data API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-post-data.html)\\n\\nSends data to an anomaly detection job for analysis.\"]\n\n pub fn post_data<'b>(&'a self, parts: MlPostDataParts<'b>) -> MlPostData<'a, 'b, ()> {\n\n MlPostData::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Preview Datafeed API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-preview-datafeed.html)\\n\\nPreviews a datafeed.\"]\n", "file_path": "elasticsearch/src/ml.rs", "rank": 73, "score": 38514.536263445414 }, { "content": " pub fn flush_job<'b>(&'a self, parts: MlFlushJobParts<'b>) -> MlFlushJob<'a, 'b, ()> {\n\n MlFlushJob::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Forecast API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-forecast.html)\\n\\nPredicts the future behavior of a time series by using its historical behavior.\"]\n\n pub fn forecast<'b>(&'a self, parts: MlForecastParts<'b>) -> MlForecast<'a, 'b, ()> {\n\n MlForecast::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Buckets 
API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-bucket.html)\\n\\nRetrieves anomaly detection job results for one or more buckets.\"]\n\n pub fn get_buckets<'b>(&'a self, parts: MlGetBucketsParts<'b>) -> MlGetBuckets<'a, 'b, ()> {\n\n MlGetBuckets::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Calendar Events API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-calendar-event.html)\\n\\nRetrieves information about the scheduled events in calendars.\"]\n\n pub fn get_calendar_events<'b>(\n\n &'a self,\n\n parts: MlGetCalendarEventsParts<'b>,\n\n ) -> MlGetCalendarEvents<'a, 'b> {\n\n MlGetCalendarEvents::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Calendars API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-calendar.html)\\n\\nRetrieves configuration information for calendars.\"]\n\n pub fn get_calendars<'b>(\n", "file_path": "elasticsearch/src/ml.rs", "rank": 74, "score": 38514.326053202785 }, { "content": " &'a self,\n\n parts: SlmDeleteLifecycleParts<'b>,\n\n ) -> SlmDeleteLifecycle<'a, 'b> {\n\n SlmDeleteLifecycle::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Slm Execute Lifecycle API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/slm-api-execute-lifecycle.html)\\n\\nImmediately creates a snapshot according to the lifecycle policy, without waiting for the scheduled time.\"]\n\n pub fn execute_lifecycle<'b>(\n\n &'a self,\n\n parts: SlmExecuteLifecycleParts<'b>,\n\n ) -> SlmExecuteLifecycle<'a, 'b, ()> {\n\n SlmExecuteLifecycle::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Slm Execute Retention API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/slm-api-execute-retention.html)\\n\\nDeletes any snapshots that are expired according to the policy's retention rules.\"]\n\n pub fn execute_retention<'b>(&'a self) -> SlmExecuteRetention<'a, 'b, ()> {\n\n SlmExecuteRetention::new(self.transport())\n\n }\n\n #[doc = \"[Slm Get Lifecycle 
API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/slm-api-get-policy.html)\\n\\nRetrieves one or more snapshot lifecycle policy definitions and information about the latest snapshot attempts.\"]\n\n pub fn get_lifecycle<'b>(&'a self, parts: SlmGetLifecycleParts<'b>) -> SlmGetLifecycle<'a, 'b> {\n\n SlmGetLifecycle::new(self.transport(), parts)\n\n }\n", "file_path": "elasticsearch/src/slm.rs", "rank": 75, "score": 38514.27415170819 }, { "content": " p.push_str(encoded_repository.as_ref());\n\n p.push_str(\"/\");\n\n p.push_str(encoded_snapshot.as_ref());\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Snapshot Delete API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/modules-snapshots.html)\\n\\nDeletes one or more snapshots.\"]\n\npub struct SnapshotDelete<'a, 'b> {\n\n transport: &'a Transport,\n\n parts: SnapshotDeleteParts<'b>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n master_timeout: Option<&'b str>,\n\n pretty: Option<bool>,\n\n request_timeout: Option<Duration>,\n", "file_path": "elasticsearch/src/snapshot.rs", "rank": 76, "score": 38514.26917306973 }, { "content": "\n\nuse elasticsearch::auth::Credentials;\n\n\n\nuse base64::{self, write::EncoderWriter as Base64Encoder};\n\n// use std::fs::File;\n\n// use std::io::Read;\n\nuse std::io::Write;\n\n\n\n#[tokio::test]\n\nasync fn basic_auth_header() -> Result<(), failure::Error> {\n\n let server = server::http(move |req| async move {\n\n let mut header_value = b\"Basic \".to_vec();\n\n {\n\n let mut encoder = Base64Encoder::new(&mut header_value, base64::STANDARD);\n\n write!(encoder, \"username:password\").unwrap();\n\n }\n\n\n\n assert_eq!(\n\n req.headers()[\"authorization\"],\n\n String::from_utf8(header_value).unwrap()\n", "file_path": "elasticsearch/tests/auth.rs", "rank": 77, "score": 38514.17424899578 }, { "content": " pub fn get_datafeeds<'b>(&'a 
self, parts: MlGetDatafeedsParts<'b>) -> MlGetDatafeeds<'a, 'b> {\n\n MlGetDatafeeds::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Filters API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-filter.html)\\n\\nRetrieves filters.\"]\n\n pub fn get_filters<'b>(&'a self, parts: MlGetFiltersParts<'b>) -> MlGetFilters<'a, 'b> {\n\n MlGetFilters::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Influencers API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-influencer.html)\\n\\nRetrieves anomaly detection job results for one or more influencers.\"]\n\n pub fn get_influencers<'b>(\n\n &'a self,\n\n parts: MlGetInfluencersParts<'b>,\n\n ) -> MlGetInfluencers<'a, 'b, ()> {\n\n MlGetInfluencers::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Job Stats API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-job-stats.html)\\n\\nRetrieves usage information for anomaly detection jobs.\"]\n\n pub fn get_job_stats<'b>(&'a self, parts: MlGetJobStatsParts<'b>) -> MlGetJobStats<'a, 'b> {\n\n MlGetJobStats::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ml Get Jobs API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-job.html)\\n\\nRetrieves configuration information for anomaly detection jobs.\"]\n\n pub fn get_jobs<'b>(&'a self, parts: MlGetJobsParts<'b>) -> MlGetJobs<'a, 'b> {\n", "file_path": "elasticsearch/src/ml.rs", "rank": 78, "score": 38514.13188822527 }, { "content": " p.push_str(\"/\");\n\n p.push_str(encoded_snapshot.as_ref());\n\n p.push_str(\"/_clone/\");\n\n p.push_str(encoded_target_snapshot.as_ref());\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Snapshot Clone API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/modules-snapshots.html)\\n\\nClones indices from one snapshot into another snapshot in the same repository.\"]\n\npub struct SnapshotClone<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: 
SnapshotCloneParts<'b>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n master_timeout: Option<&'b str>,\n", "file_path": "elasticsearch/src/snapshot.rs", "rank": 79, "score": 38514.07826124842 }, { "content": " p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Ml Get Buckets API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-bucket.html)\\n\\nRetrieves anomaly detection job results for one or more buckets.\"]\n\npub struct MlGetBuckets<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: MlGetBucketsParts<'b>,\n\n anomaly_score: Option<f64>,\n\n body: Option<B>,\n\n desc: Option<bool>,\n\n end: Option<&'b str>,\n\n error_trace: Option<bool>,\n\n exclude_interim: Option<bool>,\n\n expand: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n from: Option<i32>,\n\n headers: HeaderMap,\n", "file_path": "elasticsearch/src/ml.rs", "rank": 80, "score": 38514.07826124842 }, { "content": " match self {\n\n CatTasksParts::None => \"/_cat/tasks\".into(),\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Cat Tasks API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/tasks.html)\\n\\nReturns information about the tasks currently executing on one or more nodes in the cluster.\"]\n\npub struct CatTasks<'a, 'b> {\n\n transport: &'a Transport,\n\n parts: CatTasksParts,\n\n actions: Option<&'b [&'b str]>,\n\n detailed: Option<bool>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n format: Option<&'b str>,\n\n h: Option<&'b [&'b str]>,\n\n headers: HeaderMap,\n\n help: Option<bool>,\n\n human: Option<bool>,\n\n node_id: Option<&'b [&'b str]>,\n", "file_path": "elasticsearch/src/cat.rs", "rank": 81, "score": 38514.00425231732 }, { "content": " let mut p = String::with_capacity(19usize + encoded_transform_id.len());\n\n p.push_str(\"/_transform/\");\n\n 
p.push_str(encoded_transform_id.as_ref());\n\n p.push_str(\"/_start\");\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Transform Start Transform API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/start-transform.html)\\n\\nStarts one or more transforms.\"]\n\npub struct TransformStartTransform<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: TransformStartTransformParts<'b>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n pretty: Option<bool>,\n", "file_path": "elasticsearch/src/transform.rs", "rank": 82, "score": 38513.96773494572 }, { "content": "#[doc = \"Builder for the [Ml Start Datafeed API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-start-datafeed.html)\\n\\nStarts one or more datafeeds.\"]\n\npub struct MlStartDatafeed<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: MlStartDatafeedParts<'b>,\n\n body: Option<B>,\n\n end: Option<&'b str>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n pretty: Option<bool>,\n\n request_timeout: Option<Duration>,\n\n source: Option<&'b str>,\n\n start: Option<&'b str>,\n\n timeout: Option<&'b str>,\n\n}\n\nimpl<'a, 'b, B> MlStartDatafeed<'a, 'b, B>\n\nwhere\n\n B: Body,\n\n{\n", "file_path": "elasticsearch/src/ml.rs", "rank": 83, "score": 38513.931536629054 }, { "content": " percent_encode(index_str.as_bytes(), PARTS_ENCODED).into();\n\n let mut p = String::with_capacity(10usize + encoded_index.len());\n\n p.push_str(\"/\");\n\n p.push_str(encoded_index.as_ref());\n\n p.push_str(\"/_refresh\");\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Indices Refresh API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-refresh.html)\\n\\nPerforms the refresh operation in one or more indices.\"]\n\npub struct 
IndicesRefresh<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: IndicesRefreshParts<'b>,\n\n allow_no_indices: Option<bool>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n expand_wildcards: Option<&'b [ExpandWildcards]>,\n\n filter_path: Option<&'b [&'b str]>,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 84, "score": 38513.86008057975 }, { "content": " }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Security Invalidate Api Key API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-invalidate-api-key.html)\\n\\nInvalidates one or more API keys.\"]\n\npub struct SecurityInvalidateApiKey<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: SecurityInvalidateApiKeyParts,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n pretty: Option<bool>,\n\n request_timeout: Option<Duration>,\n\n source: Option<&'b str>,\n\n}\n\nimpl<'a, 'b, B> SecurityInvalidateApiKey<'a, 'b, B>\n\nwhere\n\n B: Body,\n\n{\n", "file_path": "elasticsearch/src/security.rs", "rank": 85, "score": 38513.86008057975 }, { "content": " let index_str = index.join(\",\");\n\n let encoded_index: Cow<str> =\n\n percent_encode(index_str.as_bytes(), PARTS_ENCODED).into();\n\n let mut p = String::with_capacity(1usize + encoded_index.len());\n\n p.push_str(\"/\");\n\n p.push_str(encoded_index.as_ref());\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Indices Get API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-get-index.html)\\n\\nReturns information about one or more indices.\"]\n\npub struct IndicesGet<'a, 'b> {\n\n transport: &'a Transport,\n\n parts: IndicesGetParts<'b>,\n\n allow_no_indices: Option<bool>,\n\n error_trace: Option<bool>,\n\n expand_wildcards: Option<&'b [ExpandWildcards]>,\n\n filter_path: Option<&'b [&'b str]>,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 
86, "score": 38513.82481473546 }, { "content": " }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Watcher Put Watch API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/watcher-api-put-watch.html)\\n\\nCreates a new watch, or updates an existing one.\"]\n\npub struct WatcherPutWatch<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: WatcherPutWatchParts<'b>,\n\n active: Option<bool>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n if_primary_term: Option<i64>,\n\n if_seq_no: Option<i64>,\n\n pretty: Option<bool>,\n\n request_timeout: Option<Duration>,\n\n source: Option<&'b str>,\n\n version: Option<i64>,\n", "file_path": "elasticsearch/src/watcher.rs", "rank": 87, "score": 38513.789851719754 }, { "content": " Ok(response)\n\n }\n\n}\n\n#[derive(Debug, Clone, PartialEq)]\n\n#[doc = \"API parts for the Security Invalidate Token API\"]\n\npub enum SecurityInvalidateTokenParts {\n\n #[doc = \"No parts\"]\n\n None,\n\n}\n\nimpl SecurityInvalidateTokenParts {\n\n #[doc = \"Builds a relative URL path to the Security Invalidate Token API\"]\n\n pub fn url(self) -> Cow<'static, str> {\n\n match self {\n\n SecurityInvalidateTokenParts::None => \"/_security/oauth2/token\".into(),\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Security Invalidate Token API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/security-api-invalidate-token.html)\\n\\nInvalidates one or more access tokens or refresh tokens.\"]\n\npub struct SecurityInvalidateToken<'a, 'b, B> {\n", "file_path": "elasticsearch/src/security.rs", "rank": 88, "score": 38513.789851719754 }, { "content": " }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Indices Forcemerge API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-forcemerge.html)\\n\\nPerforms the force merge operation on one or more indices.\"]\n\npub 
struct IndicesForcemerge<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: IndicesForcemergeParts<'b>,\n\n allow_no_indices: Option<bool>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n expand_wildcards: Option<&'b [ExpandWildcards]>,\n\n filter_path: Option<&'b [&'b str]>,\n\n flush: Option<bool>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n ignore_unavailable: Option<bool>,\n\n max_num_segments: Option<i64>,\n\n only_expunge_deletes: Option<bool>,\n\n pretty: Option<bool>,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 89, "score": 38513.789851719754 }, { "content": " }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Transform Stop Transform API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/stop-transform.html)\\n\\nStops one or more transforms.\"]\n\npub struct TransformStopTransform<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: TransformStopTransformParts<'b>,\n\n allow_no_match: Option<bool>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n force: Option<bool>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n pretty: Option<bool>,\n\n request_timeout: Option<Duration>,\n\n source: Option<&'b str>,\n\n timeout: Option<&'b str>,\n\n wait_for_checkpoint: Option<bool>,\n\n wait_for_completion: Option<bool>,\n", "file_path": "elasticsearch/src/transform.rs", "rank": 90, "score": 38513.789851719754 }, { "content": " parts: CcrPauseAutoFollowPatternParts<'b>,\n\n ) -> CcrPauseAutoFollowPattern<'a, 'b, ()> {\n\n CcrPauseAutoFollowPattern::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ccr Pause Follow API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ccr-post-pause-follow.html)\\n\\nPauses a follower index. 
The follower index will not fetch any additional operations from the leader index.\"]\n\n pub fn pause_follow<'b>(\n\n &'a self,\n\n parts: CcrPauseFollowParts<'b>,\n\n ) -> CcrPauseFollow<'a, 'b, ()> {\n\n CcrPauseFollow::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ccr Put Auto Follow Pattern API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ccr-put-auto-follow-pattern.html)\\n\\nCreates a new named collection of auto-follow patterns against a specified remote cluster. Newly created indices on the remote cluster matching any of the specified patterns will be automatically configured as follower indices.\"]\n\n pub fn put_auto_follow_pattern<'b>(\n\n &'a self,\n\n parts: CcrPutAutoFollowPatternParts<'b>,\n\n ) -> CcrPutAutoFollowPattern<'a, 'b, ()> {\n\n CcrPutAutoFollowPattern::new(self.transport(), parts)\n\n }\n\n #[doc = \"[Ccr Resume Auto Follow Pattern API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ccr-resume-auto-follow-pattern.html)\\n\\nResumes an auto-follow pattern that has been paused\"]\n\n pub fn resume_auto_follow_pattern<'b>(\n", "file_path": "elasticsearch/src/ccr.rs", "rank": 91, "score": 38513.76442040004 }, { "content": " percent_encode(index_str.as_bytes(), PARTS_ENCODED).into();\n\n let mut p = String::with_capacity(14usize + encoded_index.len());\n\n p.push_str(\"/\");\n\n p.push_str(encoded_index.as_ref());\n\n p.push_str(\"/_cache/clear\");\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Indices Clear Cache API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-clearcache.html)\\n\\nClears all or specific caches for one or more indices.\"]\n\npub struct IndicesClearCache<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: IndicesClearCacheParts<'b>,\n\n allow_no_indices: Option<bool>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n expand_wildcards: Option<&'b [ExpandWildcards]>,\n\n fielddata: Option<bool>,\n", "file_path": 
"elasticsearch/src/indices.rs", "rank": 92, "score": 38513.755187648705 }, { "content": " }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Indices Get Field Mapping API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-get-field-mapping.html)\\n\\nReturns mapping for one or more fields.\"]\n\npub struct IndicesGetFieldMapping<'a, 'b> {\n\n transport: &'a Transport,\n\n parts: IndicesGetFieldMappingParts<'b>,\n\n allow_no_indices: Option<bool>,\n\n error_trace: Option<bool>,\n\n expand_wildcards: Option<&'b [ExpandWildcards]>,\n\n filter_path: Option<&'b [&'b str]>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n ignore_unavailable: Option<bool>,\n\n include_defaults: Option<bool>,\n\n local: Option<bool>,\n\n pretty: Option<bool>,\n\n request_timeout: Option<Duration>,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 93, "score": 38513.755187648705 }, { "content": " }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Indices Flush API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-flush.html)\\n\\nPerforms the flush operation on one or more indices.\"]\n\npub struct IndicesFlush<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: IndicesFlushParts<'b>,\n\n allow_no_indices: Option<bool>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n expand_wildcards: Option<&'b [ExpandWildcards]>,\n\n filter_path: Option<&'b [&'b str]>,\n\n force: Option<bool>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n\n ignore_unavailable: Option<bool>,\n\n pretty: Option<bool>,\n\n request_timeout: Option<Duration>,\n\n source: Option<&'b str>,\n\n wait_if_ongoing: Option<bool>,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 94, "score": 38513.755187648705 }, { "content": " p.push_str(encoded_job_id.as_ref());\n\n p.push_str(\"/_close\");\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Ml Close Job 
API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-close-job.html)\\n\\nCloses one or more anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle.\"]\n\npub struct MlCloseJob<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: MlCloseJobParts<'b>,\n\n allow_no_jobs: Option<bool>,\n\n allow_no_match: Option<bool>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n force: Option<bool>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n", "file_path": "elasticsearch/src/ml.rs", "rank": 95, "score": 38513.68674113419 }, { "content": " IndicesGetMappingParts::None => \"/_mapping\".into(),\n\n IndicesGetMappingParts::Index(ref index) => {\n\n let index_str = index.join(\",\");\n\n let encoded_index: Cow<str> =\n\n percent_encode(index_str.as_bytes(), PARTS_ENCODED).into();\n\n let mut p = String::with_capacity(10usize + encoded_index.len());\n\n p.push_str(\"/\");\n\n p.push_str(encoded_index.as_ref());\n\n p.push_str(\"/_mapping\");\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Indices Get Mapping API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-get-mapping.html)\\n\\nReturns mappings for one or more indices.\"]\n\npub struct IndicesGetMapping<'a, 'b> {\n\n transport: &'a Transport,\n\n parts: IndicesGetMappingParts<'b>,\n\n allow_no_indices: Option<bool>,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 96, "score": 38513.61944541826 }, { "content": " IndicesGetSettingsParts::Name(ref name) => {\n\n let name_str = name.join(\",\");\n\n let encoded_name: Cow<str> =\n\n percent_encode(name_str.as_bytes(), PARTS_ENCODED).into();\n\n let mut p = String::with_capacity(11usize + encoded_name.len());\n\n p.push_str(\"/_settings/\");\n\n p.push_str(encoded_name.as_ref());\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Indices Get Settings 
API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/indices-get-settings.html)\\n\\nReturns settings for one or more indices.\"]\n\npub struct IndicesGetSettings<'a, 'b> {\n\n transport: &'a Transport,\n\n parts: IndicesGetSettingsParts<'b>,\n\n allow_no_indices: Option<bool>,\n\n error_trace: Option<bool>,\n\n expand_wildcards: Option<&'b [ExpandWildcards]>,\n", "file_path": "elasticsearch/src/indices.rs", "rank": 97, "score": 38513.586220078134 }, { "content": " percent_encode(index.as_bytes(), PARTS_ENCODED).into();\n\n let mut p = String::with_capacity(19usize + encoded_index.len());\n\n p.push_str(\"/\");\n\n p.push_str(encoded_index.as_ref());\n\n p.push_str(\"/_ccr/pause_follow\");\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Ccr Pause Follow API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ccr-post-pause-follow.html)\\n\\nPauses a follower index. The follower index will not fetch any additional operations from the leader index.\"]\n\npub struct CcrPauseFollow<'a, 'b, B> {\n\n transport: &'a Transport,\n\n parts: CcrPauseFollowParts<'b>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n headers: HeaderMap,\n\n human: Option<bool>,\n", "file_path": "elasticsearch/src/ccr.rs", "rank": 98, "score": 38513.56487310505 }, { "content": " let encoded_job_id: Cow<str> =\n\n percent_encode(job_id.as_bytes(), PARTS_ENCODED).into();\n\n let mut p = String::with_capacity(43usize + encoded_job_id.len());\n\n p.push_str(\"/_ml/anomaly_detectors/\");\n\n p.push_str(encoded_job_id.as_ref());\n\n p.push_str(\"/results/categories/\");\n\n p.into()\n\n }\n\n }\n\n }\n\n}\n\n#[derive(Clone, Debug)]\n\n#[doc = \"Builder for the [Ml Get Categories API](https://www.elastic.co/guide/en/elasticsearch/reference/8.0/ml-get-category.html)\\n\\nRetrieves anomaly detection job results for one or more categories.\"]\n\npub struct MlGetCategories<'a, 'b, B> {\n\n 
transport: &'a Transport,\n\n parts: MlGetCategoriesParts<'b>,\n\n body: Option<B>,\n\n error_trace: Option<bool>,\n\n filter_path: Option<&'b [&'b str]>,\n\n from: Option<i32>,\n", "file_path": "elasticsearch/src/ml.rs", "rank": 99, "score": 38513.5205968951 } ]
Rust
internal/wasmldr/src/workload.rs
rohankumardubey/enarx
27300ba494f463bf077fb1f8d326057412fa58ae
use log::{debug, info}; use wasmtime_wasi::sync::WasiCtxBuilder; #[allow(clippy::enum_variant_names)] #[derive(Debug)] pub enum Error { ConfigurationError, ExportNotFound, InstantiationFailed, CallFailed, IoError(std::io::Error), WASIError(wasmtime_wasi::Error), StringTableError, } impl From<std::io::Error> for Error { fn from(err: std::io::Error) -> Self { Self::IoError(err) } } impl From<wasmtime_wasi::Error> for Error { fn from(err: wasmtime_wasi::Error) -> Self { Self::WASIError(err) } } impl From<Error> for i32 { fn from(err: Error) -> Self { use Error::*; match err { ConfigurationError => 65, StringTableError => 65, InstantiationFailed => 65, ExportNotFound => 65, CallFailed => 65, WASIError(_) => 70, IoError(_) => 74, } } } pub type Result<T> = std::result::Result<T, Error>; pub fn run<T: AsRef<str>, U: AsRef<str>>( bytes: impl AsRef<[u8]>, args: impl IntoIterator<Item = T>, envs: impl IntoIterator<Item = (U, U)>, ) -> Result<Box<[wasmtime::Val]>> { debug!("configuring wasmtime engine"); let mut config = wasmtime::Config::new(); config.wasm_module_linking(true); config.wasm_multi_memory(true); config.static_memory_maximum_size(0); config.static_memory_guard_size(0); config.dynamic_memory_guard_size(0); config.dynamic_memory_reserved_for_growth(0); let engine = wasmtime::Engine::new(&config).or(Err(Error::ConfigurationError))?; debug!("instantiating wasmtime linker"); let mut linker = wasmtime::Linker::new(&engine); debug!("adding WASI to linker"); wasmtime_wasi::add_to_linker(&mut linker, |s| s)?; debug!("creating WASI context"); let mut wasi = WasiCtxBuilder::new(); for arg in args { wasi = wasi.arg(arg.as_ref()).or(Err(Error::StringTableError))?; } for kv in envs { wasi = wasi .env(kv.0.as_ref(), kv.1.as_ref()) .or(Err(Error::StringTableError))?; } info!("inheriting stdio from calling process"); wasi = wasi.inherit_stdio(); debug!("creating wasmtime Store"); let mut store = wasmtime::Store::new(&engine, wasi.build()); debug!("instantiating module from 
bytes"); let module = wasmtime::Module::from_binary(&engine, bytes.as_ref())?; debug!("adding module to store"); linker .module(&mut store, "", &module) .or(Err(Error::InstantiationFailed))?; debug!("getting module's default function"); let func = linker .get_default(&mut store, "") .or(Err(Error::ExportNotFound))?; debug!("calling function"); func.call(store, Default::default()) .or(Err(Error::CallFailed)) } #[cfg(test)] pub(crate) mod test { use crate::workload; use std::iter::empty; const NO_EXPORT_WAT: &'static str = r#"(module (memory (export "") 1) )"#; const RETURN_1_WAT: &'static str = r#"(module (func (export "") (result i32) i32.const 1) )"#; const WASI_COUNT_ARGS_WAT: &'static str = r#"(module (import "wasi_snapshot_preview1" "args_sizes_get" (func $__wasi_args_sizes_get (param i32 i32) (result i32))) (func (export "_start") (result i32) (i32.store (i32.const 0) (i32.const 0)) (i32.store (i32.const 4) (i32.const 0)) (call $__wasi_args_sizes_get (i32.const 0) (i32.const 4)) drop (i32.load (i32.const 0)) ) (memory 1) (export "memory" (memory 0)) )"#; const HELLO_WASI_WAT: &'static str = r#"(module (import "wasi_snapshot_preview1" "proc_exit" (func $__wasi_proc_exit (param i32))) (import "wasi_snapshot_preview1" "fd_write" (func $__wasi_fd_write (param i32 i32 i32 i32) (result i32))) (func $_start (i32.store (i32.const 24) (i32.const 14)) (i32.store (i32.const 20) (i32.const 0)) (block (br_if 0 (call $__wasi_fd_write (i32.const 1) (i32.const 20) (i32.const 1) (i32.const 16))) (br_if 0 (i32.ne (i32.load (i32.const 16)) (i32.const 14))) (br 1) ) (call $__wasi_proc_exit (i32.const 1)) ) (memory 1) (export "memory" (memory 0)) (export "_start" (func $_start)) (data (i32.const 0) "Hello, world!\0a") )"#; #[test] fn workload_run_return_1() { let bytes = wat::parse_str(RETURN_1_WAT).expect("error parsing wat"); let results: Vec<i32> = workload::run(&bytes, empty::<String>(), empty::<(String, String)>()) .unwrap() .iter() .map(|v| v.unwrap_i32()) .collect(); 
assert_eq!(results, vec![1]); } #[test] fn workload_run_no_export() { let bytes = wat::parse_str(NO_EXPORT_WAT).expect("error parsing wat"); match workload::run(&bytes, empty::<String>(), empty::<(String, String)>()) { Err(workload::Error::ExportNotFound) => {} _ => panic!("unexpected error"), }; } #[test] fn workload_run_wasi_count_args() { let bytes = wat::parse_str(WASI_COUNT_ARGS_WAT).expect("error parsing wat"); let results: Vec<i32> = workload::run( &bytes, vec!["a".to_string(), "b".to_string(), "c".to_string()], vec![("k", "v")], ) .unwrap() .iter() .map(|v| v.unwrap_i32()) .collect(); assert_eq!(results, vec![3]); } #[test] fn workload_run_hello_wasi() { let bytes = wat::parse_str(HELLO_WASI_WAT).expect("error parsing wat"); let args: Vec<String> = vec![]; let envs: Vec<(String, String)> = vec![]; let results = workload::run(&bytes, args, envs).unwrap(); assert_eq!(results.len(), 0); } }
use log::{debug, info}; use wasmtime_wasi::sync::WasiCtxBuilder; #[allow(clippy::enum_variant_names)] #[derive(Debug)] pub enum Error { ConfigurationError, ExportNotFound, InstantiationFailed, CallFailed, IoError(std::io::Error), WASIError(wasmtime_wasi::Error), StringTableError, } impl From<std::io::Error> for Error { fn from(err: std::io::Error) -> Self { Self::IoError(err) } } impl From<wasmtime_wasi::Error> for Error { fn from(err: wasmtime_wasi::Error) -> Self { Self::WASIError(err) } } impl From<Error> for i32 { fn from(err: Error) -> Self { use Error::*; match err { ConfigurationError => 65, StringTableError => 65, InstantiationFailed => 65, ExportNotFound => 65, CallFailed => 65, WASIError(_) => 70, IoError(_) => 74, } } } pub type Result<T> = std::result::Result<T, Error>; pub fn run<T: AsRef<str>, U: AsRef<str>>( bytes: impl AsRef<[u8]>, args: impl IntoIterator<Item = T>, envs: impl IntoIterator<Item = (U, U)>, ) -> Result<Box<[wasmtime::Val]>> { debug!("configuring wasmtime engine"); let mut config = wasmtime::Config::new(); config.wasm_module_linking(true); config.wasm_multi_memory(true); config.static_memory_maximum_size(0); config.static_memory_guard_size(0); config.dynamic_memory_guard_size(0); config.dynamic_memory_reserved_for_growth(0); let engine = wasmtime::Engine::new(&config).or(Err(Error::ConfigurationError))?; debug!("instantiating wasmtime linker"); let mut linker = wasmtime::Linker::new(&engine); debug!("adding WASI to linker"); wasmtime_wasi::add_to_linker(&mut linker, |s| s)?; debug!("creating WASI context"); let mut wasi = WasiCtxBuilder::new(); for arg in args { wasi = wasi.arg(arg.as_ref()).or(Err(Error::StringTableError))?; } for kv in envs { wasi = wasi .env(kv.0.as_ref(), kv.1.as_ref()) .or(Err(Error::StringTableError))?; } info!("inheriting stdio from calling process"); wasi = wasi.inherit_stdio(); debug!("creating wasmtime Store"); let mut store = wasmtime::Store::new(&engine, wasi.build()); debug!("instantiating module from 
bytes"); let module = wasmtime::Module::from_binary(&engine, bytes.as_ref())?; debug!("adding module to store"); linker .module(&mut store, "", &module) .or(Err(Error::InstantiationFailed))?; debug!("getting module's default function"); let func = linker .get_default(&mut store, "") .or(Err(Error::ExportNotFound))?; debug!("calling function"); func.call(store, Default::default()) .or(Err(Error::CallFailed)) } #[cfg(test)] pub(crate) mod test { use crate::workload; use std::iter::empty; const NO_EXPORT_WAT: &'static str = r#"(module (memory (export "") 1) )"#; const RETURN_1_WAT: &'static str = r#"(module (func (export "") (result i32) i32.const 1) )"#; const WASI_COUNT_ARGS_WAT: &'static str = r#"(module (import "wasi_snapshot_preview1" "args_sizes_get" (func $__wasi_args_sizes_get (param i32 i32) (result i32))) (func (export "_start") (result i32) (i32.store (i32.const 0) (i32.const 0)) (i32.store (i32.const 4) (i32.const 0)) (call $__wasi_args_sizes_get (i32.const 0) (i32.const 4)) drop (i32.load (i32.const 0)) ) (memory 1) (export "memory" (memory 0)) )"#; const HELLO_WASI_WAT: &'static str = r#"(module (import "wasi_snapshot_preview1" "proc_exit" (func $__wasi_proc_exit (param i32))) (import "wasi_snapshot_preview1" "fd_write" (func $__wasi_fd_write (param i32 i32 i32 i32) (result i32))) (func $_start (i32.store (i32.const 24) (i32.const 14)) (i32.store (i32.const 20) (i32.const 0)) (block (br_if 0 (call $__wasi_fd_write (i32.const 1) (i32.const 20) (i32.const 1) (i32.const 16))) (br_if 0 (i32.ne (i32.load (i32.const 16)) (i32.const 14))) (br 1) ) (call $__wasi_proc_exit (i32.const 1)) ) (memory 1) (export "memory" (memory 0)) (export "_start" (func $_start)) (data (i32.const 0) "Hello, world!\0a") )"#; #[test]
#[test] fn workload_run_no_export() { let bytes = wat::parse_str(NO_EXPORT_WAT).expect("error parsing wat"); match workload::run(&bytes, empty::<String>(), empty::<(String, String)>()) { Err(workload::Error::ExportNotFound) => {} _ => panic!("unexpected error"), }; } #[test] fn workload_run_wasi_count_args() { let bytes = wat::parse_str(WASI_COUNT_ARGS_WAT).expect("error parsing wat"); let results: Vec<i32> = workload::run( &bytes, vec!["a".to_string(), "b".to_string(), "c".to_string()], vec![("k", "v")], ) .unwrap() .iter() .map(|v| v.unwrap_i32()) .collect(); assert_eq!(results, vec![3]); } #[test] fn workload_run_hello_wasi() { let bytes = wat::parse_str(HELLO_WASI_WAT).expect("error parsing wat"); let args: Vec<String> = vec![]; let envs: Vec<(String, String)> = vec![]; let results = workload::run(&bytes, args, envs).unwrap(); assert_eq!(results.len(), 0); } }
fn workload_run_return_1() { let bytes = wat::parse_str(RETURN_1_WAT).expect("error parsing wat"); let results: Vec<i32> = workload::run(&bytes, empty::<String>(), empty::<(String, String)>()) .unwrap() .iter() .map(|v| v.unwrap_i32()) .collect(); assert_eq!(results, vec![1]); }
function_block-full_function
[ { "content": "#[cfg(feature = \"gdb\")]\n\npub fn handle_gdb(block: &mut Block, gdb_fd: &mut Option<std::net::TcpStream>, sockaddr: &str) {\n\n use gdbstub::Connection;\n\n\n\n let req = unsafe { block.msg.req };\n\n match req.num.into() {\n\n sallyport::syscall::SYS_ENARX_GDB_START => {\n\n if gdb_fd.is_none() {\n\n let mut stream = wait_for_gdb_connection(sockaddr).unwrap();\n\n let res = stream\n\n .on_session_start()\n\n .map(|_| [0usize.into(), 0usize.into()])\n\n .map_err(|e| e.raw_os_error().unwrap_or(libc::EINVAL));\n\n if res.is_ok() {\n\n gdb_fd.replace(stream);\n\n }\n\n block.msg.rep = res.into();\n\n } else {\n\n block.msg.rep = Ok([0usize.into(), 0usize.into()]).into();\n\n }\n\n }\n", "file_path": "src/backend/mod.rs", "rank": 0, "score": 290173.30933490256 }, { "content": "/// Returns a handle to a child process through which output (stdout, stderr) can\n\n/// be accessed.\n\npub fn keepldr_exec<'a>(bin: &str, input: impl Into<Option<&'a [u8]>>) -> Output {\n\n let bin_path = Path::new(CRATE).join(OUT_DIR).join(TEST_BINS_OUT).join(bin);\n\n\n\n let mut child = Command::new(&String::from(KEEP_BIN))\n\n .current_dir(CRATE)\n\n .arg(\"exec\")\n\n .arg(bin_path)\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .spawn()\n\n .unwrap_or_else(|e| panic!(\"failed to run `{}`: {:#?}\", bin, e));\n\n\n\n if let Some(input) = input.into() {\n\n child\n\n .stdin\n\n .as_mut()\n\n .unwrap()\n\n .write_all(input)\n\n .expect(\"failed to write stdin to child\");\n", "file_path": "tests/common/mod.rs", "rank": 1, "score": 261197.41797171294 }, { "content": "fn humanize(mut size: f64) -> (f64, &'static str) {\n\n let mut iter = 0;\n\n\n\n while size > 512.0 {\n\n size /= 1024.0;\n\n iter += 1;\n\n }\n\n\n\n let suffix = match iter {\n\n 0 => \"\",\n\n 1 => \"KiB\",\n\n 2 => \"MiB\",\n\n 3 => \"GiB\",\n\n 4 => \"TiB\",\n\n 5 => \"PiB\",\n\n 6 => \"EiB\",\n\n 7 => \"ZiB\",\n\n 8 => \"YiB\",\n\n _ => panic!(\"Size 
unsupported!\"),\n\n };\n", "file_path": "src/backend/sgx/data.rs", "rank": 2, "score": 255611.87512119254 }, { "content": "pub fn enarx_run<'a>(wasm: &str, input: impl Into<Option<&'a [u8]>>) -> Output {\n\n let wasm_path = Path::new(CRATE)\n\n .join(OUT_DIR)\n\n .join(TEST_BINS_OUT)\n\n .join(wasm);\n\n\n\n let mut child = Command::new(&String::from(KEEP_BIN))\n\n .current_dir(CRATE)\n\n .arg(\"run\")\n\n .arg(wasm_path)\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .spawn()\n\n .unwrap_or_else(|e| panic!(\"failed to run `{}`: {:#?}\", wasm, e));\n\n\n\n if let Some(input) = input.into() {\n\n child\n\n .stdin\n\n .as_mut()\n", "file_path": "tests/wasmldr_tests.rs", "rank": 3, "score": 239279.18085503957 }, { "content": "fn read_item<T: Copy>(mut rdr: impl Read) -> std::io::Result<T> {\n\n let mut item = MaybeUninit::uninit();\n\n let ptr = item.as_mut_ptr() as *mut u8;\n\n let buf = unsafe { from_raw_parts_mut(ptr, size_of::<T>()) };\n\n rdr.read_exact(buf)?;\n\n Ok(unsafe { item.assume_init() })\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 4, "score": 236202.0202914127 }, { "content": "trait Mapper: Sized + TryFrom<Self::Config, Error = Error> {\n\n type Config: Config;\n\n type Output: TryFrom<Self, Error = Error>;\n\n\n\n fn map(\n\n &mut self,\n\n pages: Map<perms::ReadWrite>,\n\n to: usize,\n\n with: <Self::Config as Config>::Flags,\n\n ) -> Result<()>;\n\n}\n\n\n", "file_path": "src/backend/mod.rs", "rank": 6, "score": 227719.8403455682 }, { "content": "#[cfg(feature = \"gdb\")]\n\npub fn wait_for_gdb_connection(sockaddr: &str) -> std::io::Result<std::net::TcpStream> {\n\n use std::net::TcpListener;\n\n\n\n eprintln!(\"Waiting for a GDB connection on {:?}...\", sockaddr);\n\n let sock = TcpListener::bind(sockaddr)?;\n\n let (stream, addr) = sock.accept()?;\n\n\n\n // Blocks until a GDB client connects via TCP.\n\n // i.e: Running `target remote localhost:<port>` from the GDB prompt.\n\n\n\n 
eprintln!(\"Debugger connected from {}\", addr);\n\n Ok(stream) // `TcpStream` implements `gdbstub::Connection`\n\n}\n\n\n", "file_path": "src/backend/mod.rs", "rank": 7, "score": 222794.8081525314 }, { "content": "pub fn assert_eq_slices(expected_output: &[u8], output: &[u8], what: &str) {\n\n let max_len = usize::min(output.len(), expected_output.len());\n\n let max_len = max_len.min(MAX_ASSERT_ELEMENTS);\n\n assert_eq!(\n\n output[..max_len],\n\n expected_output[..max_len],\n\n \"Expected contents of {} differs\",\n\n what\n\n );\n\n assert_eq!(\n\n output.len(),\n\n expected_output.len(),\n\n \"Expected length of {} differs\",\n\n what\n\n );\n\n assert_eq!(\n\n output, expected_output,\n\n \"Expected contents of {} differs\",\n\n what\n\n );\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 8, "score": 220398.50279299944 }, { "content": "/// smash the pagetable entries to 4k pages\n\npub fn smash(addr: VirtAddr) -> Result<(), Error> {\n\n let trans = paging::SHIM_PAGETABLE.write().translate(addr);\n\n match trans {\n\n TranslateResult::Mapped {\n\n frame,\n\n flags: _,\n\n offset: _,\n\n } => match frame {\n\n MappedFrame::Size4KiB(_frame) => Ok(()),\n\n MappedFrame::Size2MiB(_frame) => {\n\n let page = Page::<Size2MiB>::containing_address(addr);\n\n let new_pagetable: &mut PageTable = unsafe {\n\n &mut *(ALLOCATOR\n\n .write()\n\n .try_alloc(Layout::from_size_align_unchecked(\n\n size_of::<PageTable>(),\n\n Page::<Size4KiB>::SIZE as _,\n\n ))\n\n .unwrap()\n\n .as_ptr() as *mut PageTable)\n", "file_path": "internal/shim-sev/src/pagetables.rs", "rank": 9, "score": 220081.8971039637 }, { "content": "/// Returns a handle to a child process through which output (stdout, stderr) can\n\n/// be accessed.\n\npub fn run_test<'a>(\n\n bin: &str,\n\n status: i32,\n\n input: impl Into<Option<&'a [u8]>>,\n\n expected_stdout: impl Into<Option<&'a [u8]>>,\n\n expected_stderr: impl Into<Option<&'a [u8]>>,\n\n) -> Output {\n\n let output = keepldr_exec(bin, input);\n\n 
check_output(&output, status.into(), expected_stdout, expected_stderr);\n\n output\n\n}\n\n\n", "file_path": "tests/common/mod.rs", "rank": 10, "score": 214547.480405047 }, { "content": "fn return_empty_option(_i: usize) -> Option<&'static mut Block> {\n\n None\n\n}\n\n\n\n/// The static HostCall Mutex\n\npub static HOST_CALL_ALLOC: Lazy<RwLocked<HostCallAllocator>> = Lazy::new(|| {\n\n if snp_active() {\n\n // For SEV-SNP mark the sallyport pages as shared/unencrypted\n\n\n\n let npages = (unsafe {\n\n &_ENARX_SALLYPORT_END as *const _ as usize\n\n - &_ENARX_SALLYPORT_START as *const _ as usize\n\n }) / Page::<Size4KiB>::SIZE as usize;\n\n\n\n GHCB.set_memory_shared(\n\n VirtAddr::from_ptr(unsafe { &_ENARX_SALLYPORT_START }),\n\n npages,\n\n );\n\n }\n\n\n", "file_path": "internal/shim-sev/src/hostcall.rs", "rank": 11, "score": 214346.94075610617 }, { "content": "/// Returns a handle to a child process through which output (stdout, stderr) can\n\n/// be accessed.\n\npub fn run_crate<'a>(\n\n crate_name: &str,\n\n bin: &str,\n\n status: i32,\n\n input: impl Into<Option<&'a [u8]>>,\n\n expected_stdout: impl Into<Option<&'a [u8]>>,\n\n expected_stderr: impl Into<Option<&'a [u8]>>,\n\n) -> Output {\n\n let output = keepldr_exec_crate(crate_name, bin, input);\n\n check_output(&output, status.into(), expected_stdout, expected_stderr);\n\n output\n\n}\n", "file_path": "tests/common/mod.rs", "rank": 12, "score": 204621.5900378666 }, { "content": "pub fn check_output<'a>(\n\n output: &Output,\n\n expected_status: i32,\n\n expected_stdout: impl Into<Option<&'a [u8]>>,\n\n expected_stderr: impl Into<Option<&'a [u8]>>,\n\n) {\n\n let expected_stdout = expected_stdout.into();\n\n let expected_stderr = expected_stderr.into();\n\n\n\n // Output potential error messages\n\n if expected_stderr.is_none() && !output.stderr.is_empty() {\n\n let _ = std::io::stderr().write_all(&output.stderr);\n\n }\n\n\n\n if let Some(expected_stdout) = expected_stdout {\n\n if output.stdout.len() < 
MAX_ASSERT_ELEMENTS && expected_stdout.len() < MAX_ASSERT_ELEMENTS\n\n {\n\n assert_eq!(\n\n output.stdout, expected_stdout,\n\n \"Expected contents of stdout output differs\"\n", "file_path": "tests/common/mod.rs", "rank": 13, "score": 204616.9290637988 }, { "content": "fn parse_env_var(s: &str) -> Result<(String, String)> {\n\n let parts: Vec<&str> = s.splitn(2, '=').collect();\n\n if parts.len() != 2 {\n\n bail!(\"must be of the form `NAME=VAL`\");\n\n }\n\n Ok((parts[0].to_owned(), parts[1].to_owned()))\n\n}\n", "file_path": "internal/wasmldr/src/cli.rs", "rank": 14, "score": 202301.65255005637 }, { "content": "/// Returns a handle to a child process through which output (stdout, stderr) can\n\n/// be accessed.\n\npub fn keepldr_exec_crate<'a>(\n\n crate_name: &str,\n\n bin: &str,\n\n input: impl Into<Option<&'a [u8]>>,\n\n) -> Output {\n\n let crate_path = Path::new(CRATE).join(crate_name);\n\n\n\n let mut child = Command::new(\"cargo\")\n\n .current_dir(crate_path)\n\n .arg(\"run\")\n\n .arg(\"--quiet\")\n\n .arg(\"--bin\")\n\n .arg(bin)\n\n .env(\"ENARX_BIN\", KEEP_BIN)\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .spawn()\n\n .unwrap_or_else(|e| panic!(\"failed to run `{}`: {:#?}\", bin, e));\n\n\n", "file_path": "tests/common/mod.rs", "rank": 15, "score": 200391.94711433037 }, { "content": "/// clear the c_bit\n\npub fn clear_c_bit_address_range(start: VirtAddr, end: VirtAddr) -> Result<(), Error> {\n\n let c_bit_mask = get_cbit_mask();\n\n\n\n let enc_phys_offset = EncPhysOffset::default();\n\n\n\n let mut current = start;\n\n loop {\n\n if current >= end {\n\n return Ok(());\n\n }\n\n let trans = paging::SHIM_PAGETABLE.write().translate(current);\n\n\n\n current += match trans {\n\n TranslateResult::Mapped {\n\n frame,\n\n flags: _,\n\n offset,\n\n } => match frame {\n\n MappedFrame::Size4KiB(frame) => {\n\n if offset != 0 {\n", "file_path": "internal/shim-sev/src/pagetables.rs", "rank": 16, "score": 
194242.3670466561 }, { "content": "#[test]\n\n#[serial]\n\nfn hello_wasi_snapshot1() {\n\n // This module just prints \"Hello, world!\" to stdout. Hooray!\n\n run_wasm_test(\n\n \"hello_wasi_snapshot1.wasm\",\n\n 0,\n\n None,\n\n &b\"Hello, world!\\n\"[..],\n\n None,\n\n );\n\n}\n\n\n", "file_path": "tests/wasmldr_tests.rs", "rank": 17, "score": 193538.97724405886 }, { "content": "pub fn run(cmd: Command) -> Result<()> {\n\n match cmd {\n\n Command::Vcek => write_vcek(&mut io::stdout()),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_merge_vcek_stack() -> Result<()> {\n\n let pem = merge_vcek_stack(\n\n include_bytes!(\"testdata/vcek.der\"),\n\n include_str!(\"testdata/chain.pem\"),\n\n )?;\n\n let certs = X509::stack_from_pem(pem.as_bytes())?;\n\n assert_eq!(certs.len(), 3);\n\n let cert1_key = certs[1].public_key()?;\n\n let cert2_key = certs[2].public_key()?;\n\n assert!(certs[0].verify(&cert1_key).expect(\n\n \"failed to verify that certificate 0 is signed using public key of certificate 1\"\n\n ));\n\n assert!(certs[1].verify(&cert2_key).expect(\n\n \"failed to verify that certificate 1 is signed using public key of certificate 2\"\n\n ));\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/cli/sev.rs", "rank": 18, "score": 177414.215166472 }, { "content": "fn compile(wasm: &str) {\n\n let out_dir = Path::new(CRATE).join(OUT_DIR).join(TEST_BINS_OUT);\n\n let wasm = out_dir.join(wasm);\n\n\n\n create(&out_dir);\n\n\n\n let src_path = &Path::new(CRATE).join(\"tests/wasm\");\n\n\n\n let wat = src_path\n\n .join(wasm.file_stem().unwrap())\n\n .with_extension(\"wat\");\n\n\n\n // poor mans `make`\n\n if wasm.exists() {\n\n let wasm_meta = wasm.metadata().unwrap();\n\n let wasm_time = wasm_meta.modified().unwrap();\n\n\n\n let wat_meta = wat.metadata().unwrap();\n\n let wat_time = wat_meta.modified().unwrap();\n\n\n\n if wasm_meta.len() > 0 && wasm_time > wat_time {\n\n // skip, if already compiled and newer than original\n\n 
return;\n\n }\n\n }\n\n\n\n let bin = wat::parse_file(&wat).unwrap_or_else(|_| panic!(\"failed to compile {:?}\", &wat));\n\n std::fs::write(&wasm, &bin).unwrap_or_else(|_| panic!(\"failed to write {:?}\", &wasm));\n\n}\n\n\n", "file_path": "tests/wasmldr_tests.rs", "rank": 19, "score": 177304.3307408607 }, { "content": "/// Exit the shim with a `status` code\n\n///\n\n/// Reverts to a triple fault, which causes a `#VMEXIT` and a KVM shutdown,\n\n/// if it cannot talk to the host.\n\npub fn shim_exit(status: i32) -> ! {\n\n if let Some(mut host_call) = HOST_CALL_ALLOC.try_alloc() {\n\n host_call.exit_group(status)\n\n }\n\n\n\n // provoke triple fault, causing a VM shutdown\n\n unsafe { _enarx_asm_triple_fault() }\n\n}\n", "file_path": "internal/shim-sev/src/hostcall.rs", "rank": 20, "score": 176739.82734386064 }, { "content": "#[inline(always)]\n\npub fn shim_write_all(fd: HostFd, bytes: &[u8]) -> Result<(), libc::c_int> {\n\n let bytes_len = bytes.len();\n\n let mut to_write = bytes_len;\n\n\n\n let mut host_call = HOST_CALL_ALLOC.try_alloc().ok_or(libc::EIO)?;\n\n\n\n loop {\n\n let written = unsafe {\n\n let next = bytes_len.checked_sub(to_write).ok_or(libc::EFAULT)?;\n\n host_call\n\n .write(fd.as_raw_fd(), &bytes[next..])\n\n .map(|regs| usize::from(regs[0]))\n\n }?;\n\n // be careful with `written` as it is untrusted\n\n to_write = to_write.checked_sub(written).ok_or(libc::EIO)?;\n\n if to_write == 0 {\n\n break;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "internal/shim-sev/src/hostcall.rs", "rank": 21, "score": 176308.89879153203 }, { "content": "fn write_vcek<T: io::Write>(w: &mut T) -> Result<()> {\n\n let mut sev = Firmware::open().context(\"failed to open SEV device\")?;\n\n\n\n let id = sev.identifier().context(\"failed to query SEV identifier\")?;\n\n\n\n let status = sev\n\n .platform_status()\n\n .context(\"failed to query SEV platform status\")?;\n\n if status.tcb.platform_version != status.tcb.reported_version {\n\n // It is not clear 
from the documentation what the difference between the two is,\n\n // therefore only proceed if they are identical to ensure correctness.\n\n // TODO: Figure out which one should be used and drop this check.\n\n return Err(anyhow!(\n\n \"reported TCB version is not equal to installed TCB version\"\n\n ));\n\n }\n\n\n\n let client = reqwest::blocking::Client::new();\n\n\n\n let vcek_der = client\n", "file_path": "src/cli/sev.rs", "rank": 22, "score": 160532.71502111587 }, { "content": "#[test]\n\n#[serial]\n\nfn no_export() {\n\n // This module has no exported functions, so we get Error::ExportNotFound,\n\n // which wasmldr maps to EX_DATAERR (65) at process exit.\n\n run_wasm_test(\"no_export.wasm\", 65, None, None, None);\n\n}\n", "file_path": "tests/wasmldr_tests.rs", "rank": 23, "score": 159843.6199133776 }, { "content": "pub fn dev_sev() -> Datum {\n\n Datum {\n\n name: \"Driver\".into(),\n\n pass: std::path::Path::new(\"/dev/sev\").exists(),\n\n info: Some(\"/dev/sev\".into()),\n\n mesg: None,\n\n }\n\n}\n\n\n", "file_path": "src/backend/sev/data.rs", "rank": 24, "score": 159381.6167117152 }, { "content": "pub fn dev_kvm() -> Datum {\n\n let dev_kvm = std::path::Path::new(\"/dev/kvm\");\n\n\n\n Datum {\n\n name: \"Driver\".into(),\n\n pass: dev_kvm.exists(),\n\n info: Some(\"/dev/kvm\".into()),\n\n mesg: None,\n\n }\n\n}\n\n\n", "file_path": "src/backend/kvm/data.rs", "rank": 25, "score": 159381.6167117152 }, { "content": "pub fn kvm_version() -> Datum {\n\n let version = Kvm::new().map(|kvm| kvm.get_api_version());\n\n let (pass, info) = match version {\n\n Ok(v) => (v == 12, Some(v.to_string())),\n\n Err(_) => (false, None),\n\n };\n\n\n\n Datum {\n\n name: \" API Version\".into(),\n\n pass,\n\n info,\n\n mesg: None,\n\n }\n\n}\n", "file_path": "src/backend/kvm/data.rs", "rank": 26, "score": 159381.6167117152 }, { "content": "#[test]\n\n#[serial]\n\nfn memory_stress_test() {\n\n run_crate(\n\n \"integration/simple\",\n\n \"memory_stress_test\",\n\n 0,\n\n 
None,\n\n None,\n\n None,\n\n );\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 27, "score": 157920.4948648471 }, { "content": "#[test]\n\n#[serial]\n\nfn wasi_snapshot1() {\n\n // This module uses WASI to return the number of commandline args.\n\n // Since we don't currently do anything with the function return value,\n\n // we don't get any output here, and we expect '0', as above.\n\n run_wasm_test(\"wasi_snapshot1.wasm\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/wasmldr_tests.rs", "rank": 28, "score": 156559.55005724414 }, { "content": "pub fn has_reasonable_memlock_rlimit() -> Datum {\n\n let mut rlimits = MaybeUninit::uninit();\n\n let res = unsafe { libc::getrlimit(libc::RLIMIT_MEMLOCK, rlimits.as_mut_ptr()) };\n\n\n\n let (pass, info) = if res == 0 {\n\n let rlimit = unsafe { rlimits.assume_init() };\n\n\n\n /* footprint = approximately (size of shim + size of wasmldr + size of workload) */\n\n let keep_footprint = nbytes::bytes![5; MiB];\n\n\n\n let num_keeps = rlimit.rlim_cur as usize / keep_footprint;\n\n let keep_status = format!(\n\n \"{}{} keep{}\",\n\n if num_keeps > 0 { \"~\" } else { \"\" },\n\n num_keeps,\n\n if num_keeps == 1 { \"\" } else { \"s\" }\n\n );\n\n\n\n let pass = num_keeps > 0;\n\n\n", "file_path": "src/backend/sev/data.rs", "rank": 29, "score": 156517.98786180757 }, { "content": "pub fn dev_sgx_enclave() -> Datum {\n\n let mut pass = false;\n\n\n\n if File::open(\"/dev/sgx_enclave\").is_ok() {\n\n pass = true;\n\n }\n\n\n\n Datum {\n\n name: \"Driver\".into(),\n\n pass,\n\n info: Some(\"/dev/sgx_enclave\".into()),\n\n mesg: None,\n\n }\n\n}\n", "file_path": "src/backend/sgx/data.rs", "rank": 30, "score": 156517.98786180757 }, { "content": "pub fn dev_sev_readable() -> Datum {\n\n let opts = OpenOptions::new().read(true).open(\"/dev/sev\");\n\n\n\n Datum {\n\n name: \" /dev/sev is readable by user\".into(),\n\n pass: opts.is_ok(),\n\n info: None,\n\n mesg: None,\n\n }\n\n}\n\n\n", "file_path": 
"src/backend/sev/data.rs", "rank": 31, "score": 156517.98786180757 }, { "content": "pub fn sev_enabled_in_kernel() -> Datum {\n\n let mut datum = Datum {\n\n name: \" SEV is enabled in host kernel\".into(),\n\n pass: false,\n\n info: None,\n\n mesg: None,\n\n };\n\n\n\n let mod_param = \"/sys/module/kvm_amd/parameters/sev\";\n\n if std::path::Path::new(mod_param).exists() {\n\n if let Ok(val) = std::fs::read_to_string(mod_param) {\n\n datum.pass = val.trim() == \"1\" || val.trim() == \"Y\";\n\n }\n\n }\n\n\n\n datum\n\n}\n\n\n", "file_path": "src/backend/sev/data.rs", "rank": 32, "score": 156517.98786180757 }, { "content": "pub fn dev_sev_writable() -> Datum {\n\n let opts = OpenOptions::new().write(true).open(\"/dev/sev\");\n\n\n\n Datum {\n\n name: \" /dev/sev is writable by user\".into(),\n\n pass: opts.is_ok(),\n\n info: None,\n\n mesg: None,\n\n }\n\n}\n\n\n\npub const CPUIDS: &[CpuId] = &[\n\n CpuId {\n\n name: \"CPU Manufacturer\",\n\n leaf: 0x00000000,\n\n subl: 0x00000000,\n\n func: |res| {\n\n let name: [u8; 12] = unsafe { transmute([res.ebx, res.edx, res.ecx]) };\n\n let name = from_utf8(&name[..]).unwrap();\n\n (name == \"AuthenticAMD\", Some(name.into()))\n", "file_path": "src/backend/sev/data.rs", "rank": 33, "score": 156517.98786180757 }, { "content": "fn merge_vcek_stack(vcek_der: &[u8], chain_pem: &str) -> Result<String> {\n\n let vcek_pem = X509::from_der(vcek_der)\n\n .context(\"failed to parse VCEK certificate\")?\n\n .to_pem()\n\n .context(\"failed to format VCEK certificate as PEM\")\n\n .map(String::from_utf8)?\n\n .context(\"invalid PEM generated by openssl\")?;\n\n Ok(format!(\"{}{}\", vcek_pem, chain_pem))\n\n}\n\n\n", "file_path": "src/cli/sev.rs", "rank": 34, "score": 156470.50938531384 }, { "content": "fn get_att(mut nonce: [u8; 64]) -> std::io::Result<()> {\n\n let mut buffer = [0u8; 4000];\n\n let (len, tech) = get_att_syscall(Some(&mut nonce[..]), Some(&mut buffer))?;\n\n\n\n assert!(matches!(tech, TeeTech::Sev));\n\n assert_eq!(len, 
core::mem::size_of::<SnpReportResponseData>());\n\n\n\n let report_data = buffer.as_ptr() as *const SnpReportResponseData;\n\n\n\n let report = unsafe { report_data.read_unaligned() };\n\n\n\n assert_eq!(report.status, 0);\n\n assert_eq!(report.size, 1184);\n\n\n\n assert_eq!(report.report.version, 2);\n\n\n\n eprintln!(\"report: {:?}\", report);\n\n Ok(())\n\n}\n\n\n", "file_path": "integration/sev_attestation/src/main.rs", "rank": 35, "score": 155694.95197271058 }, { "content": "#[inline(always)]\n\npub fn snp_active() -> bool {\n\n get_cbit_mask() > 0\n\n}\n\n\n\n/// Error returned by pvalidate\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum Error {\n\n /// Reasons:\n\n /// - Page size is 2MB and page is not 2MB aligned\n\n FailInput,\n\n /// Reasons:\n\n /// - 2MB validation backed by 4KB pages\n\n FailSizeMismatch,\n\n /// Unknown error\n\n Unknown(u32),\n\n}\n\n\n\n/// The size of the page to `pvalidate`\n\n#[repr(u64)]\n", "file_path": "internal/shim-sev/src/snp/mod.rs", "rank": 36, "score": 153223.299030972 }, { "content": "#[inline(always)]\n\npub fn get_cbit_mask() -> u64 {\n\n C_BIT_MASK.load(Ordering::Relaxed)\n\n}\n\n\n\n/// Test, if SEV-SNP is enabled\n", "file_path": "internal/shim-sev/src/snp/mod.rs", "rank": 37, "score": 150647.40425794042 }, { "content": "#[doc(hidden)]\n\n#[inline(always)]\n\npub fn _eprint(args: fmt::Arguments) {\n\n use fmt::Write;\n\n\n\n if !is_printing_enabled() {\n\n // Early return to prevent dead locks.\n\n return;\n\n }\n\n\n\n HostWrite(unsafe { HostFd::from_raw_fd(libc::STDERR_FILENO) })\n\n .write_fmt(args)\n\n .expect(\"Printing via Host fd 2 failed\");\n\n}\n\n\n\n/// Prints to the standard output of the host.\n\n///\n\n/// Equivalent to the [`println!`] macro except that a newline is not printed at\n\n/// the end of the message.\n\n///\n\n/// [`println!`]: macro.println.html\n\n///\n", "file_path": "internal/shim-sev/src/print.rs", "rank": 38, "score": 148038.88573212028 }, { "content": 
"#[doc(hidden)]\n\n#[inline(always)]\n\npub fn _print(args: fmt::Arguments) {\n\n use fmt::Write;\n\n\n\n if !is_printing_enabled() {\n\n // Early return to prevent dead locks.\n\n return;\n\n }\n\n\n\n HostWrite(unsafe { HostFd::from_raw_fd(libc::STDOUT_FILENO) })\n\n .write_fmt(args)\n\n .expect(\"Printing via Host fd 1 failed\");\n\n}\n\n\n", "file_path": "internal/shim-sev/src/print.rs", "rank": 39, "score": 148038.88573212028 }, { "content": "pub fn epc_size(max: u32) -> Datum {\n\n let mut pass = false;\n\n let mut info = None;\n\n\n\n if max >= 0x00000012 {\n\n let mut size = 0;\n\n\n\n for i in 2.. {\n\n let result = unsafe { __cpuid_count(0x00000012, i) };\n\n if result.eax & 0xf != 1 {\n\n break;\n\n }\n\n\n\n let low = result.ecx as u64 & 0xfffff000;\n\n let high = result.edx as u64 & 0x000fffff;\n\n size += high << 12 | low;\n\n }\n\n\n\n let (n, s) = humanize(size as f64);\n\n info = Some(format!(\"{:.0} {}\", n, s));\n", "file_path": "src/backend/sgx/data.rs", "rank": 40, "score": 147872.90870807407 }, { "content": "#[inline]\n\npub fn cpuid(leaf: u32) -> CpuidResult {\n\n cpuid_count(leaf, 0)\n\n}\n\n\n\n/// Returns the result of the `cpuid` instruction for a given `leaf` (`EAX`)\n\n/// and\n\n/// `sub_leaf` (`ECX`).\n\n///\n\n/// The highest-supported leaf value is returned by the first tuple argument of\n\n/// [`__get_cpuid_max(0)`](fn.__get_cpuid_max.html). 
For leaves containung\n\n/// sub-leaves, the second tuple argument returns the highest-supported\n\n/// sub-leaf\n\n/// value.\n\n///\n\n/// The [CPUID Wikipedia page][wiki_cpuid] contains how to query which\n\n/// information using the `EAX` and `ECX` registers, and the interpretation of\n\n/// the results returned in `EAX`, `EBX`, `ECX`, and `EDX`.\n\n///\n\n/// The references are:\n\n/// - [Intel 64 and IA-32 Architectures Software Developer's Manual Volume 2:\n\n/// Instruction Set Reference, A-Z][intel64_ref].\n\n/// - [AMD64 Architecture Programmer's Manual, Volume 3: General-Purpose and\n\n/// System Instructions][amd64_ref].\n\n///\n\n/// [wiki_cpuid]: https://en.wikipedia.org/wiki/CPUID\n\n/// [intel64_ref]: http://www.intel.de/content/dam/www/public/us/en/documents/manuals/64-ia-32-architectures-software-developer-instruction-set-reference-manual-325383.pdf\n\n/// [amd64_ref]: http://support.amd.com/TechDocs/24594.pdf\n", "file_path": "internal/shim-sev/src/snp/cpuid_page.rs", "rank": 41, "score": 140526.93034155777 }, { "content": "fn main() {\n\n let mut ret = 0;\n\n let mut size: usize = 1;\n\n while size < SIZE_32M {\n\n let mut vec = Vec::with_capacity(size);\n\n vec.push(0u8);\n\n ret += vec.pop().unwrap();\n\n size *= 2;\n\n drop(vec);\n\n }\n\n\n\n for _i in 0..100 {\n\n let mut vec = Vec::with_capacity(size);\n\n vec.push(0u8);\n\n ret += vec.pop().unwrap();\n\n drop(vec);\n\n }\n\n\n\n while size > 0 {\n\n let mut vec = Vec::with_capacity(size);\n\n vec.push(0u8);\n\n ret += vec.pop().unwrap();\n\n size /= 2;\n\n drop(vec);\n\n }\n\n\n\n std::process::exit(ret as _);\n\n}\n", "file_path": "integration/simple/src/memory_stress_test.rs", "rank": 42, "score": 140290.46787319946 }, { "content": "fn main() -> Result<()> {\n\n let opts = Options::from_args();\n\n opts.log.init_logger();\n\n\n\n info!(\"logging initialized!\");\n\n info!(\"CLI opts: {:?}\", &opts);\n\n\n\n match opts.cmd {\n\n cli::Command::Info(info) => info.display(),\n\n 
cli::Command::Exec(exec) => {\n\n let backend = exec.backend.pick()?;\n\n let binary = mmarinus::Kind::Private.load::<mmarinus::perms::Read, _>(&exec.binpath)?;\n\n #[cfg(not(feature = \"gdb\"))]\n\n let gdblisten = None;\n\n\n\n #[cfg(feature = \"gdb\")]\n\n let gdblisten = Some(exec.gdblisten);\n\n\n\n keep_exec(backend, backend.shim(), binary, gdblisten)\n\n }\n", "file_path": "src/main.rs", "rank": 43, "score": 135907.0369699941 }, { "content": "pub fn parse(\n\n mut input: impl Read,\n\n mut handle_custom: impl FnMut(&[u8]) -> Result<()>,\n\n mut handle_default: impl FnMut(&[u8]) -> Result<()>,\n\n) -> Result<()> {\n\n let mut buf = Vec::new();\n\n let mut parser = Parser::new(0);\n\n let mut eof = false;\n\n let mut stack = Vec::new();\n\n\n\n loop {\n\n let (payload, consumed) = match parser.parse(&buf, eof).or(Err(ErrorKind::InvalidInput))? {\n\n Chunk::NeedMoreData(hint) => {\n\n assert!(!eof); // otherwise an error would be returned\n\n\n\n // Use the hint to preallocate more space, then read\n\n // some more data into our buffer.\n\n //\n\n // Note that the buffer management here is not ideal,\n\n // but it's compact enough to fit in an example!\n", "file_path": "internal/wasmldr/src/bundle.rs", "rank": 44, "score": 129337.69030337525 }, { "content": "#[inline]\n\npub fn cpuid_count(leaf: u32, sub_leaf: u32) -> CpuidResult {\n\n if !snp_active() {\n\n unsafe { core::arch::x86_64::__cpuid_count(leaf, sub_leaf) }\n\n } else {\n\n let cpuid = &unsafe { _ENARX_CPUID };\n\n cpuid\n\n .get_functions()\n\n .iter()\n\n .find_map(|e| {\n\n if e.eax_in == leaf && e.ecx_in == sub_leaf {\n\n Some(CpuidResult {\n\n eax: e.eax,\n\n ebx: e.ebx,\n\n ecx: e.ecx,\n\n edx: e.edx,\n\n })\n\n } else {\n\n None\n\n }\n\n })\n", "file_path": "internal/shim-sev/src/snp/cpuid_page.rs", "rank": 45, "score": 127509.81458035077 }, { "content": "/// Initialize the IDT\n\npub fn init() {\n\n #[cfg(debug_assertions)]\n\n eprintln!(\"interrupts::init\");\n\n 
IDT.load();\n\n}\n\n\n\n#[cfg(feature = \"dbg\")]\n\nmod debug {\n\n use super::*;\n\n\n\n pub(crate) fn idt_add_debug_exception_handlers(idt: &mut InterruptDescriptorTable) {\n\n unsafe {\n\n let virt = VirtAddr::new_unsafe(divide_error_handler as usize as u64);\n\n idt.divide_error.set_handler_addr(virt).set_stack_index(6);\n\n\n\n let virt = VirtAddr::new_unsafe(debug_handler as usize as u64);\n\n idt.debug.set_handler_addr(virt).set_stack_index(1);\n\n\n\n let virt = VirtAddr::new_unsafe(non_maskable_interrupt_handler as usize as u64);\n\n idt.non_maskable_interrupt\n", "file_path": "internal/shim-sev/src/interrupts.rs", "rank": 46, "score": 127223.83487703739 }, { "content": "fn main() -> io::Result<()> {\n\n let mut buffer = Vec::new();\n\n std::io::stdin().read_to_end(&mut buffer)?;\n\n std::io::stdout().write_all(&buffer)?;\n\n Ok(())\n\n}\n", "file_path": "integration/simple/src/echo.rs", "rank": 47, "score": 125395.15075135576 }, { "content": "/// Setup and check SSE relevant stuff\n\npub fn init_sse() {\n\n const XSAVE_SUPPORTED_BIT: u32 = 1 << 26;\n\n let xsave_supported = (cpuid_count(1, 0).ecx & XSAVE_SUPPORTED_BIT) != 0;\n\n assert!(xsave_supported);\n\n\n\n let xsaveopt_supported = (cpuid_count(0xD, 1).eax & 1) == 1;\n\n assert!(xsaveopt_supported);\n\n\n\n let sse_extended_supported = (cpuid_count(0xd, 0).eax & 0b111) == 0b111;\n\n\n\n if sse_extended_supported {\n\n let mut xcr0 = XCr0::read();\n\n xcr0 |= XCr0Flags::AVX | XCr0Flags::SSE;\n\n unsafe { XCr0::write(xcr0) };\n\n } else {\n\n let mut xcr0 = XCr0::read();\n\n xcr0 |= XCr0Flags::SSE;\n\n unsafe { XCr0::write(xcr0) };\n\n }\n\n\n", "file_path": "internal/shim-sev/src/sse.rs", "rank": 48, "score": 125220.34193461714 }, { "content": "/// Unmap the initial identity mapping for 0xC000_0000..=0xFFFF_FFFF\n\npub fn unmap_identity() {\n\n SHIM_PAGETABLE.write().level_4_table()[0] = PageTableEntry::new();\n\n flush_all();\n\n}\n\n\n\n/// Error returned by this 
module\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum Error {\n\n /// The given virtual address is not mapped to a physical frame.\n\n NotMapped,\n\n /// The page table entry for the given virtual address points to an invalid physical address.\n\n InvalidFrameAddress(PhysAddr),\n\n /// The given virtual address is not page aligned.\n\n NotAligned,\n\n}\n\n\n", "file_path": "internal/shim-sev/src/pagetables.rs", "rank": 49, "score": 125220.34193461714 }, { "content": "pub fn kvm_builder_map(\n\n sallyports: &mut Vec<Option<VirtAddr>>,\n\n vm_fd: &mut VmFd,\n\n pages: &mut Map<perms::ReadWrite>,\n\n to: usize,\n\n with: u32,\n\n slot: u32,\n\n) -> anyhow::Result<kvm_userspace_memory_region> {\n\n if with & SALLYPORT != 0 {\n\n for start in (0..pages.size()).step_by(size_of::<Block>()) {\n\n if start + size_of::<Block>() <= pages.size() {\n\n let virt = VirtAddr::from_ptr(pages.as_ptr()) + start;\n\n sallyports.push(Some(virt));\n\n }\n\n }\n\n }\n\n\n\n let mem_region = kvm_userspace_memory_region {\n\n slot,\n\n flags: 0,\n", "file_path": "src/backend/kvm/builder.rs", "rank": 50, "score": 125220.34193461714 }, { "content": "#[inline]\n\npub fn enable_printing() {\n\n unsafe { PRINT_INHIBITOR.store(0, Ordering::Release) }\n\n}\n\n\n\n/// Returns true, if shim can (debug) print\n\n///\n\n/// See also [`PrintBarrier`]\n", "file_path": "internal/shim-sev/src/print.rs", "rank": 51, "score": 125220.34193461714 }, { "content": "pub fn kvm_try_from_builder(\n\n sallyports: &[Option<VirtAddr>],\n\n kvm_fd: &mut Kvm,\n\n vm_fd: &mut VmFd,\n\n) -> Result<(VcpuFd, VirtAddr)> {\n\n // If no LOAD segment were defined as sallyport blocks\n\n if sallyports.is_empty() {\n\n anyhow::bail!(\"No sallyport blocks defined!\");\n\n }\n\n\n\n let cpuids = kvm_fd.get_supported_cpuid(KVM_MAX_CPUID_ENTRIES)?;\n\n\n\n let vcpu_fd = vm_fd.create_vcpu(0)?;\n\n vcpu_fd.set_cpuid2(&cpuids)?;\n\n\n\n // FIXME: this will be removed with relative addresses in sallyport\n\n // unwrap, because we 
have at least one block\n\n let sallyport_block_start = sallyports.first().unwrap().unwrap();\n\n Ok((vcpu_fd, sallyport_block_start))\n\n}\n", "file_path": "src/backend/kvm/builder.rs", "rank": 52, "score": 125220.34193461714 }, { "content": "/// execute the exec\n\npub fn execute_exec() -> ! {\n\n let header = map_elf(*EXEC_VIRT_ADDR.read());\n\n\n\n let stack = init_stack_with_guard(\n\n EXEC_STACK_VIRT_ADDR_BASE + (random() & 0xFFFF_F000),\n\n EXEC_STACK_SIZE,\n\n PageTableFlags::USER_ACCESSIBLE,\n\n );\n\n\n\n let (entry, sp_handle) = crt0setup(*EXEC_VIRT_ADDR.read(), stack.slice, header);\n\n\n\n #[cfg(feature = \"gdb\")]\n\n unsafe {\n\n // Breakpoint at the exec entry address\n\n asm!(\n\n \"mov dr0, {}\",\n\n \"mov dr7, {}\",\n\n\n\n in(reg) entry.as_u64(),\n\n in(reg) 1u64,\n\n )\n\n };\n\n\n\n unsafe {\n\n EXEC_READY.store(true, Ordering::Relaxed);\n\n usermode(entry.as_u64(), sp_handle)\n\n }\n\n}\n", "file_path": "internal/shim-sev/src/exec.rs", "rank": 53, "score": 125220.34193461714 }, { "content": "fn main() -> io::Result<()> {\n\n let mut dir_name = String::new();\n\n\n\n stdin().read_line(&mut dir_name)?;\n\n\n\n let dir_name = PathBuf::from(dir_name);\n\n\n\n let listener = UnixListener::bind(dir_name.join(\"enarx_unix_echo_to_bin\"))?;\n\n let (mut socket, _) = listener.accept()?;\n\n\n\n let mut buffer = Vec::new();\n\n socket.read_to_end(&mut buffer)?;\n\n\n\n let mut socket = UnixStream::connect(dir_name.join(\"enarx_unix_echo_from_bin\")).unwrap();\n\n socket.write_all(&buffer)?;\n\n Ok(())\n\n}\n", "file_path": "integration/simple/src/unix_echo.rs", "rank": 54, "score": 123384.70456640657 }, { "content": "/// print a stack trace from a stack frame pointer\n\npub fn print_stack_trace() {\n\n let mut rbp: usize;\n\n\n\n unsafe {\n\n asm!(\"mov {}, rbp\", out(reg) rbp);\n\n stack_trace_from_rbp(rbp);\n\n }\n\n}\n\n\n\n#[cfg(feature = \"dbg\")]\n\nunsafe fn stack_trace_from_rbp(mut rbp: usize) {\n\n use crate::exec::EXEC_VIRT_ADDR;\n\n use 
crate::paging::SHIM_PAGETABLE;\n\n use crate::print;\n\n\n\n use core::mem::size_of;\n\n use core::sync::atomic::Ordering;\n\n\n\n use x86_64::structures::paging::Translate;\n\n\n", "file_path": "internal/shim-sev/src/debug.rs", "rank": 55, "score": 123318.78846646624 }, { "content": "pub fn get_att_syscall(\n\n nonce: Option<&mut [u8]>,\n\n buf: Option<&mut [u8]>,\n\n) -> std::io::Result<(usize, TeeTech)> {\n\n let rax: i64;\n\n let rdx: u64;\n\n\n\n let arg1 = if let Some(ref nonce) = nonce {\n\n nonce.len()\n\n } else {\n\n 0usize\n\n };\n\n\n\n let arg0 = if let Some(nonce) = nonce {\n\n nonce.as_ptr() as usize\n\n } else {\n\n 0usize\n\n };\n\n\n\n let arg3 = if let Some(ref buf) = buf {\n", "file_path": "integration/sev_attestation/src/main.rs", "rank": 56, "score": 123318.78846646624 }, { "content": "/// Get a random number\n\npub fn random() -> u64 {\n\n let mut r: u64 = 0;\n\n\n\n for _ in 0..1024 {\n\n if unsafe { core::arch::x86_64::_rdrand64_step(&mut r) } == 1 {\n\n return r;\n\n }\n\n }\n\n\n\n panic!(\"Could not get random!\")\n\n}\n", "file_path": "internal/shim-sev/src/random.rs", "rank": 57, "score": 121564.8386433132 }, { "content": "/// Allocate a stack with guard pages\n\npub fn init_stack_with_guard(\n\n start: VirtAddr,\n\n stack_size: u64,\n\n extra_flags: PageTableFlags,\n\n) -> GuardedStack {\n\n let mut allocator = ALLOCATOR.write();\n\n\n\n // guard page\n\n allocator\n\n .map_memory(\n\n PhysAddr::new(0),\n\n start - Page::<Size4KiB>::SIZE,\n\n Page::<Size4KiB>::SIZE as _,\n\n PageTableFlags::empty(),\n\n PageTableFlags::PRESENT | PageTableFlags::WRITABLE,\n\n )\n\n .expect(\"Stack guard page mapping failed\");\n\n\n\n let mem_slice = allocator\n\n .allocate_and_map_memory(\n", "file_path": "internal/shim-sev/src/shim_stack.rs", "rank": 58, "score": 121511.5873381107 }, { "content": "fn rerun_src(path: impl AsRef<Path>) {\n\n for entry in find_files_with_extensions(&[\"rs\", \"s\", \"S\"], &path) {\n\n if let Some(path) = entry.to_str() 
{\n\n println!(\"cargo:rerun-if-changed={}\", path)\n\n }\n\n }\n\n}\n\n\n", "file_path": "build.rs", "rank": 59, "score": 119675.57083328172 }, { "content": "#[inline]\n\npub fn is_printing_enabled() -> bool {\n\n unsafe { PRINT_INHIBITOR.load(Ordering::Acquire) == 0 }\n\n}\n\n\n\n/// Temporarily disable (debug) printing\n\n///\n\n/// Creating a `PrintBarrier` will prevent printing, until the object is dropped.\n\n/// This helps to avoid dead locks with debug printing, which has to be temporarily\n\n/// disabled to avoid dead locks with other Mutexes and RwLocks.\n\npub struct PrintBarrier;\n\n\n\nimpl Default for PrintBarrier {\n\n fn default() -> Self {\n\n unsafe {\n\n PRINT_INHIBITOR.fetch_add(1, Ordering::Release);\n\n }\n\n Self\n\n }\n\n}\n\n\n", "file_path": "internal/shim-sev/src/print.rs", "rank": 60, "score": 119663.28517516228 }, { "content": "fn main() -> std::io::Result<()> {\n\n let (len, tech) = get_att_syscall(None, None)?;\n\n\n\n if matches!(tech, TeeTech::Sev) {\n\n assert_eq!(len, 4000);\n\n\n\n get_att([\n\n 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D,\n\n 0x0E, 0x0F, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B,\n\n 0x1C, 0x1D, 0x1E, 0x1F, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29,\n\n 0x2A, 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,\n\n 0x38, 0x39, 0x3A, 0x3B, 0x3C, 0x3D, 0x3E, 0x3F,\n\n ])?;\n\n\n\n get_att([0; 64])?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "integration/sev_attestation/src/main.rs", "rank": 61, "score": 118209.39018034504 }, { "content": "#[test]\n\n#[serial]\n\nfn read() {\n\n const INPUT: &[u8; 12] = b\"hello world\\n\";\n\n run_test(\"read\", 0, &INPUT[..], &INPUT[..], None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 62, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn getegid() {\n\n run_test(\"getegid\", 0, None, None, None);\n\n}\n\n\n", "file_path": 
"tests/integration_tests.rs", "rank": 63, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn getuid() {\n\n run_test(\"getuid\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 64, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn socket() {\n\n run_test(\"socket\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 65, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn bind() {\n\n run_test(\"bind\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 66, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn close() {\n\n run_test(\"close\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 67, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn return_1() {\n\n // This module does, in fact, return 1. But function return values\n\n // are separate from setting the process exit status code, so\n\n // we still expect a return code of '0' here.\n\n run_wasm_test(\"return_1.wasm\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/wasmldr_tests.rs", "rank": 68, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn echo() {\n\n let mut input: Vec<u8> = Vec::with_capacity(2 * 1024 * 1024);\n\n\n\n for i in 0..input.capacity() {\n\n input.push(i as _);\n\n }\n\n run_crate(\n\n \"integration/simple\",\n\n \"echo\",\n\n 0,\n\n input.as_slice(),\n\n input.as_slice(),\n\n None,\n\n );\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 69, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn memspike() {\n\n run_crate(\"integration/simple\", \"memspike\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 70, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn listen() {\n\n run_test(\"listen\", 0, None, None, 
None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 71, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn cpuid() {\n\n run_crate(\"integration/simple\", \"cpuid\", 0, None, None, None);\n\n}\n", "file_path": "tests/integration_tests.rs", "rank": 72, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn geteuid() {\n\n run_test(\"geteuid\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 73, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn getgid() {\n\n run_test(\"getgid\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 74, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn readv() {\n\n const INPUT: &[u8; 36] = b\"hello, worldhello, worldhello, world\";\n\n run_test(\"readv\", 0, &INPUT[..], &INPUT[..], None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 75, "score": 117413.68727562498 }, { "content": "#[test]\n\n#[serial]\n\nfn uname() {\n\n run_test(\"uname\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 76, "score": 117413.68727562498 }, { "content": "fn run_wasm_test<'a>(\n\n wasm: &str,\n\n status: i32,\n\n input: impl Into<Option<&'a [u8]>>,\n\n expected_stdout: impl Into<Option<&'a [u8]>>,\n\n expected_stderr: impl Into<Option<&'a [u8]>>,\n\n) -> Output {\n\n compile(wasm);\n\n\n\n let output = enarx_run(wasm, input);\n\n check_output(&output, status, expected_stdout, expected_stderr);\n\n output\n\n}\n\n\n", "file_path": "tests/wasmldr_tests.rs", "rank": 77, "score": 115949.31451967452 }, { "content": "#[test]\n\n#[serial]\n\nfn exit_one() {\n\n run_test(\"exit_one\", 1, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 78, "score": 115340.08267065338 }, { "content": "#[test]\n\n#[serial]\n\nfn get_att() {\n\n run_test(\"get_att\", 0, None, None, None);\n\n}\n\n\n", "file_path": 
"tests/integration_tests.rs", "rank": 79, "score": 115340.08267065338 }, { "content": "#[test]\n\n#[serial]\n\nfn write_stdout() {\n\n run_test(\"write_stdout\", 0, None, &b\"hi\\n\"[..], None);\n\n}\n\n\n\n#[cfg(not(feature = \"dbg\"))]\n\n#[test]\n\n#[serial]\n\n// v0.1.0 KEEP-CONFIG HACK: logging is hardcoded to send output to stderr,\n\n// which clobbers the output here. Skip this test until we have a way to\n\n// disable log output and/or send it somewhere other than stderr.\n", "file_path": "tests/integration_tests.rs", "rank": 80, "score": 115340.08267065338 }, { "content": "#[test]\n\n#[serial]\n\nfn clock_gettime() {\n\n use libc::{clock_gettime, CLOCK_MONOTONIC};\n\n\n\n // Get the time from inside the keep.\n\n let stdout = run_test(\"clock_gettime\", 0, None, None, None).stdout;\n\n let theirs: libc::timespec = read_item(stdout.as_slice()).unwrap();\n\n\n\n // Get the time from outside the keep.\n\n let ours = unsafe {\n\n let mut ts = MaybeUninit::uninit();\n\n assert_eq!(0, clock_gettime(CLOCK_MONOTONIC, ts.as_mut_ptr()));\n\n ts.assume_init()\n\n };\n\n\n\n // Validate that the difference in time is minor...\n\n const NSEC_PER_SEC: libc::c_long = 1_000_000_000;\n\n const MAX_SEC: libc::c_long = 2;\n\n\n\n let sec = ours.tv_sec - theirs.tv_sec;\n\n assert!(sec >= 0);\n\n assert!(sec < MAX_SEC);\n\n\n\n let nsec = sec * NSEC_PER_SEC + ours.tv_nsec - theirs.tv_nsec;\n\n assert!(nsec >= 0);\n\n assert!(nsec < MAX_SEC * NSEC_PER_SEC);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 81, "score": 115340.08267065338 }, { "content": "#[test]\n\n#[serial]\n\nfn exit_zero() {\n\n run_test(\"exit_zero\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 82, "score": 115340.08267065338 }, { "content": "#[test]\n\n#[serial]\n\nfn read_udp() {\n\n // The maximum UDP message size is 65507, as determined by the following formula:\n\n // 0xffff - (sizeof(minimal IP Header) + sizeof(UDP Header)) = 65535-(20+8) = 
65507\n\n const MAX_UDP_PACKET_SIZE: usize = 65507;\n\n\n\n let mut input: Vec<u8> = Vec::with_capacity(MAX_UDP_PACKET_SIZE);\n\n for i in 0..input.capacity() {\n\n input.push(i as _);\n\n }\n\n run_test(\"read_udp\", 0, input.as_slice(), input.as_slice(), None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 83, "score": 115340.08267065338 }, { "content": "#[test]\n\n#[serial]\n\nfn unix_echo() {\n\n let tmpdir = Arc::new(Builder::new().prefix(\"unix_echo\").tempdir().unwrap());\n\n const FILENAME_IN: &'static str = \"enarx_unix_echo_to_bin\";\n\n const FILENAME_OUT: &'static str = \"enarx_unix_echo_from_bin\";\n\n let mut input: Vec<u8> = Vec::with_capacity(2 * 1024 * 1024);\n\n\n\n let _ = fs::remove_file(FILENAME_IN);\n\n\n\n for i in 0..input.capacity() {\n\n input.push(i as _);\n\n }\n\n\n\n let handle = thread::spawn({\n\n let tmpdir = tmpdir.clone();\n\n move || {\n\n let socket_path = tmpdir.path().join(FILENAME_IN);\n\n let mut cnt = 0;\n\n while cnt < 100 && !socket_path.exists() {\n\n cnt += 1;\n\n thread::sleep(Duration::from_millis(500))\n", "file_path": "tests/integration_tests.rs", "rank": 84, "score": 115340.08267065338 }, { "content": "#[ignore]\n\nfn write_emsgsize() {\n\n run_test(\"write_emsgsize\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 85, "score": 115335.17444763068 }, { "content": "#[ignore]\n\nfn write_stderr() {\n\n run_test(\"write_stderr\", 0, None, None, &b\"hi\\n\"[..]);\n\n}\n\n\n\n#[test]\n\n#[serial]\n\n// FIXME this should not be ignored, this was applied as part\n\n// of a commit that must be reverted and implemented properly.\n", "file_path": "tests/integration_tests.rs", "rank": 86, "score": 115335.17444763068 }, { "content": "#[cfg(feature = \"backend-sev\")]\n\n#[test]\n\n#[serial]\n\nfn rust_sev_attestation() {\n\n run_crate(\n\n \"integration/sev_attestation\",\n\n \"sev_attestation\",\n\n 0,\n\n None,\n\n None,\n\n None,\n\n );\n\n}\n\n\n", "file_path": 
"tests/integration_tests.rs", "rank": 87, "score": 113361.95024145363 }, { "content": "trait Config: Sized {\n\n type Flags;\n\n\n\n fn flags(flags: u32) -> Self::Flags;\n\n fn new(shim: &Binary, exec: &Binary) -> Result<Self>;\n\n}\n\n\n", "file_path": "src/backend/mod.rs", "rank": 88, "score": 111487.10952909602 }, { "content": "#[cfg(feature = \"backend-sgx\")]\n\n#[test]\n\n#[serial]\n\nfn sgx_get_att_quote() {\n\n run_test(\"sgx_get_att_quote\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 89, "score": 111472.85620228926 }, { "content": "fn create(path: &Path) {\n\n match std::fs::create_dir(&path) {\n\n Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {}\n\n Err(e) => {\n\n eprintln!(\"Can't create {:#?} : {:#?}\", path, e);\n\n std::process::exit(1);\n\n }\n\n Ok(_) => {}\n\n }\n\n}\n\n\n", "file_path": "tests/wasmldr_tests.rs", "rank": 90, "score": 109793.94157370448 }, { "content": "#[cfg(feature = \"backend-sgx\")]\n\n#[test]\n\n#[serial]\n\nfn sgx_get_att_quote_size() {\n\n run_test(\"sgx_get_att_quote_size\", 0, None, None, None);\n\n}\n\n\n", "file_path": "tests/integration_tests.rs", "rank": 91, "score": 109666.5063103851 }, { "content": "pub trait Thread {\n\n /// Enters the keep.\n\n fn enter(&mut self) -> Result<Command>;\n\n}\n\n\n\npub enum Command<'a> {\n\n #[allow(dead_code)]\n\n SysCall(&'a mut Block),\n\n\n\n #[allow(dead_code)]\n\n CpuId(&'a mut Block),\n\n\n\n #[cfg(feature = \"gdb\")]\n\n #[allow(dead_code)]\n\n Gdb(&'a mut Block, &'a mut Option<std::net::TcpStream>),\n\n\n\n #[allow(dead_code)]\n\n Continue,\n\n}\n\n\n", "file_path": "src/backend/mod.rs", "rank": 92, "score": 108998.39518737607 }, { "content": "pub trait Keep {\n\n /// Creates a new thread in the keep.\n\n fn spawn(self: Arc<Self>) -> Result<Option<Box<dyn Thread>>>;\n\n}\n\n\n", "file_path": "src/backend/mod.rs", "rank": 93, "score": 108998.39518737607 }, { "content": "#[derive(Copy, Clone)]\n\n#[non_exhaustive]\n\nenum 
GhcbError {\n\n /// Unexpected state from the VMM\n\n VmmError,\n\n /// Instruction caused exception\n\n Exception,\n\n}\n\n\n", "file_path": "internal/shim-sev/src/snp/ghcb.rs", "rank": 94, "score": 108248.0966136038 }, { "content": "/// load the elf binary\n\nfn map_elf(app_virt_start: VirtAddr) -> &'static Header {\n\n let header: &Header = unsafe { &crate::_ENARX_EXEC_START };\n\n let header_ptr = header as *const _;\n\n\n\n if !header.e_ident[..ELFMAG.len()].eq(ELFMAG) {\n\n panic!(\"Not valid ELF\");\n\n }\n\n\n\n let headers: &[ProgramHeader] = unsafe {\n\n #[allow(clippy::cast_ptr_alignment)]\n\n core::slice::from_raw_parts(\n\n (header_ptr as usize as *const u8).offset(header.e_phoff as _) as *const ProgramHeader,\n\n header.e_phnum as _,\n\n )\n\n };\n\n\n\n // Convert to shim physical addresses with potential SEV C-Bit set\n\n let code_start_phys = ShimPhysAddr::try_from(header as *const _)\n\n .unwrap()\n\n .raw()\n", "file_path": "internal/shim-sev/src/exec.rs", "rank": 95, "score": 107524.95704118979 }, { "content": "#[derive(Copy, Clone, PartialEq)]\n\n#[repr(u8)]\n\n#[non_exhaustive]\n\nenum SnpMsgType {\n\n /*\n\n TypeInvalid = 0,\n\n CpuidReq,\n\n CpuidRsp,\n\n KeyReq,\n\n KeyRsp,\n\n */\n\n ReportReq = 5,\n\n ReportRsp = 6,\n\n /*\n\n ExportReq,\n\n ExportRsp,\n\n ImportReq,\n\n ImportRsp,\n\n AbsorbReq,\n\n AbsorbRsp,\n\n VmrkReq,\n\n VmrkRsp,\n\n */\n\n}\n\n\n", "file_path": "internal/shim-sev/src/snp/ghcb.rs", "rank": 96, "score": 106330.68441689065 }, { "content": "#[inline]\n\nfn shim_virt_to_enc_phys<T>(p: *mut T) -> PhysAddr {\n\n let virt = VirtAddr::from_ptr(p);\n\n debug_assert!(virt.as_u64() > SHIM_VIRT_OFFSET);\n\n PhysAddr::new(virt.as_u64().checked_sub(SHIM_VIRT_OFFSET).unwrap() | get_cbit_mask())\n\n}\n\n\n\nunsafe impl GlobalAlloc for RwLocked<EnarxAllocator> {\n\n unsafe fn alloc(&self, layout: Layout) -> *mut u8 {\n\n let mut this = self.write();\n\n this.try_alloc(layout)\n\n .map_or(core::ptr::null_mut(), |p| p.as_ptr())\n\n 
}\n\n\n\n unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {\n\n let mut this = self.write();\n\n this.deallocate(ptr, layout);\n\n }\n\n}\n", "file_path": "internal/shim-sev/src/allocator.rs", "rank": 97, "score": 104855.7788604983 }, { "content": "pub trait KeepPersonality {\n\n fn map(_vm_fd: &mut VmFd, _region: &Region) -> std::io::Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/backend/kvm/mod.rs", "rank": 98, "score": 104645.20271479103 }, { "content": "#[inline]\n\npub fn get_cpuid_max(leaf: u32) -> (u32, u32) {\n\n let CpuidResult { eax, ebx, .. } = cpuid(leaf);\n\n (eax, ebx)\n\n}\n", "file_path": "internal/shim-sev/src/snp/cpuid_page.rs", "rank": 99, "score": 101660.03017258187 } ]
Rust
infra/src/shader.rs
MrShiposha/apriori-engine
faaf897fb72c093a8bc86498bf6f7e913ebc8f02
use { std::{ fs, io::prelude::*, path::{Path, PathBuf} }, shaderc::{ Compiler, CompileOptions, IncludeType, IncludeCallbackResult, ResolvedInclude }, convert_case::{Case, Casing}, pathdiff::diff_paths, crate::{ Result, Error, GENERATED_FILE_DIR, ffi::FOREIGN_FN_IFACE_DIR_NAME }, }; pub fn process_shader_srcs(src_path: &PathBuf, dir: &Path) -> Result<()> { const SHADER_DIR_NAME: &'static str = "gpu"; for entry in fs::read_dir(dir)? { let entry = entry?; let path = entry.path(); if path.is_file() { continue; } if let Some(dir_name) = path.components().last() { if dir_name.as_os_str().to_string_lossy() == SHADER_DIR_NAME { process_shader_dir(src_path, dir, &path)?; break; } } } Ok(()) } fn process_shader_dir(src_path: &PathBuf, top_shader_dir: &Path, dir: &Path) -> Result<()> { for entry in fs::read_dir(dir)? { let entry = entry?; let path = entry.path(); if path.is_dir() { process_shader_dir(src_path, top_shader_dir, &path)?; } else { compile_shader(src_path, top_shader_dir, &path)?; } } Ok(()) } fn compile_shader(src_path: &PathBuf, top_shader_dir: &Path, file_path: &Path) -> Result<()> { let mut options = CompileOptions::new() .ok_or(Error::Internal("shader compile options allocation failure".to_string()))?; options.set_include_callback(include_callback(src_path)); let source_language; if let Some(ext) = file_path.extension() { if ext == "glsl" { source_language = shaderc::SourceLanguage::GLSL; } else if ext == "hlsl" { source_language = shaderc::SourceLanguage::HLSL; } else { return Err( Error::ShaderFile( format!( "the shader {} has unknown extension {}", file_path.display(), ext.to_string_lossy() ) ) ) } } else { return Err( Error::ShaderFile( format!( "the shader {} has no extension", file_path.display() ) ) ) } options.set_source_language(source_language); options.add_macro_definition("___gpu___", None); let source_text = fs::read_to_string(file_path)?; let shader_kind = shaderc::ShaderKind::InferFromSource; let input_file_name = file_path.to_string_lossy(); 
let entry_point_name = "main"; let mut compiler = Compiler::new() .ok_or(Error::Internal("shader compiler allocation failure".to_string()))?; let spirv = compiler.compile_into_spirv( &source_text, shader_kind, &input_file_name, entry_point_name, Some(&options) )?; let binary_spirv = spirv.as_binary(); let binary_spirv_code_size = binary_spirv.len() * std::mem::size_of_val(&binary_spirv[0]); let file_name = file_path .file_stem() .expect("shader file name") .to_str() .expect("shader file name str"); let parent_dir = file_path.parent() .ok_or(Error::ShaderFile("unable to get shader parent dir".into()))?; let shader_relative_path = diff_paths( parent_dir, top_shader_dir ).ok_or(Error::ShaderFile("unable to get shader relative path".into()))?; let shader_ffi_dir = src_path .join(FOREIGN_FN_IFACE_DIR_NAME) .join(GENERATED_FILE_DIR) .join(shader_relative_path); let shader_ffi_base = shader_ffi_dir.join(file_name); if !shader_ffi_dir.exists() { fs::create_dir_all(shader_ffi_dir)?; } let mut shader_ffi_header = shader_ffi_base.clone(); shader_ffi_header.set_extension("h"); println!("cargo:rerun-if-changed={}", shader_ffi_header.display()); let do_not_modify_comment = format! { r#"// This file generated automatically. // DO NOT MODIFY IT MANUALLY! // Original shader source path: file:///{shader_source_path}"#, shader_source_path = file_path.display() }; let header_guard = format!( "___FFI_SHADER_HEADER_{}_H___", file_name.to_case(Case::UpperSnake) ); let shader_snake_name = file_name.to_case(Case::Snake); let shader_fn_decl = format!("uint32_t *{}()", shader_snake_name); let shader_code_size_fn_decl = format!("size_t {}_code_size()", shader_snake_name); let spirv_binary_hex = binary_spirv.iter() .map(|word| format!("{:#010X}", word)) .collect::<Vec<_>>() .join(",\n\t\t"); let shader_ffi_header_content = format! 
{ r#"{do_not_modify_comment} #ifndef {header_guard} #define {header_guard} #include <stdint.h> {shader_fn_decl}; {shader_code_size_fn_decl}; #endif // {header_guard}"#, do_not_modify_comment = do_not_modify_comment, header_guard = header_guard, shader_fn_decl = shader_fn_decl, shader_code_size_fn_decl = shader_code_size_fn_decl, }; let shader_ffi_src_content = format! { r#"{do_not_modify_comment} #include "{header_file_path}" {shader_fn_decl} {{ static uint32_t shader_src[] = {{ {spirv_binary} }}; return shader_src; }} {shader_code_size_fn_decl} {{ return {shader_code_size}ULL; }} "#, do_not_modify_comment = do_not_modify_comment, header_file_path = shader_ffi_header.display(), shader_fn_decl = shader_fn_decl, spirv_binary = spirv_binary_hex, shader_code_size_fn_decl = shader_code_size_fn_decl, shader_code_size = binary_spirv_code_size }; let mut out = fs::OpenOptions::new() .create(true) .write(true) .truncate(true) .open(shader_ffi_header.clone())?; out.write_all(shader_ffi_header_content.as_bytes())?; let mut shader_ffi_src = shader_ffi_base.clone(); shader_ffi_src.set_extension("c"); println!("cargo:rerun-if-changed={}", shader_ffi_src.display()); let mut out = fs::OpenOptions::new() .create(true) .write(true) .truncate(true) .open(shader_ffi_src)?; out.write_all(shader_ffi_src_content.as_bytes())?; Ok(()) } fn include_callback(src_path: &PathBuf) -> impl Fn(&str, IncludeType, &str, usize) -> IncludeCallbackResult { let src_path = src_path.clone(); move |requested_source, include_type, requesting_source, _include_depth| { let header_path; let requested_source_path = Path::new(requested_source); let standard_path = src_path.join(requested_source_path); match include_type { IncludeType::Relative => { let requesting_source_path = Path::new(requesting_source); let requesting_dir_path = requesting_source_path.parent() .ok_or(format!("{}: expected parent path", requested_source_path.display()))?; let relative_path = requesting_dir_path.join(requested_source_path); if 
relative_path.is_file() { header_path = relative_path; } else if standard_path.is_file() { header_path = standard_path.clone(); } else { return Err("relative header path is not found".to_string()); } }, IncludeType::Standard => if standard_path.is_file() { header_path = standard_path.clone(); } else { return Err("standard header path is not found".to_string()); } } let header_content = fs::read_to_string(header_path) .map_err(|err| err.to_string())?; let resolved_include = ResolvedInclude { resolved_name: standard_path.to_string_lossy().to_string(), content: header_content }; Ok(resolved_include) } }
use { std::{ fs, io::prelude::*, path::{Path, PathBuf} }, shaderc::{ Compiler, CompileOptions, IncludeType, IncludeCallbackResult, ResolvedInclude }, convert_case::{Case, Casing}, pathdiff::diff_paths, crate::{ Result, Error, GENERATED_FILE_DIR, ffi::FOREIGN_FN_IFACE_DIR_NAME }, }; pub fn process_shader_srcs(src_path: &PathBuf, dir: &Path) -> Result<()> { const SHADER_DIR_NAME: &'static str = "gpu"; for entry in fs::read_dir(dir)? { let entry = entry?; let path =
shaderc::SourceLanguage::GLSL; } else if ext == "hlsl" { source_language = shaderc::SourceLanguage::HLSL; } else { return Err( Error::ShaderFile( format!( "the shader {} has unknown extension {}", file_path.display(), ext.to_string_lossy() ) ) ) } } else { return Err( Error::ShaderFile( format!( "the shader {} has no extension", file_path.display() ) ) ) } options.set_source_language(source_language); options.add_macro_definition("___gpu___", None); let source_text = fs::read_to_string(file_path)?; let shader_kind = shaderc::ShaderKind::InferFromSource; let input_file_name = file_path.to_string_lossy(); let entry_point_name = "main"; let mut compiler = Compiler::new() .ok_or(Error::Internal("shader compiler allocation failure".to_string()))?; let spirv = compiler.compile_into_spirv( &source_text, shader_kind, &input_file_name, entry_point_name, Some(&options) )?; let binary_spirv = spirv.as_binary(); let binary_spirv_code_size = binary_spirv.len() * std::mem::size_of_val(&binary_spirv[0]); let file_name = file_path .file_stem() .expect("shader file name") .to_str() .expect("shader file name str"); let parent_dir = file_path.parent() .ok_or(Error::ShaderFile("unable to get shader parent dir".into()))?; let shader_relative_path = diff_paths( parent_dir, top_shader_dir ).ok_or(Error::ShaderFile("unable to get shader relative path".into()))?; let shader_ffi_dir = src_path .join(FOREIGN_FN_IFACE_DIR_NAME) .join(GENERATED_FILE_DIR) .join(shader_relative_path); let shader_ffi_base = shader_ffi_dir.join(file_name); if !shader_ffi_dir.exists() { fs::create_dir_all(shader_ffi_dir)?; } let mut shader_ffi_header = shader_ffi_base.clone(); shader_ffi_header.set_extension("h"); println!("cargo:rerun-if-changed={}", shader_ffi_header.display()); let do_not_modify_comment = format! { r#"// This file generated automatically. // DO NOT MODIFY IT MANUALLY! 
// Original shader source path: file:///{shader_source_path}"#, shader_source_path = file_path.display() }; let header_guard = format!( "___FFI_SHADER_HEADER_{}_H___", file_name.to_case(Case::UpperSnake) ); let shader_snake_name = file_name.to_case(Case::Snake); let shader_fn_decl = format!("uint32_t *{}()", shader_snake_name); let shader_code_size_fn_decl = format!("size_t {}_code_size()", shader_snake_name); let spirv_binary_hex = binary_spirv.iter() .map(|word| format!("{:#010X}", word)) .collect::<Vec<_>>() .join(",\n\t\t"); let shader_ffi_header_content = format! { r#"{do_not_modify_comment} #ifndef {header_guard} #define {header_guard} #include <stdint.h> {shader_fn_decl}; {shader_code_size_fn_decl}; #endif // {header_guard}"#, do_not_modify_comment = do_not_modify_comment, header_guard = header_guard, shader_fn_decl = shader_fn_decl, shader_code_size_fn_decl = shader_code_size_fn_decl, }; let shader_ffi_src_content = format! { r#"{do_not_modify_comment} #include "{header_file_path}" {shader_fn_decl} {{ static uint32_t shader_src[] = {{ {spirv_binary} }}; return shader_src; }} {shader_code_size_fn_decl} {{ return {shader_code_size}ULL; }} "#, do_not_modify_comment = do_not_modify_comment, header_file_path = shader_ffi_header.display(), shader_fn_decl = shader_fn_decl, spirv_binary = spirv_binary_hex, shader_code_size_fn_decl = shader_code_size_fn_decl, shader_code_size = binary_spirv_code_size }; let mut out = fs::OpenOptions::new() .create(true) .write(true) .truncate(true) .open(shader_ffi_header.clone())?; out.write_all(shader_ffi_header_content.as_bytes())?; let mut shader_ffi_src = shader_ffi_base.clone(); shader_ffi_src.set_extension("c"); println!("cargo:rerun-if-changed={}", shader_ffi_src.display()); let mut out = fs::OpenOptions::new() .create(true) .write(true) .truncate(true) .open(shader_ffi_src)?; out.write_all(shader_ffi_src_content.as_bytes())?; Ok(()) } fn include_callback(src_path: &PathBuf) -> impl Fn(&str, IncludeType, &str, usize) -> 
IncludeCallbackResult { let src_path = src_path.clone(); move |requested_source, include_type, requesting_source, _include_depth| { let header_path; let requested_source_path = Path::new(requested_source); let standard_path = src_path.join(requested_source_path); match include_type { IncludeType::Relative => { let requesting_source_path = Path::new(requesting_source); let requesting_dir_path = requesting_source_path.parent() .ok_or(format!("{}: expected parent path", requested_source_path.display()))?; let relative_path = requesting_dir_path.join(requested_source_path); if relative_path.is_file() { header_path = relative_path; } else if standard_path.is_file() { header_path = standard_path.clone(); } else { return Err("relative header path is not found".to_string()); } }, IncludeType::Standard => if standard_path.is_file() { header_path = standard_path.clone(); } else { return Err("standard header path is not found".to_string()); } } let header_content = fs::read_to_string(header_path) .map_err(|err| err.to_string())?; let resolved_include = ResolvedInclude { resolved_name: standard_path.to_string_lossy().to_string(), content: header_content }; Ok(resolved_include) } }
entry.path(); if path.is_file() { continue; } if let Some(dir_name) = path.components().last() { if dir_name.as_os_str().to_string_lossy() == SHADER_DIR_NAME { process_shader_dir(src_path, dir, &path)?; break; } } } Ok(()) } fn process_shader_dir(src_path: &PathBuf, top_shader_dir: &Path, dir: &Path) -> Result<()> { for entry in fs::read_dir(dir)? { let entry = entry?; let path = entry.path(); if path.is_dir() { process_shader_dir(src_path, top_shader_dir, &path)?; } else { compile_shader(src_path, top_shader_dir, &path)?; } } Ok(()) } fn compile_shader(src_path: &PathBuf, top_shader_dir: &Path, file_path: &Path) -> Result<()> { let mut options = CompileOptions::new() .ok_or(Error::Internal("shader compile options allocation failure".to_string()))?; options.set_include_callback(include_callback(src_path)); let source_language; if let Some(ext) = file_path.extension() { if ext == "glsl" { source_language =
random
[ { "content": "pub fn process_c_srcs(dir: &Path, include_dirs: &Vec<PathBuf>, cc_build: &mut cc::Build) -> Result<()> {\n\n for entry in fs::read_dir(dir)? {\n\n let entry = entry?;\n\n let path = entry.path();\n\n\n\n if path.is_file() {\n\n continue;\n\n }\n\n\n\n if let Some(dir_name) = path.components().last() {\n\n if dir_name.as_os_str().to_string_lossy() == FOREIGN_FN_IFACE_DIR_NAME {\n\n let builder = bindgen::Builder::default()\n\n .clang_args(\n\n include_dirs.iter()\n\n .map(|path| format!(\"-F{}\", path.display()))\n\n )\n\n .blacklist_item(VULKAN_ITEM_REGEX)\n\n .raw_line(\"#![allow(unused_variables)]\")\n\n .raw_line(\"#![allow(non_snake_case)]\")\n\n .raw_line(\"#![allow(non_camel_case_types)]\")\n", "file_path": "infra/src/ffi.rs", "rank": 2, "score": 189508.1791832314 }, { "content": "pub fn last_error(error_kind: &str) -> Error {\n\n let mut buffer = vec![0 as WCHAR; 1024];\n\n\n\n unsafe {\n\n FormatMessageW(\n\n FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,\n\n std::ptr::null_mut(),\n\n GetLastError(),\n\n MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT) as DWORD,\n\n buffer.as_mut_ptr(),\n\n buffer.len() as DWORD,\n\n std::ptr::null_mut()\n\n );\n\n }\n\n\n\n let null_idx = buffer.as_slice().iter().position(|&c| c == '\\r' as WCHAR).unwrap();\n\n buffer.truncate(null_idx);\n\n\n\n let description = OsString::from_wide(buffer.as_slice());\n\n let description = description.to_string_lossy().to_string();\n\n\n\n let description = format!(\"{} -- {}\", error_kind, description);\n\n\n\n Error::OsSpecific(description)\n\n}", "file_path": "lib/src/os/windows/mod.rs", "rank": 4, "score": 180737.76790604254 }, { "content": "pub fn execute() -> Result<()> {\n\n let mut msg: MSG = unsafe {\n\n std::mem::zeroed()\n\n };\n\n\n\n log::trace! {\n\n target: LOG_TARGET,\n\n \"execute IO\"\n\n }\n\n\n\n while is_active()? 
{\n\n for hwnd in WINDOWS.read()?.iter() {\n\n unsafe {\n\n if PeekMessageW(\n\n &mut msg,\n\n **hwnd,\n\n 0,\n\n 0,\n\n PM_REMOVE\n\n ) > 0 {\n\n TranslateMessage(&msg);\n\n DispatchMessageW(&msg);\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/src/io/win_io.rs", "rank": 5, "score": 143885.37440098703 }, { "content": "pub fn stop() -> Result<()> {\n\n log::trace! {\n\n target: LOG_TARGET,\n\n \"stop IO\"\n\n }\n\n\n\n let mut is_active = IS_IO_ACTIVE.write()?;\n\n *is_active = false;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/src/io/win_io.rs", "rank": 6, "score": 143885.37440098703 }, { "content": "pub fn is_active() -> Result<bool> {\n\n IS_IO_ACTIVE.read()\n\n .map(|v| *v)\n\n .map_err(|err| err.into())\n\n}", "file_path": "lib/src/io/win_io.rs", "rank": 7, "score": 135697.37112038062 }, { "content": "fn process_ffi_dir(dir: &Path, mut builder: bindgen::Builder, cc_build: &mut cc::Build)\n\n -> Result<(bindgen::Builder, usize)> {\n\n let c_ext = \"c\";\n\n let h_ext = \"h\";\n\n\n\n let mut bindings_count = 0;\n\n\n\n for entry in fs::read_dir(dir)? 
{\n\n let entry = entry?;\n\n let path = entry.path();\n\n\n\n if path.is_dir() {\n\n let (subdir_builder, subdir_bindins_count) = process_ffi_dir(\n\n &path,\n\n builder,\n\n cc_build\n\n )?;\n\n\n\n builder = subdir_builder;\n\n bindings_count += subdir_bindins_count;\n", "file_path": "infra/src/ffi.rs", "rank": 8, "score": 112641.10034714104 }, { "content": "pub fn project_build(\n\n src_path: PathBuf,\n\n include_dirs: Vec<PathBuf>,\n\n libraries: Vec<PathBuf>,\n\n) -> Result<()> {\n\n shader::process_shader_srcs(&src_path, &src_path)?;\n\n\n\n let mut cc_build = cc::Build::new();\n\n cc_build.includes(include_dirs.clone())\n\n .warnings_into_errors(true);\n\n\n\n if cfg!(target_os = \"windows\") {\n\n let c11_std_flag = \"/std:c11\";\n\n let anno_union_suppress_warn = \"/wd4201\";\n\n\n\n cc_build.define(\"___windows___\", None)\n\n .define(\"VK_USE_PLATFORM_WIN32_KHR\", None)\n\n .flag(c11_std_flag)\n\n .flag(anno_union_suppress_warn);\n\n } else if cfg!(target_os = \"macos\") {\n", "file_path": "infra/src/lib.rs", "rank": 9, "score": 104652.93366560253 }, { "content": "fn main() -> infra::Result<()> {\n\n let project_path = Path::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n let src_path = project_path.join(\"src\");\n\n\n\n let vulkan_sdk = env::var(\"VULKAN_SDK\")?;\n\n let vulkan_sdk = Path::new(&vulkan_sdk);\n\n\n\n let include_dirs = vec![\n\n src_path.clone(),\n\n vulkan_sdk.join(\"Include\")\n\n ];\n\n\n\n let libraries = vec![\n\n vulkan_sdk.join(\"Lib\").join(\"vulkan-1\")\n\n ];\n\n\n\n project_build(src_path, include_dirs, libraries)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "lib/build.rs", "rank": 11, "score": 100845.83672542882 }, { "content": " Apriori2Error error;\n", "file_path": "lib/src/ffi/core/result.h", "rank": 12, "score": 93114.90734689924 }, { "content": "fn init_raw_input(hwnd: HWND) -> Result<()> {\n\n let mouse = RAWINPUTDEVICE {\n\n usUsagePage: GENERIC_DESKTOP_CONTROLS,\n\n usUsage: HID_USAGE_GENERIC_MOUSE,\n\n dwFlags: 0,\n\n hwndTarget: 
hwnd\n\n };\n\n\n\n let keyboard = RAWINPUTDEVICE {\n\n usUsagePage: GENERIC_DESKTOP_CONTROLS,\n\n usUsage: HID_USAGE_GENERIC_KEYBOARD,\n\n dwFlags: RIDEV_NOLEGACY,\n\n hwndTarget: hwnd\n\n };\n\n\n\n let mut devices = vec![mouse, keyboard];\n\n\n\n unsafe {\n\n let result = RegisterRawInputDevices(\n\n devices.as_mut_ptr(),\n", "file_path": "lib/src/os/windows/input_handling.rs", "rank": 13, "score": 86237.90599890803 }, { "content": "void error(const char *target, const char *format, ...);\n", "file_path": "lib/src/ffi/core/log.h", "rank": 14, "score": 60476.28598405041 }, { "content": "void error(const char *target, const char *format, ...) {\n\n FFI_LOG_CODE(\"ERROR\", target, format, args)\n", "file_path": "lib/src/ffi/core/log.c", "rank": 15, "score": 60476.28598405041 }, { "content": " VkDevice gpu;\n", "file_path": "lib/src/ffi/graphics/renderer/renderer.impl.h", "rank": 16, "score": 59258.7250366574 }, { "content": "pub trait WindowMethods<Id: io::InputId> {\n\n fn show(&self);\n\n\n\n fn hide(&self);\n\n\n\n fn size(&self) -> Result<WindowSize>;\n\n\n\n fn platform_handle(&self) -> ffi::Handle;\n\n\n\n fn input_handler(&self) -> &io::InputHandler<Id>;\n\n\n\n fn input_handler_mut(&mut self) -> &mut io::InputHandler<Id>;\n\n\n\n fn handle_window_state<H>(&mut self, handler: H)\n\n where\n\n H: FnMut(WindowState) + 'static;\n\n}\n", "file_path": "lib/src/os/mod.rs", "rank": 17, "score": 42390.64872391436 }, { "content": "const char *error_to_string(Apriori2Error error) {\n\n uint32_t general_error = (uint32_t)error;\n\n\n\n#define APRIORI_CASE(v, ...) case v: return #v __VA_ARGS__\n\n#define VK_CASE(v, ...) 
case VK_##v: return \"(Vulkan API) \" #v __VA_ARGS__\n\n\n\n switch (general_error) {\n\n APRIORI_CASE(SUCCESS);\n\n APRIORI_CASE(OUT_OF_MEMORY, \": memory allocation failure\");\n\n APRIORI_CASE(VK_PROC_NOT_FOUND, \": vkGetInstanceProcAddr failed\");\n\n APRIORI_CASE(DEBUG_REPORTER_CREATION, \": unable to create Vulkan Instance debug reporter\");\n\n APRIORI_CASE(LAYERS_NOT_FOUND, \": some Vulkan validation layers was not found\");\n\n APRIORI_CASE(EXTENSIONS_NOT_FOUND, \": some Vulkan extensions was not found\");\n\n APRIORI_CASE(GRAPHICS_QUEUE_FAMILY_NOT_FOUND, \": graphics queue family was not found on the physical device\");\n\n APRIORI_CASE(PRESENT_QUEUE_FAMILY_NOT_FOUND, \": present queue family was not found on the physical device\");\n\n APRIORI_CASE(\n\n RENDERER_QUEUE_FAMILIES_NOT_FOUND,\n\n \": both graphics and present queue families were not found on the physical device\"\n\n );\n\n\n\n VK_CASE(NOT_READY);\n\n VK_CASE(TIMEOUT);\n\n VK_CASE(EVENT_SET);\n\n VK_CASE(EVENT_RESET);\n\n VK_CASE(INCOMPLETE);\n\n VK_CASE(ERROR_OUT_OF_HOST_MEMORY);\n\n VK_CASE(ERROR_OUT_OF_DEVICE_MEMORY);\n\n VK_CASE(ERROR_INITIALIZATION_FAILED);\n\n VK_CASE(ERROR_DEVICE_LOST);\n\n VK_CASE(ERROR_MEMORY_MAP_FAILED);\n\n VK_CASE(ERROR_LAYER_NOT_PRESENT);\n\n VK_CASE(ERROR_EXTENSION_NOT_PRESENT);\n\n VK_CASE(ERROR_FEATURE_NOT_PRESENT);\n\n VK_CASE(ERROR_INCOMPATIBLE_DRIVER);\n\n VK_CASE(ERROR_TOO_MANY_OBJECTS);\n\n VK_CASE(ERROR_FORMAT_NOT_SUPPORTED);\n\n VK_CASE(ERROR_SURFACE_LOST_KHR);\n\n VK_CASE(ERROR_NATIVE_WINDOW_IN_USE_KHR);\n\n VK_CASE(SUBOPTIMAL_KHR);\n\n VK_CASE(ERROR_OUT_OF_DATE_KHR);\n\n VK_CASE(ERROR_INCOMPATIBLE_DISPLAY_KHR);\n\n VK_CASE(ERROR_VALIDATION_FAILED_EXT);\n\n VK_CASE(ERROR_INVALID_SHADER_NV);\n\n default:\n\n return \"Unknown error\";\n\n }\n\n\n\n#undef VK_CASE\n\n#undef APRIORI_CASE\n", "file_path": "lib/src/ffi/core/error.c", "rank": 18, "score": 41160.467364259646 }, { "content": "const char *error_to_string(Apriori2Error error);\n", "file_path": 
"lib/src/ffi/core/error.h", "rank": 19, "score": 41160.467364259646 }, { "content": "pub trait InputId: fmt::Debug + Clone + Unpin + Hash + Eq + Serialize + DeserializeOwned\n\n{}\n\n\n\nimpl<T> InputId for T\n\nwhere T: fmt::Debug + Clone + Unpin + Hash + Eq + Serialize + DeserializeOwned\n\n{}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct InputMap<Id: Hash + Eq> {\n\n input_map: HashMap<Id, InputVariants>\n\n}\n\n\n\nimpl<Id: InputId> InputMap<Id> {\n\n pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> {\n\n let file = File::open(path)?;\n\n Ok(from_reader(file)?)\n\n }\n\n\n\n pub fn hash_map(&self) -> &HashMap<Id, InputVariants> {\n\n &self.input_map\n", "file_path": "lib/src/io/input.rs", "rank": 20, "score": 35319.689155441 }, { "content": "fn vkey(key: USHORT, scan_code: USHORT, is_e0: bool) -> Option<VirtualKey> {\n\n // See https://docs.microsoft.com/en-us/windows/win32/inputdev/virtual-key-codes\n\n\n\n use VirtualKey::*;\n\n\n\n let key = match key as i32 {\n\n VK_CONTROL => if is_e0 {\n\n RightCtrl\n\n } else {\n\n LeftCtrl\n\n }\n\n VK_MENU => if is_e0 {\n\n RightAlt\n\n } else {\n\n LeftAlt\n\n }\n\n VK_SHIFT => match unsafe {\n\n MapVirtualKeyW(scan_code as UINT, MAPVK_VSC_TO_VK_EX) as i32\n\n } {\n\n VK_LSHIFT => LeftShift,\n", "file_path": "lib/src/os/windows/input_handling.rs", "rank": 21, "score": 35197.39730365559 }, { "content": " Handle object;\n", "file_path": "lib/src/ffi/core/result.h", "rank": 22, "score": 32638.621362848833 }, { "content": "Result new_gpu(\n\n struct PhyDeviceDescr *phy_dev_descr,\n\n struct RendererQueueFamilies *families,\n\n VkDeviceQueueCreateInfo *queues_cis,\n\n uint32_t queues_cis_count\n\n) {\n\n ASSERT_NOT_NULL(phy_dev_descr);\n\n ASSERT_NOT_NULL(families);\n\n ASSERT_NOT_NULL(queues_cis);\n\n\n\n Result result = { 0 };\n\n VkPhysicalDeviceFeatures enabled_features = { 0 };\n\n VkDeviceCreateInfo device_ci = {\n\n .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO\n\n };\n\n VkDevice gpu = 
VK_NULL_HANDLE;\n\n\n\n info(LOG_TARGET, \"creating new GPU object...\");\n\n\n\n# ifdef ___debug___\n\n const char *layer_names[] = {\n\n \"VK_LAYER_LUNARG_standard_validation\"\n\n };\n\n\n\n const uint32_t layer_names_count = STATIC_ARRAY_SIZE(layer_names);\n\n# else\n\n const char *layer_names = NULL;\n\n const uint32_t layer_names_count = 0;\n\n# endif // ___debug___\n\n\n\n const char *extension_names[] = {\n\n VK_KHR_SWAPCHAIN_EXTENSION_NAME\n\n };\n\n\n\n result = check_all_device_layers_available(phy_dev_descr->phy_device, layer_names, layer_names_count);\n\n EXPECT_SUCCESS(result);\n\n\n\n result = check_all_device_extensions_available(\n\n phy_dev_descr->phy_device,\n\n extension_names,\n\n STATIC_ARRAY_SIZE(extension_names)\n\n );\n\n EXPECT_SUCCESS(result);\n\n\n\n if (phy_dev_descr->features.samplerAnisotropy)\n\n enabled_features.samplerAnisotropy = VK_TRUE;\n\n\n\n device_ci.enabledLayerCount = layer_names_count;\n\n device_ci.ppEnabledLayerNames = layer_names;\n\n device_ci.enabledExtensionCount = STATIC_ARRAY_SIZE(extension_names);\n\n device_ci.ppEnabledExtensionNames = extension_names;\n\n device_ci.queueCreateInfoCount = queues_cis_count;\n\n device_ci.pQueueCreateInfos = queues_cis;\n\n device_ci.pEnabledFeatures = &enabled_features;\n\n\n\n result.error = vkCreateDevice(phy_dev_descr->phy_device, &device_ci, NULL, &gpu);\n\n result.object = gpu;\n\n EXPECT_SUCCESS(result);\n\n\n\n info(LOG_TARGET, \"new GPU object created successfully\");\n\n\n\n FN_FORCE_EXIT(result);\n", "file_path": "lib/src/ffi/graphics/renderer/renderer.impl.c", "rank": 23, "score": 30795.794767342835 }, { "content": "use {\n\n std::{\n\n fs,\n\n path::{Path, PathBuf}\n\n },\n\n crate::{Result, Error},\n\n};\n\n\n\npub const FOREIGN_FN_IFACE_DIR_NAME: &'static str = \"ffi\";\n\nconst RUST_VISIBLE_FILE_EXT: &'static str = \".rs.h\";\n\nconst VULKAN_ITEM_REGEX: &'static str = r\"(PFN_)?((vk)|(Vk)|(VK)).*\";\n\n\n\n// TODO: panic if found several files with the same 
name\n\n\n", "file_path": "infra/src/ffi.rs", "rank": 25, "score": 26.39832366900411 }, { "content": "use std::{path::PathBuf, io, fmt, env};\n\n\n\npub mod shader;\n\npub mod ffi;\n\n\n\npub const GENERATED_FILE_DIR: &'static str = \"generated\";\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Io(io::Error),\n\n FilenameExpected(PathBuf),\n\n ParentDirExpected(PathBuf),\n\n Bindgen,\n\n ShaderFile(String),\n\n ShaderCompile(shaderc::Error),\n\n EnvVar(env::VarError),\n\n Internal(String),\n\n}\n\n\n\nimpl fmt::Display for Error {\n", "file_path": "infra/src/lib.rs", "rank": 26, "score": 21.696632731043046 }, { "content": "pub mod vulkan_instance;\n\npub mod log;\n\n\n\nuse {\n\n std::{\n\n fmt,\n\n sync::PoisonError,\n\n ffi::CStr,\n\n os::raw::c_char,\n\n },\n\n crate::{ffi, io},\n\n};\n\n\n\npub use vulkan_instance::VulkanInstance;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Apriori2FFI(ffi::Apriori2Error, &'static str),\n\n Utf8Error(std::str::Utf8Error),\n\n OsSpecific(String),\n", "file_path": "lib/src/core/mod.rs", "rank": 27, "score": 18.119155926771704 }, { "content": "impl From<shaderc::Error> for Error {\n\n fn from(err: shaderc::Error) -> Self {\n\n Self::ShaderCompile(err)\n\n }\n\n}\n\n\n\nimpl From<env::VarError> for Error {\n\n fn from(err: env::VarError) -> Self {\n\n Self::EnvVar(err)\n\n }\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n", "file_path": "infra/src/lib.rs", "rank": 28, "score": 16.08596650330681 }, { "content": "use {\n\n std::{\n\n collections::HashMap,\n\n path::Path,\n\n fs::File,\n\n hash::Hash,\n\n marker::Unpin,\n\n fmt,\n\n },\n\n serde::{Serialize, Deserialize, de::DeserializeOwned},\n\n ron::de::from_reader,\n\n crate::{\n\n core::Result,\n\n io::*\n\n }\n\n};\n\n\n\n#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]\n\npub enum InputVariants {\n\n Axis(Vec<Axis>),\n", "file_path": "lib/src/io/input.rs", "rank": 29, "score": 14.120966360149188 }, { "content": " Self::Serialization(err) => 
write!(f, \"{}\", err),\n\n Self::Io(err) => write!(f, \"(io error) {}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl std::error::Error for Error {}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\nimpl ffi::Result {\n\n pub fn try_unwrap<T>(&self) -> Result<*mut T> {\n\n let result;\n\n unsafe {\n\n if self.error == ffi::Apriori2Error_SUCCESS {\n\n result = std::mem::transmute::<ffi::Handle, *mut T>(self.object);\n\n } else {\n\n let error_code = self.error;\n\n\n\n let error_description = CStr::from_ptr(\n", "file_path": "lib/src/core/mod.rs", "rank": 30, "score": 14.039854717710256 }, { "content": "use {\n\n std::sync::{RwLock},\n\n winapi::{\n\n shared::windef::HWND,\n\n um::winuser::*\n\n },\n\n lazy_static::lazy_static,\n\n crate::core::{Result, AssumeThreadSafe},\n\n};\n\n\n\nlazy_static! {\n\n static ref IS_IO_ACTIVE: RwLock<bool> = RwLock::new(true);\n\n pub(crate) static ref WINDOWS: RwLock<Vec<AssumeThreadSafe<HWND>>> = RwLock::new(vec![]);\n\n}\n\n\n\nconst LOG_TARGET: &'static str = \"Windows IO\";\n\n\n", "file_path": "lib/src/io/win_io.rs", "rank": 31, "score": 13.917886160735398 }, { "content": "use {\n\n std::collections::HashMap,\n\n crate::io::*,\n\n};\n\n\n\npub struct InputHandler<Id: InputId> {\n\n inputs: HashMap<Input, Id>,\n\n handlers: HashMap<Id, Box<dyn FnMut(Id, InputEvent, InputKind)>>,\n\n\n\n #[cfg(target_os = \"windows\")]\n\n pub(crate) aux: WindowsInputAuxInfo,\n\n}\n\n\n\nimpl<Id: InputId> InputHandler<Id> {\n\n const LOG_TARGET: &'static str = \"InputHandler\";\n\n\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n", "file_path": "lib/src/io/input_handler.rs", "rank": 32, "score": 12.780548107276061 }, { "content": "use crate::{\n\n ffi,\n\n os::{self, WindowMethods},\n\n core::{Result, VulkanInstance},\n\n io,\n\n};\n\n\n\npub struct Renderer {\n\n renderer_ffi: ffi::Renderer\n\n}\n\n\n\nimpl Renderer {\n\n const LOG_TARGET: &'static str = \"Rust/Renderer\";\n\n\n\n pub fn new<Id: io::InputId>(\n\n vk_instance: 
&VulkanInstance,\n\n window: &os::Window<Id>,\n\n ) -> Result<Self> {\n\n log::info! {\n\n target: Self::LOG_TARGET,\n", "file_path": "lib/src/graphics/renderer.rs", "rank": 35, "score": 12.048333111594268 }, { "content": "use {\n\n std::fmt,\n\n crate::{core::Result, ffi, io}\n\n};\n\n\n\n#[cfg(target_os = \"windows\")]\n\npub mod windows;\n\n\n\n#[cfg(target_os = \"windows\")]\n\npub use windows::Window;\n\n\n\n#[derive(Debug)]\n\npub struct WindowSize {\n\n pub width: u16,\n\n pub height: u16\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct WindowPosition {\n\n pub x: i16,\n", "file_path": "lib/src/os/mod.rs", "rank": 36, "score": 11.98865099890525 }, { "content": "use {\n\n std::fmt,\n\n serde::{Serialize, Deserialize},\n\n crate::{\n\n core::{Result, Error},\n\n io::*\n\n }\n\n};\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)]\n\npub struct Action {\n\n key: VirtualKey,\n\n mods: KeyMods\n\n}\n\n\n\nimpl Action {\n\n pub fn new(key: VirtualKey, mods: KeyMods) -> Result<Self> {\n\n if key.as_key_mods()\n\n .map(|mod_key| mods.contains(mod_key))\n\n .unwrap_or(false) {\n", "file_path": "lib/src/io/action.rs", "rank": 37, "score": 11.412815483832585 }, { "content": "use {\n\n std::ffi::{c_void, CStr},\n\n libc::c_char,\n\n log::{log, error},\n\n printf::printf\n\n};\n\n\n\nconst LOG_TARGET: &'static str = \"apriori-log\";\n\n\n\n#[no_mangle]\n\nextern \"C\" fn ffi_log(level: *const c_char, target: *const c_char, format: *const c_char, args: *mut c_void) {\n\n macro_rules! as_str {\n\n ($str:expr) => {\n\n unsafe {\n\n match CStr::from_ptr($str).to_str() {\n\n Ok(value) => value,\n\n Err(err) => {\n\n error! 
{\n\n target: LOG_TARGET,\n\n \"FFI log error while converting \\\"{}\\\" to &str -- {}\",\n", "file_path": "lib/src/core/log.rs", "rank": 38, "score": 11.153818513140543 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Io(err) => write!(f, \"io error: {}\", err),\n\n Self::FilenameExpected(path) => write!(f, \"{}: file name expected\", path.display()),\n\n Self::ParentDirExpected(path) => write!(f, \"{}: parent directory expected\", path.display()),\n\n Self::Bindgen => write!(f, \"c bindings generation error\"),\n\n Self::ShaderFile(err) => write!(f, \"shader error: {}\", err),\n\n Self::ShaderCompile(err) => write!(f, \"shader compiler error: {}\", err),\n\n Self::EnvVar(err) => write!(f, \"env variable error: {}\", err),\n\n Self::Internal(err) => write!(f, \"internal error: {}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(err: io::Error) -> Self {\n\n Self::Io(err)\n\n }\n\n}\n\n\n", "file_path": "infra/src/lib.rs", "rank": 39, "score": 10.817892764901039 }, { "content": "use super::{ffi, Result};\n\n\n\npub struct VulkanInstance {\n\n pub instance_ffi: ffi::VulkanInstance\n\n}\n\n\n\nimpl VulkanInstance {\n\n const LOG_TARGET: &'static str = \"Rust/VulkanInstance\";\n\n\n\n pub fn new() -> Result<Self> {\n\n log::info! 
{\n\n target: Self::LOG_TARGET,\n\n \"creating new vulkan instance...\"\n\n }\n\n\n\n let instance;\n\n unsafe {\n\n instance = Self {\n\n instance_ffi: ffi::new_vk_instance().try_unwrap()?\n\n };\n", "file_path": "lib/src/core/vulkan_instance.rs", "rank": 40, "score": 10.595080756222544 }, { "content": " basetsd::LONG_PTR,\n\n },\n\n um::{\n\n errhandlingapi::{\n\n SetLastError,\n\n GetLastError\n\n },\n\n winuser::*\n\n }\n\n },\n\n crate::{\n\n core::Result,\n\n os,\n\n io::*,\n\n }\n\n};\n\n\n\nconst LOG_TARGET: &'static str = \"Windows Window Callback\";\n\n\n\n// https://docs.microsoft.com/en-us/windows-hardware/drivers/hid/hid-usages#usage-page\n", "file_path": "lib/src/os/windows/input_handling.rs", "rank": 41, "score": 10.503040146651372 }, { "content": " std::mem::transmute::<ffi::Handle, *mut c_char>(self.object)\n\n ).to_str()?;\n\n\n\n return Err(Error::Apriori2FFI(error_code, error_description));\n\n }\n\n }\n\n\n\n Ok(result)\n\n }\n\n}\n\n\n\npub struct AssumeThreadSafe<T>(T);\n\n\n\nunsafe impl<T> std::marker::Sync for AssumeThreadSafe<T> {}\n\n\n\nunsafe impl<T> std::marker::Send for AssumeThreadSafe<T> {}\n\n\n\nimpl<T> From<T> for AssumeThreadSafe<T> {\n\n fn from(v: T) -> Self {\n\n Self(v)\n", "file_path": "lib/src/core/mod.rs", "rank": 42, "score": 10.473037553920278 }, { "content": "};\n\n\n\nmod input_handling;\n\n\n\nconst WINDOW_CLASS_NAME: &'static str = \"Apriori2WindowClass\";\n\n\n\npub(crate) struct WindowInternalInfo<Id: io::InputId> {\n\n pub(crate) input_handler: io::InputHandler<Id>,\n\n pub(crate) state_handler: Box<dyn FnMut(WindowState)>,\n\n pub(crate) is_dropped: bool,\n\n}\n\n\n\nimpl<Id: io::InputId> Unpin for WindowInternalInfo<Id> {}\n\n\n\npub struct Window<Id: io::InputId> {\n\n hwnd: HWND,\n\n internal: Pin<Box<WindowInternalInfo<Id>>>,\n\n}\n\n\n\nimpl<Id: io::InputId> Window<Id> {\n", "file_path": "lib/src/os/windows/mod.rs", "rank": 43, "score": 10.084252454739612 }, { "content": " 
KeyAndModifierMatch(io::VirtualKey),\n\n Sync(String),\n\n Serialization(String),\n\n Io(std::io::Error),\n\n}\n\n\n\nimpl From<std::str::Utf8Error> for Error {\n\n fn from(err: std::str::Utf8Error) -> Self {\n\n Self::Utf8Error(err)\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(err: std::io::Error) -> Self {\n\n Self::Io(err)\n\n }\n\n}\n\n\n\nimpl From<ron::error::Error> for Error {\n\n fn from(err: ron::error::Error) -> Self {\n", "file_path": "lib/src/core/mod.rs", "rank": 45, "score": 9.811472301615145 }, { "content": "use {\n\n std::{\n\n env,\n\n path::Path\n\n },\n\n infra::{self, project_build}\n\n};\n\n\n", "file_path": "lib/build.rs", "rank": 46, "score": 9.80713334379464 }, { "content": " .raw_line(\"#![allow(dead_code)]\")\n\n .raw_line(\"#![allow(non_upper_case_globals)]\");\n\n\n\n let (builder, bindings_count) = process_ffi_dir(&path, builder, cc_build)?;\n\n\n\n if bindings_count > 0 {\n\n let ffi_mod_path = path.join(\"mod.rs\");\n\n let bindings = builder.generate()\n\n .map_err(|_| Error::Bindgen)?;\n\n bindings.write_to_file(ffi_mod_path)?;\n\n }\n\n\n\n break;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "infra/src/ffi.rs", "rank": 47, "score": 8.769786806045232 }, { "content": " },\n\n windef::{\n\n HWND,\n\n HICON,\n\n HBRUSH,\n\n HMENU,\n\n RECT,\n\n }\n\n },\n\n um::{\n\n winbase::*,\n\n winuser::*,\n\n errhandlingapi::GetLastError,\n\n }\n\n },\n\n crate::{\n\n core::{Result, Error},\n\n os::*,\n\n io,\n\n }\n", "file_path": "lib/src/os/windows/mod.rs", "rank": 48, "score": 8.233045229778565 }, { "content": "use {\n\n std::{\n\n fmt,\n\n cmp::{PartialEq, Eq},\n\n hash::{Hash, Hasher}\n\n },\n\n serde::{Serialize, Deserialize},\n\n crate::io::*\n\n};\n\n\n\npub type AxisScale = f32;\n\npub type AxisValue = f32;\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy, Serialize, Deserialize)]\n\npub enum AxisId {\n\n Key(VirtualKey),\n\n MousePositionX,\n\n MousePositionY,\n\n MouseWheel,\n\n}\n", 
"file_path": "lib/src/io/axis.rs", "rank": 49, "score": 8.172570441196484 }, { "content": " const LOG_TARGET: &'static str = \"Window\";\n\n\n\n pub fn new(\n\n title: &str,\n\n size: WindowSize,\n\n position: WindowPosition\n\n ) -> Result<Self> {\n\n log::info! {\n\n target: Self::LOG_TARGET,\n\n \"creating new window...\"\n\n };\n\n\n\n log::trace! {\n\n target: Self::LOG_TARGET,\n\n \"\\twindow class name: \\\"{}\\\"\",\n\n WINDOW_CLASS_NAME\n\n };\n\n\n\n log::trace! {\n\n target: Self::LOG_TARGET,\n", "file_path": "lib/src/os/windows/mod.rs", "rank": 50, "score": 8.068629651572675 }, { "content": " .ok_or(Error::FilenameExpected(lib.clone()))?;\n\n let lib_dir = lib\n\n .parent()\n\n .ok_or(Error::ParentDirExpected(lib.clone()))?;\n\n\n\n println!(\"cargo:rustc-link-search=native={}\", lib_dir.display());\n\n println!(\"cargo:rustc-link-lib=static={}\", lib_name.to_string_lossy());\n\n }\n\n\n\n Ok(())\n\n}", "file_path": "infra/src/lib.rs", "rank": 51, "score": 7.991593286750446 }, { "content": "\n\nimpl Serialize for KeyMods {\n\n fn serialize<S>(&self, serializer: S) -> std::result::Result<<S as Serializer>::Ok, <S as Serializer>::Error>\n\n where S: Serializer {\n\n let keys = self.as_virtual_keys();\n\n let mut seq = serializer.serialize_seq(Some(keys.len()))?;\n\n\n\n for key in keys {\n\n seq.serialize_element(&key)?;\n\n }\n\n\n\n seq.end()\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for KeyMods {\n\n fn deserialize<D>(deserializer: D) -> std::result::Result<Self, <D as Deserializer<'de>>::Error>\n\n where D: Deserializer<'de> {\n\n deserializer.deserialize_seq(KeyModsVisitor)\n\n }\n\n}\n\n\n", "file_path": "lib/src/io/key.rs", "rank": 53, "score": 7.4516734108674765 }, { "content": " devices.len() as UINT,\n\n std::mem::size_of::<RAWINPUTDEVICE>() as UINT\n\n );\n\n\n\n if result == FALSE {\n\n return Err(os::windows::last_error(\"raw input devices\"));\n\n }\n\n }\n\n\n\n Ok(())\n\n}", "file_path": "lib/src/os/windows/input_handling.rs", "rank": 
54, "score": 7.43790755543734 }, { "content": "mod key;\n\nmod action;\n\nmod axis;\n\nmod input;\n\nmod input_handler;\n\n\n\n#[cfg(target_os = \"windows\")]\n\nmod win_io;\n\n\n\npub use key::*;\n\npub use action::*;\n\npub use axis::*;\n\npub use input::*;\n\npub use input_handler::*;\n\n\n\n#[cfg(target_os = \"windows\")]\n\npub use win_io::*;", "file_path": "lib/src/io/mod.rs", "rank": 55, "score": 7.291060062238697 }, { "content": " }\n\n }\n\n\n\n if duplicated.is_empty() {\n\n Ok(mods)\n\n } else {\n\n Err(\n\n de::Error::custom(\n\n format!(\n\n \"duplicated modifiers ({})\",\n\n duplicated.iter()\n\n .map(|key_mod| format!(\"{}\", key_mod))\n\n .join(\",\")\n\n )\n\n )\n\n )\n\n }\n\n }\n\n\n\n fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"[<key modifier>, ...]\")\n\n }\n\n}", "file_path": "lib/src/io/key.rs", "rank": 57, "score": 7.056213540880569 }, { "content": "\n\n fn new(input_id: Id, handler: &'h mut InputHandler<Id>) -> Self {\n\n Self {\n\n input_id,\n\n handler,\n\n }\n\n }\n\n\n\n pub fn with<H>(self, new_handler: H) -> &'h mut InputHandler<Id>\n\n where\n\n H: FnMut(Id, InputEvent, InputKind) + 'static\n\n {\n\n self.handler.handlers.insert(self.input_id, Box::new(new_handler));\n\n self.handler\n\n }\n\n\n\n pub fn axis<H>(self, mut new_handler: H) -> &'h mut InputHandler<Id>\n\n where\n\n H: FnMut(AxisValue) + 'static\n\n {\n", "file_path": "lib/src/io/input_handler.rs", "rank": 58, "score": 6.974115748133547 }, { "content": "use {\n\n std::{\n\n os::windows::ffi::{OsStrExt, OsStringExt},\n\n ffi::{OsStr, OsString},\n\n pin::Pin,\n\n marker::Unpin,\n\n },\n\n winapi::{\n\n shared::{\n\n ntdef::{\n\n LPCWSTR,\n\n WCHAR,\n\n MAKELANGID,\n\n LANG_NEUTRAL,\n\n SUBLANG_DEFAULT,\n\n },\n\n minwindef::{\n\n HINSTANCE,\n\n DWORD,\n\n LPVOID,\n", "file_path": "lib/src/os/windows/mod.rs", "rank": 59, "score": 6.942519646234627 }, { "content": " self.with(move |id, event, kind| {\n\n match kind {\n\n 
InputKind::Axis(scale) => new_handler(event.axis_value() * scale),\n\n _ => log::error! {\n\n target: Self::LOG_TARGET,\n\n \"{:#?} handler type mismatch, handler expects axis while input id is action\",\n\n id\n\n }\n\n }\n\n })\n\n }\n\n\n\n pub fn action<H>(self, mut new_handler: H) -> &'h mut InputHandler<Id>\n\n where\n\n H: FnMut(InputEvent) + 'static\n\n {\n\n self.with(move |id, event, kind| {\n\n match kind {\n\n InputKind::Action => new_handler(event),\n\n _ => log::error! {\n", "file_path": "lib/src/io/input_handler.rs", "rank": 60, "score": 6.749389951573423 }, { "content": "pub mod renderer;\n\n\n\npub use renderer::Renderer;\n", "file_path": "lib/src/graphics/mod.rs", "rank": 61, "score": 6.668630110968168 }, { "content": " cc_build.define(\"___macos___\", None)\n\n .define(\"VK_USE_PLATFORM_MACOS_MVK\", None);\n\n } else if cfg!(target_os = \"linux\") {\n\n cc_build.define(\"___linux___\", None);\n\n } else {\n\n cc_build.define(\"___unknown___\", None);\n\n }\n\n\n\n if cfg!(debug_assertions) {\n\n cc_build.define(\"___debug___\", None);\n\n } else {\n\n cc_build.define(\"___release___\", None);\n\n }\n\n\n\n ffi::process_c_srcs(&src_path, &include_dirs, &mut cc_build)?;\n\n\n\n cc_build.compile(\"apriori2.c.ffi\");\n\n\n\n for lib in libraries {\n\n let lib_name = lib.file_stem()\n", "file_path": "infra/src/lib.rs", "rank": 62, "score": 6.23410484251678 }, { "content": "\n\n let result = GetRawInputData(\n\n lparam as HRAWINPUT,\n\n RID_INPUT,\n\n std::ptr::null_mut(),\n\n &mut input_size,\n\n std::mem::size_of::<RAWINPUTHEADER>() as UINT\n\n );\n\n\n\n if result != 0 {\n\n return Err(os::windows::last_error(\"get raw input data (get input size)\"));\n\n }\n\n\n\n let mut bytes = vec![0u8; input_size as usize];\n\n\n\n let size = GetRawInputData(\n\n lparam as HRAWINPUT,\n\n RID_INPUT,\n\n bytes.as_mut_ptr() as LPVOID,\n\n &mut input_size,\n", "file_path": "lib/src/os/windows/input_handling.rs", "rank": 63, "score": 6.21727621736419 }, { "content": 
" stringify![$str],\n\n err\n\n };\n\n\n\n return;\n\n }\n\n }\n\n }\n\n };\n\n }\n\n\n\n let level: log::Level = match as_str![level].parse() {\n\n Ok(lvl) => lvl,\n\n Err(err) => {\n\n error! {\n\n target: LOG_TARGET,\n\n \"FFI log error while parsing log level -- {}\",\n\n err\n\n };\n\n\n", "file_path": "lib/src/core/log.rs", "rank": 64, "score": 6.048725664035805 }, { "content": "\n\n // See SetWindowLongPtrW docs\n\n // https://docs.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-setwindowlongptrw#return-value\n\n // (About SetLastError and SetWindowLongPtrW return value)\n\n\n\n SetLastError(0);\n\n\n\n let result = SetWindowLongPtrW(\n\n hwnd,\n\n GWLP_USERDATA,\n\n window_internal as LONG_PTR\n\n );\n\n\n\n let last_error = GetLastError();\n\n\n\n if result == 0 && last_error != 0 {\n\n return Err(os::windows::last_error(\"set window long ptr\"));\n\n }\n\n\n\n init_raw_input(hwnd)?;\n", "file_path": "lib/src/os/windows/input_handling.rs", "rank": 67, "score": 5.474670615286078 }, { "content": " }\n\n\n\n fn size(&self) -> Result<WindowSize> {\n\n let size;\n\n unsafe {\n\n let mut rect: RECT = std::mem::zeroed();\n\n\n\n if GetWindowRect(self.hwnd, &mut rect) == 0 {\n\n return Err(last_error(\"get window size\"));\n\n }\n\n\n\n size = WindowSize {\n\n width: (rect.right - rect.left).abs() as u16,\n\n height: (rect.bottom - rect.top).abs() as u16\n\n }\n\n }\n\n\n\n Ok(size)\n\n }\n\n\n", "file_path": "lib/src/os/windows/mod.rs", "rank": 68, "score": 5.419421147491898 }, { "content": " } else if let Some(ext) = path.extension() {\n\n let file_name = path.file_stem()\n\n .ok_or(Error::FilenameExpected(path.clone().into()))?\n\n .to_string_lossy();\n\n\n\n if file_name.ends_with(RUST_VISIBLE_FILE_EXT) && ext == h_ext {\n\n builder = builder.header(path.to_string_lossy());\n\n bindings_count += 1;\n\n }\n\n\n\n if ext == c_ext {\n\n cc_build.file(path.clone());\n\n }\n\n\n\n println!(\"cargo:rerun-if-changed={}\", path.display());\n\n\n\n }\n\n 
}\n\n\n\n Ok((builder, bindings_count))\n\n}", "file_path": "infra/src/ffi.rs", "rank": 69, "score": 5.353642082944379 }, { "content": " axis_id: self.axis_id.normalized(),\n\n ..*self\n\n }\n\n }\n\n\n\n pub fn axis_id(&self) -> AxisId {\n\n self.axis_id\n\n }\n\n\n\n pub fn scale(&self) -> AxisScale {\n\n self.scale\n\n }\n\n\n\n pub fn mods(&self) -> KeyMods {\n\n self.mods\n\n }\n\n}\n\n\n\nimpl fmt::Display for Axis {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "lib/src/io/axis.rs", "rank": 70, "score": 5.221936989979021 }, { "content": "use {\n\n std::{\n\n fmt,\n\n collections::HashSet,\n\n },\n\n serde::{\n\n ser::{Serialize, Serializer, SerializeSeq},\n\n de::{self, Deserialize, Deserializer, Visitor, SeqAccess}\n\n },\n\n bitflags::bitflags,\n\n itertools::Itertools,\n\n};\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy, serde::Serialize, serde::Deserialize)]\n\npub enum VirtualKey {\n\n MouseLeft,\n\n MouseMiddle,\n\n MouseRight,\n\n MouseX1,\n\n MouseX2,\n", "file_path": "lib/src/io/key.rs", "rank": 71, "score": 5.1919320174762245 }, { "content": " return Err(Error::KeyAndModifierMatch(key));\n\n }\n\n\n\n let action = Self {\n\n key,\n\n mods\n\n };\n\n\n\n Ok(action)\n\n }\n\n\n\n /// Transforms OS specific keys to general keys\n\n pub fn normalized(&self) -> Self {\n\n Self {\n\n key: self.key.normalized(),\n\n ..*self\n\n }\n\n }\n\n\n\n pub fn key(&self) -> VirtualKey {\n", "file_path": "lib/src/io/action.rs", "rank": 72, "score": 4.967442018214021 }, { "content": " }\n\n}\n\n\n\nimpl<Id: InputId> From<InputMap<Id>> for InputHandler<Id> {\n\n fn from(input_map: InputMap<Id>) -> Self {\n\n let mut handler = Self::new();\n\n\n\n handler.update_inputs(&input_map);\n\n\n\n handler\n\n }\n\n}\n\n\n\npub struct InputHandlerAdder<'h, Id: InputId> {\n\n input_id: Id,\n\n handler: &'h mut InputHandler<Id>\n\n}\n\n\n\nimpl<'h, Id: InputId> InputHandlerAdder<'h, Id> {\n\n const LOG_TARGET: &'static str = 
\"InputHandlerAdder\";\n", "file_path": "lib/src/io/input_handler.rs", "rank": 73, "score": 4.763836772191731 }, { "content": " Self::Serialization(err.to_string())\n\n }\n\n}\n\n\n\nimpl<T> From<PoisonError<T>> for Error {\n\n fn from(err: PoisonError<T>) -> Self {\n\n Self::Sync(err.to_string())\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Apriori2FFI(_err, desc) => write!(f, \"FFI error: {}\", desc),\n\n Self::Utf8Error(err) => write!(f, \"{}\", err),\n\n Self::OsSpecific(err) => write!(f, \"(OS) {}\", err),\n\n Self::KeyAndModifierMatch(key) => {\n\n write!(f, \"{:#?} - key and modifier are same\", key)\n\n },\n\n Self::Sync(err) => write!(f, \"{}\", err),\n", "file_path": "lib/src/core/mod.rs", "rank": 74, "score": 4.753020525091749 }, { "content": " pub y: i16\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum WindowState {\n\n Close,\n\n Show,\n\n Hide,\n\n SizeChanged(WindowSize),\n\n PositionChanged(WindowPosition),\n\n}\n\n\n\nimpl fmt::Display for WindowSize {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"(width = {}, height = {})\",\n\n self.width, self.height\n\n )\n\n }\n", "file_path": "lib/src/os/mod.rs", "rank": 75, "score": 4.723353612962596 }, { "content": " _ => unreachable!()\n\n })\n\n }\n\n}\n\n\n\nimpl fmt::Display for VirtualKey {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:?}\", *self)\n\n }\n\n}\n\n\n\npub type KeyModsUnderlyingType = u32;\n\n\n\nbitflags! 
{\n\n pub struct KeyMods: KeyModsUnderlyingType {\n\n const CTRL = 0x1;\n\n const CMD = 0x2;\n\n const SHIFT = 0x4;\n\n const ALT = 0x8;\n\n }\n", "file_path": "lib/src/io/key.rs", "rank": 76, "score": 4.674856694941957 }, { "content": " self.key\n\n }\n\n\n\n pub fn mods(&self) -> KeyMods {\n\n self.mods\n\n }\n\n}\n\n\n\nimpl fmt::Display for Action {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Action(key: {}, mods: {})\", self.key, self.mods)\n\n }\n\n}", "file_path": "lib/src/io/action.rs", "rank": 77, "score": 4.6683544791866876 }, { "content": "const GENERIC_DESKTOP_CONTROLS: USHORT = 0x01;\n\n\n\n// https://docs.microsoft.com/en-us/windows-hardware/drivers/hid/hid-usages#usage-id\n\nconst HID_USAGE_GENERIC_MOUSE: USHORT = 0x02;\n\nconst HID_USAGE_GENERIC_KEYBOARD: USHORT = 0x06;\n\n\n\npub unsafe extern \"system\" fn window_cb<Id: InputId>(\n\n hwnd: HWND,\n\n msg: UINT,\n\n wparam: WPARAM,\n\n lparam: LPARAM\n\n) -> LRESULT {\n\n match window_cb_inner::<Id>(hwnd, msg, wparam, lparam) {\n\n Ok(Some(result)) => result,\n\n Ok(None) => DefWindowProcW(hwnd, msg, wparam, lparam),\n\n Err(err) => {\n\n log::error! 
{\n\n target: LOG_TARGET,\n\n \"{}\", err\n\n };\n", "file_path": "lib/src/os/windows/input_handling.rs", "rank": 78, "score": 4.52290860524675 }, { "content": " target: Self::LOG_TARGET,\n\n \"{:#?} handler type mismatch, handler expects action while input id is axis\",\n\n id\n\n }\n\n }\n\n })\n\n }\n\n}\n\n\n\n#[cfg(target_os = \"windows\")]\n\npub(crate) struct WindowsInputAuxInfo {\n\n pub mods: KeyMods\n\n}\n\n\n\n#[cfg(target_os = \"windows\")]\n\nimpl WindowsInputAuxInfo {\n\n fn new() -> Self {\n\n Self {\n\n mods: KeyMods::empty()\n\n }\n\n }\n\n}\n", "file_path": "lib/src/io/input_handler.rs", "rank": 79, "score": 4.388912342641175 }, { "content": "\n\nimpl AxisId {\n\n /// Transforms OS specific keys to general keys\n\n pub fn normalized(&self) -> Self {\n\n match self {\n\n Self::Key(key) => Self::Key(key.normalized()),\n\n _ => self.clone()\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for AxisId {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:?}\", *self)\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\npub struct Axis {\n\n axis_id: AxisId,\n", "file_path": "lib/src/io/axis.rs", "rank": 80, "score": 4.280533425036855 }, { "content": "pub mod graphics;\n\npub mod core;\n\npub mod os;\n\npub mod io;\n\nmod ffi;", "file_path": "lib/src/lib.rs", "rank": 81, "score": 3.7806519009389286 }, { "content": " fn platform_handle(&self) -> ffi::Handle {\n\n self.hwnd as ffi::Handle\n\n }\n\n\n\n fn input_handler(&self) -> &io::InputHandler<Id> {\n\n &self.internal.input_handler\n\n }\n\n\n\n fn input_handler_mut(&mut self) -> &mut io::InputHandler<Id> {\n\n &mut self.internal.input_handler\n\n }\n\n\n\n fn handle_window_state<H>(&mut self, handler: H)\n\n where\n\n H: FnMut(WindowState) + 'static\n\n {\n\n self.internal.state_handler = Box::new(handler);\n\n }\n\n}\n\n\n", "file_path": "lib/src/os/windows/mod.rs", "rank": 82, "score": 3.5400679311759653 }, { "content": "\n\nimpl fmt::Display for Input {\n\n fn 
fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Action(action) => write!(f, \"{}\", action),\n\n Self::Axis(axis) => write!(f, \"{}\", axis),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Axis> for Input {\n\n fn from(axis: Axis) -> Self {\n\n Self::Axis(axis)\n\n }\n\n}\n\n\n\nimpl From<Action> for Input {\n\n fn from(action: Action) -> Self {\n\n Self::Action(action)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum InputKind {\n\n Axis(AxisScale),\n\n Action\n\n}\n\n\n", "file_path": "lib/src/io/input.rs", "rank": 83, "score": 3.2310155391456394 }, { "content": " window_class_name.push(0);\n\n\n\n let mut window_title = OsStr::new(title)\n\n .encode_wide().collect::<Vec<u16>>();\n\n\n\n // Add '\\0' at the end\n\n window_title.push(0);\n\n\n\n let hwnd;\n\n let mut internal;\n\n unsafe {\n\n let window_class = WNDCLASSW {\n\n style: 0,\n\n cbClsExtra: 0,\n\n cbWndExtra: 0,\n\n hInstance: 0 as HINSTANCE,\n\n hIcon: 0 as HICON,\n\n hCursor: LoadCursorW(std::ptr::null_mut(), IDC_CROSS),\n\n hbrBackground: 0x10 as HBRUSH,\n\n lpszMenuName: 0 as LPCWSTR,\n", "file_path": "lib/src/os/windows/mod.rs", "rank": 84, "score": 3.200447199124253 }, { "content": " std::mem::size_of::<RAWINPUTHEADER>() as UINT\n\n );\n\n\n\n if size != input_size {\n\n return Err(os::windows::last_error(\"get raw input data (invalid size)\"));\n\n }\n\n\n\n let input = &mut *(bytes.as_mut_ptr() as *mut RAWINPUT);\n\n\n\n if input.header.dwType == RIM_TYPEKEYBOARD {\n\n const FAKE_KEY: USHORT = 0xFF;\n\n\n\n let keyboard = input.data.keyboard();\n\n let scan_code = keyboard.MakeCode;\n\n let key = keyboard.VKey;\n\n let flags = keyboard.Flags;\n\n\n\n if key == FAKE_KEY {\n\n return Ok(None);\n\n }\n", "file_path": "lib/src/os/windows/input_handling.rs", "rank": 85, "score": 3.1213352176458895 }, { "content": " return;\n\n }\n\n };\n\n\n\n let target = as_str![target];\n\n\n\n let message = unsafe {\n\n printf(format, args)\n\n };\n\n\n\n log! 
{\n\n target: target,\n\n level,\n\n \"{}\", message\n\n };\n\n}\n", "file_path": "lib/src/core/log.rs", "rank": 86, "score": 2.794984487952883 }, { "content": " scale: AxisScale,\n\n mods: KeyMods,\n\n}\n\n\n\nimpl Axis {\n\n pub fn new(axis_id: AxisId, scale: AxisScale, mods: KeyMods) -> Self {\n\n Self {\n\n axis_id,\n\n scale,\n\n mods,\n\n }\n\n }\n\n\n\n pub fn with_unit_scale(axis_id: AxisId, mods: KeyMods) -> Self {\n\n Self::new(axis_id, 1.0, mods)\n\n }\n\n\n\n /// Transforms OS specific keys to general keys\n\n pub fn normalized(&self) -> Self {\n\n Self {\n", "file_path": "lib/src/io/axis.rs", "rank": 87, "score": 2.7838405136754343 }, { "content": "}\n\n\n\nimpl fmt::Display for WindowPosition {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"(x = {}, y = {})\",\n\n self.x, self.y\n\n )\n\n }\n\n}\n\n\n", "file_path": "lib/src/os/mod.rs", "rank": 88, "score": 2.7775983911864914 }, { "content": " fn drop(&mut self) {\n\n unsafe {\n\n if !self.internal.is_dropped {\n\n if DestroyWindow(self.hwnd) == 0 {\n\n log::error! {\n\n target: Self::LOG_TARGET,\n\n \"{}\", last_error(\"window destroy\")\n\n }\n\n }\n\n }\n\n }\n\n\n\n log::debug! 
{\n\n target: Self::LOG_TARGET,\n\n \"drop window\"\n\n }\n\n }\n\n}\n\n\n\nimpl<Id: io::InputId> WindowMethods<Id> for Window<Id> {\n", "file_path": "lib/src/os/windows/mod.rs", "rank": 89, "score": 2.7223831616748955 }, { "content": " }\n\n}\n\n\n\nimpl<T> std::ops::Deref for AssumeThreadSafe<T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n", "file_path": "lib/src/core/mod.rs", "rank": 90, "score": 2.714003499120339 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum InputEvent {\n\n Pressed,\n\n Released,\n\n // Repeat(u16),\n\n // DoubleClick,\n\n Axis(AxisValue),\n\n}\n\n\n\nimpl InputEvent {\n\n pub fn axis_value(&self) -> AxisValue {\n\n match self {\n\n Self::Axis(value) => *value,\n\n // Self::Repeat(value) => *value as AxisValue,\n\n Self::Pressed => 1.0,\n\n _ => 0.0\n\n }\n\n }\n\n}", "file_path": "lib/src/io/input.rs", "rank": 92, "score": 2.6456194793465997 }, { "content": " pub fn is_general_mod(&self) -> bool {\n\n self.split_general_mod().is_some()\n\n }\n\n\n\n /// Splits `Ctrl`, `Alt` or `Shift` into left and right.\n\n pub fn split_general_mod(&self) -> Option<(Self, Self)> {\n\n match self {\n\n Self::Ctrl => Some((Self::LeftCtrl, Self::RightCtrl)),\n\n Self::Shift => Some((Self::LeftShift, Self::RightShift)),\n\n Self::Alt => Some((Self::LeftAlt, Self::RightAlt)),\n\n _ => None\n\n }\n\n }\n\n\n\n pub fn as_general_mod(&self) -> Option<Self> {\n\n match self {\n\n Self::Ctrl\n\n | Self::LeftCtrl\n\n | Self::RightCtrl => Some(Self::Ctrl),\n\n\n", "file_path": "lib/src/io/key.rs", "rank": 93, "score": 2.6268639961875566 }, { "content": "use {\n\n winapi::{\n\n shared::{\n\n minwindef::{\n\n TRUE,\n\n FALSE,\n\n BOOL,\n\n UINT,\n\n USHORT,\n\n DWORD,\n\n WPARAM,\n\n LPARAM,\n\n LRESULT,\n\n LPVOID,\n\n LOWORD,\n\n HIWORD,\n\n },\n\n windef::{\n\n HWND,\n\n },\n", "file_path": "lib/src/os/windows/input_handling.rs", "rank": 94, "score": 2.574689954189478 }, { "content": " pub fn 
as_os_ctrl(&self) -> Option<Self> {\n\n match self.as_general_mod() {\n\n #[cfg(target_os = \"windows\")]\n\n Some(Self::Ctrl) => Some(Self::OsCtrl),\n\n\n\n #[cfg(target_os = \"macos\")]\n\n Some(Self::Cmd) => Some(Self::OsCtrl),\n\n\n\n _ => None\n\n }\n\n }\n\n\n\n /// Transforms OS specific keys into general keys\n\n pub fn normalized(&self) -> Self {\n\n match self {\n\n #[cfg(target_os = \"windows\")]\n\n Self::OsCtrl => Self::Ctrl,\n\n\n\n #[cfg(target_os = \"macos\")]\n\n Self::OsCtrl => Self::Cmd,\n", "file_path": "lib/src/io/key.rs", "rank": 95, "score": 2.2938845832685577 }, { "content": " pub fn handle(&mut self, input_id: Id) -> InputHandlerAdder<Id> {\n\n InputHandlerAdder::new(input_id, self)\n\n }\n\n\n\n pub fn update_inputs(&mut self, input_map: &InputMap<Id>) {\n\n log::trace! {\n\n target: Self::LOG_TARGET,\n\n \"update inputs\"\n\n }\n\n\n\n for (id, variants) in input_map.hash_map().iter() {\n\n let inputs: Vec<Input> = variants.clone().into();\n\n\n\n for input in inputs {\n\n log::trace! {\n\n target: Self::LOG_TARGET,\n\n \"\\t{:?}: {}\",\n\n id, input\n\n }\n\n\n", "file_path": "lib/src/io/input_handler.rs", "rank": 96, "score": 2.2938845832685577 }, { "content": " Axis(Axis),\n\n Action(Action)\n\n}\n\n\n\nimpl Input {\n\n /// Transforms OS specific keys to general keys\n\n pub fn normalized(&self) -> Self {\n\n match self {\n\n Self::Axis(axis) => Self::Axis(axis.normalized()),\n\n Self::Action(action) => Self::Action(action.normalized())\n\n }\n\n }\n\n\n\n /// Splits `Ctrl`, `Alt` or `Shift` to left and right\n\n pub fn split_general_mod(&self) -> Option<(Input, Input)> {\n\n match self {\n\n Self::Axis(axis) => match axis.axis_id() {\n\n AxisId::Key(key) => key.split_general_mod()\n\n .map(|(left, right)| {\n\n macro_rules! make_input {\n", "file_path": "lib/src/io/input.rs", "rank": 97, "score": 2.136109274646825 }, { "content": "}\n\n\n\nimpl KeyMods {\n\n pub fn as_virtual_keys(&self) -> Vec<VirtualKey> {\n\n macro_rules! 
vec_mods {\n\n (let $vec:ident = { $(($mods:expr, $repr:expr)),+ $(,)? }) => {\n\n let mut $vec = vec![];\n\n\n\n $(\n\n if self.contains($mods) {\n\n $vec.push($repr);\n\n }\n\n )+\n\n\n\n let $vec = $vec;\n\n };\n\n }\n\n\n\n vec_mods! {\n\n let mods = {\n", "file_path": "lib/src/io/key.rs", "rank": 99, "score": 2.0052797915559046 } ]
Rust
vendor/sgx_tstd/src/sys/rwlock.rs
mesainner/crates-io
b9e098e801cc7180d2d025cf495add70d9f7e9f5
use sgx_types::{SysError, sgx_thread_t, SGX_THREAD_T_NULL}; use sgx_trts::libc; use crate::thread; use crate::sync::SgxThreadMutex; use crate::sync::SgxThreadCondvar; use crate::sync::SgxThreadSpinlock; use core::cell::UnsafeCell; struct SgxThreadRwLockInner { readers_num: u32, writers_num: u32, busy: u32, writer_thread: sgx_thread_t, condvar: SgxThreadCondvar, mutex: SgxThreadMutex, spinlock: SgxThreadSpinlock, } impl SgxThreadRwLockInner { const fn new() -> Self { SgxThreadRwLockInner{ readers_num: 0, writers_num: 0, busy: 0, writer_thread: SGX_THREAD_T_NULL, condvar: SgxThreadCondvar::new(), mutex: SgxThreadMutex::new(), spinlock: SgxThreadSpinlock::new(), } } unsafe fn ref_busy(&mut self) -> SysError { let ret: SysError; self.spinlock.lock(); { if self.busy == u32::max_value() { ret = Err(libc::EAGAIN); } else { self.busy += 1; ret = Ok(()); } } self.spinlock.unlock(); ret } unsafe fn deref_busy(&mut self) -> SysError { let ret: SysError; self.spinlock.lock(); { if self.busy == 0 { ret = Err(libc::EAGAIN); } else { self.busy -= 1; ret = Ok(()); } } self.spinlock.unlock(); ret } unsafe fn read(&mut self) -> SysError { self.ref_busy()?; self.mutex.lock(); { if self.writer_thread == thread::rsgx_thread_self() { self.mutex.unlock(); self.deref_busy(); return Err(libc::EDEADLK); } if self.readers_num == u32::max_value() { self.mutex.unlock(); self.deref_busy(); return Err(libc::EAGAIN); } while self.writers_num > 0 { self.condvar.wait(&self.mutex); } self.readers_num += 1; } self.mutex.unlock(); self.deref_busy(); Ok(()) } unsafe fn try_read(&mut self) -> SysError { self.ref_busy()?; self.mutex.lock(); { let mut ret = Ok(()); if self.writer_thread == thread::rsgx_thread_self() { ret = Err(libc::EDEADLK); } else if self.readers_num == u32::max_value() { ret = Err(libc::EAGAIN); } else if self.writers_num > 0 { ret = Err(libc::EBUSY); } match ret { Ok(_) => {}, Err(e) => { self.mutex.unlock(); self.deref_busy(); return Err(e); } } self.readers_num += 1; } 
self.mutex.unlock(); self.deref_busy(); Ok(()) } unsafe fn write(&mut self) -> SysError { self.ref_busy()?; self.mutex.lock(); { if self.writer_thread == thread::rsgx_thread_self() { self.mutex.unlock(); self.deref_busy(); return Err(libc::EDEADLK); } if self.writers_num == u32::max_value() { self.mutex.unlock(); self.deref_busy(); return Err(libc::EAGAIN); } self.writers_num += 1; while self.readers_num > 0 { self.condvar.wait(&self.mutex); } while self.writer_thread != SGX_THREAD_T_NULL { self.condvar.wait(&self.mutex); } self.writer_thread = thread::rsgx_thread_self(); } self.mutex.unlock(); self.deref_busy(); Ok(()) } pub unsafe fn try_write(&mut self) -> SysError { self.ref_busy()?; self.mutex.lock(); { let mut ret = Ok(()); if self.writer_thread == thread::rsgx_thread_self() { ret = Err(libc::EDEADLK); } else if self.writers_num == u32::max_value() { ret = Err(libc::EAGAIN); } else if self.readers_num > 0 || self.writer_thread != SGX_THREAD_T_NULL { ret = Err(libc::EBUSY); } match ret { Ok(_) => {}, Err(e) => { self.mutex.unlock(); self.deref_busy(); return Err(e); } } self.writers_num += 1; self.writer_thread = thread::rsgx_thread_self(); } self.mutex.unlock(); self.deref_busy(); Ok(()) } unsafe fn read_unlock(&mut self) -> SysError { self.raw_unlock() } unsafe fn write_unlock(&mut self) -> SysError { self.raw_unlock() } unsafe fn raw_unlock(&mut self) -> SysError { self.mutex.lock(); { if self.readers_num > 0 { self.readers_num -= 1; if self.readers_num == 0 && self.writers_num > 0 { self.condvar.broadcast(); } } else { if self.writer_thread != thread::rsgx_thread_self() { self.mutex.unlock(); return Err(libc::EPERM); } self.writers_num -= 1; self.writer_thread = SGX_THREAD_T_NULL; if self.busy > 0 { self.condvar.broadcast(); } } } self.mutex.unlock(); Ok(()) } unsafe fn destroy(&mut self) -> SysError { self.mutex.lock(); { if self.readers_num > 0 || self.writers_num > 0 || self.busy > 0 { self.spinlock.unlock(); return Err(libc::EBUSY); } 
self.condvar.destroy(); self.mutex.destroy(); } self.spinlock.unlock(); Ok(()) } } pub struct SgxThreadRwLock { lock: UnsafeCell<SgxThreadRwLockInner>, } impl SgxThreadRwLock { pub const fn new() -> Self { SgxThreadRwLock { lock: UnsafeCell::new(SgxThreadRwLockInner::new()) } } #[inline] pub unsafe fn read(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.read() } #[inline] pub unsafe fn try_read(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.try_read() } #[inline] pub unsafe fn write(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.write() } #[inline] pub unsafe fn try_write(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.try_write() } #[inline] pub unsafe fn read_unlock(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.read_unlock() } #[inline] pub unsafe fn write_unlock(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.write_unlock() } #[inline] pub unsafe fn destroy(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.destroy() } }
use sgx_types::{SysError, sgx_thread_t, SGX_THREAD_T_NULL}; use sgx_trts::libc; use crate::thread; use crate::sync::SgxThreadMutex; use crate::sync::SgxThreadCondvar; use crate::sync::SgxThreadSpinlock; use core::cell::UnsafeCell; struct SgxThreadRwLockInner { readers_num: u32, writers_num: u32, busy: u32, writer_thread: sgx_thread_t, condvar: SgxThreadCondvar, mutex: SgxThreadMutex, spinlock: SgxThreadSpinlock, } impl SgxThreadRwLockInner { const fn new() -> Self { SgxThreadRwLockInner{ readers_num: 0, writers_num: 0, busy: 0, writer_thread: SGX_THREAD_T_NULL, condvar: SgxThreadCondvar::new(), mutex: SgxThreadMutex::new(), spinlock: SgxThreadSpinlock::new(), } } unsafe fn ref_busy(&mut self) -> SysError { let ret: SysError; self.spinlock.lock(); { if self.busy == u32::max_valu
unsafe fn deref_busy(&mut self) -> SysError { let ret: SysError; self.spinlock.lock(); { if self.busy == 0 { ret = Err(libc::EAGAIN); } else { self.busy -= 1; ret = Ok(()); } } self.spinlock.unlock(); ret } unsafe fn read(&mut self) -> SysError { self.ref_busy()?; self.mutex.lock(); { if self.writer_thread == thread::rsgx_thread_self() { self.mutex.unlock(); self.deref_busy(); return Err(libc::EDEADLK); } if self.readers_num == u32::max_value() { self.mutex.unlock(); self.deref_busy(); return Err(libc::EAGAIN); } while self.writers_num > 0 { self.condvar.wait(&self.mutex); } self.readers_num += 1; } self.mutex.unlock(); self.deref_busy(); Ok(()) } unsafe fn try_read(&mut self) -> SysError { self.ref_busy()?; self.mutex.lock(); { let mut ret = Ok(()); if self.writer_thread == thread::rsgx_thread_self() { ret = Err(libc::EDEADLK); } else if self.readers_num == u32::max_value() { ret = Err(libc::EAGAIN); } else if self.writers_num > 0 { ret = Err(libc::EBUSY); } match ret { Ok(_) => {}, Err(e) => { self.mutex.unlock(); self.deref_busy(); return Err(e); } } self.readers_num += 1; } self.mutex.unlock(); self.deref_busy(); Ok(()) } unsafe fn write(&mut self) -> SysError { self.ref_busy()?; self.mutex.lock(); { if self.writer_thread == thread::rsgx_thread_self() { self.mutex.unlock(); self.deref_busy(); return Err(libc::EDEADLK); } if self.writers_num == u32::max_value() { self.mutex.unlock(); self.deref_busy(); return Err(libc::EAGAIN); } self.writers_num += 1; while self.readers_num > 0 { self.condvar.wait(&self.mutex); } while self.writer_thread != SGX_THREAD_T_NULL { self.condvar.wait(&self.mutex); } self.writer_thread = thread::rsgx_thread_self(); } self.mutex.unlock(); self.deref_busy(); Ok(()) } pub unsafe fn try_write(&mut self) -> SysError { self.ref_busy()?; self.mutex.lock(); { let mut ret = Ok(()); if self.writer_thread == thread::rsgx_thread_self() { ret = Err(libc::EDEADLK); } else if self.writers_num == u32::max_value() { ret = Err(libc::EAGAIN); } else if 
self.readers_num > 0 || self.writer_thread != SGX_THREAD_T_NULL { ret = Err(libc::EBUSY); } match ret { Ok(_) => {}, Err(e) => { self.mutex.unlock(); self.deref_busy(); return Err(e); } } self.writers_num += 1; self.writer_thread = thread::rsgx_thread_self(); } self.mutex.unlock(); self.deref_busy(); Ok(()) } unsafe fn read_unlock(&mut self) -> SysError { self.raw_unlock() } unsafe fn write_unlock(&mut self) -> SysError { self.raw_unlock() } unsafe fn raw_unlock(&mut self) -> SysError { self.mutex.lock(); { if self.readers_num > 0 { self.readers_num -= 1; if self.readers_num == 0 && self.writers_num > 0 { self.condvar.broadcast(); } } else { if self.writer_thread != thread::rsgx_thread_self() { self.mutex.unlock(); return Err(libc::EPERM); } self.writers_num -= 1; self.writer_thread = SGX_THREAD_T_NULL; if self.busy > 0 { self.condvar.broadcast(); } } } self.mutex.unlock(); Ok(()) } unsafe fn destroy(&mut self) -> SysError { self.mutex.lock(); { if self.readers_num > 0 || self.writers_num > 0 || self.busy > 0 { self.spinlock.unlock(); return Err(libc::EBUSY); } self.condvar.destroy(); self.mutex.destroy(); } self.spinlock.unlock(); Ok(()) } } pub struct SgxThreadRwLock { lock: UnsafeCell<SgxThreadRwLockInner>, } impl SgxThreadRwLock { pub const fn new() -> Self { SgxThreadRwLock { lock: UnsafeCell::new(SgxThreadRwLockInner::new()) } } #[inline] pub unsafe fn read(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.read() } #[inline] pub unsafe fn try_read(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.try_read() } #[inline] pub unsafe fn write(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.write() } #[inline] pub unsafe fn try_write(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.try_write() } #[inline] pub unsafe fn read_unlock(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut 
*self.lock.get(); rwlock.read_unlock() } #[inline] pub unsafe fn write_unlock(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.write_unlock() } #[inline] pub unsafe fn destroy(&self) -> SysError { let rwlock: &mut SgxThreadRwLockInner = &mut *self.lock.get(); rwlock.destroy() } }
e() { ret = Err(libc::EAGAIN); } else { self.busy += 1; ret = Ok(()); } } self.spinlock.unlock(); ret }
function_block-function_prefixed
[]
Rust
planus-cli/src/util/sorted_map.rs
OliverEvans96/planus
c24182f57eafe15e416d240f805a9d30d652c056
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)] pub struct SortedMap<K, V>(pub Vec<(K, V)>); pub struct SortedSet<K>(SortedMap<K, ()>); impl<K, V> Default for SortedMap<K, V> { fn default() -> Self { Self::new() } } impl<K> Default for SortedSet<K> { fn default() -> Self { Self::new() } } impl<K, V> SortedMap<K, V> { pub fn new() -> Self { Self(Vec::new()) } pub fn clear(&mut self) { self.0.clear(); } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.len() == 0 } pub fn capacity(&self) -> usize { self.0.capacity() } pub fn iter(&self) -> impl Iterator<Item = &(K, V)> { self.0.iter() } pub fn values(&self) -> impl Iterator<Item = &V> { self.0.iter().map(|(_, v)| v) } pub fn values_mut(&mut self) -> impl Iterator<Item = &mut V> { self.0.iter_mut().map(|(_, v)| v) } pub fn first(&self) -> Option<&(K, V)> { self.0.first() } pub fn first_value(&self) -> Option<&V> { self.first().map(|(_k, v)| v) } pub fn last(&self) -> Option<&(K, V)> { self.0.last() } pub fn last_value(&self) -> Option<&V> { self.last().map(|(_k, v)| v) } } impl<K: Ord, V> SortedMap<K, V> { pub fn index_of(&self, k: &K) -> Option<usize> { self.0.binary_search_by_key(&k, |(k, _v)| k).ok() } pub fn get(&self, k: &K) -> Option<&V> { let index = self.0.binary_search_by_key(&k, |(k, _v)| k).ok()?; Some(&self.0[index].1) } pub fn insert(&mut self, key: K, value: V) -> Option<V> { match self.0.binary_search_by_key(&&key, |(k, _v)| k) { Ok(index) => Some(std::mem::replace(&mut self.0[index].1, value)), Err(index) => { self.0.insert(index, (key, value)); None } } } pub fn entry(&mut self, key: K) -> sorted_map::Entry<'_, K, V> { match self.0.binary_search_by_key(&&key, |(k, _v)| k) { Ok(index) => { sorted_map::Entry::Occupied(sorted_map::OccupiedEntry { map: self, index }) } Err(index) => sorted_map::Entry::Vacant(sorted_map::VacantEntry { map: self, key, index, }), } } } #[allow(clippy::module_inception)] pub mod sorted_map { pub enum Entry<'a, K: 'a, V: 'a> { 
Occupied(OccupiedEntry<'a, K, V>), Vacant(VacantEntry<'a, K, V>), } pub struct OccupiedEntry<'a, K: 'a, V: 'a> { pub(super) map: &'a mut super::SortedMap<K, V>, pub(super) index: usize, } pub struct VacantEntry<'a, K: 'a, V: 'a> { pub(super) map: &'a mut super::SortedMap<K, V>, pub(super) key: K, pub(super) index: usize, } impl<'a, K: 'a, V: 'a> Entry<'a, K, V> { #[inline] pub fn or_insert(self, default: V) -> &'a mut V { match self { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(default), } } #[inline] pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V { match self { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(default()), } } } impl<'a, K, V: Default> Entry<'a, K, V> { #[inline] pub fn or_default(self) -> &'a mut V { match self { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(Default::default()), } } } impl<'a, K: 'a, V: 'a> OccupiedEntry<'a, K, V> { pub fn into_mut(self) -> &'a mut V { &mut self.map.0[self.index].1 } pub fn get_mut(&mut self) -> &mut V { &mut self.map.0[self.index].1 } } impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> { pub fn insert(self, value: V) -> &'a mut V { self.map.0.insert(self.index, (self.key, value)); &mut self.map.0[self.index].1 } } } impl<K> SortedSet<K> { pub fn new() -> Self { Self(SortedMap::new()) } pub fn clear(&mut self) { self.0.clear(); } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.len() == 0 } pub fn capacity(&self) -> usize { self.0.capacity() } pub fn iter(&self) -> impl Iterator<Item = &K> { self.0.iter().map(|(k, ())| k) } pub fn first(&self) -> Option<&K> { self.0.first().map(|(k, ())| k) } pub fn last(&self) -> Option<&K> { self.0.last().map(|(k, ())| k) } } impl<K: Ord> SortedSet<K> { pub fn index_of(&self, k: &K) -> Option<usize> { self.0.index_of(k) } pub fn insert(&mut self, key: K) -> bool { self.0.insert(key, ()).is_none() } pub fn contains(&self, k: 
&K) -> bool { self.0.get(k).is_some() } }
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)] pub struct SortedMap<K, V>(pub Vec<(K, V)>); pub struct SortedSet<K>(SortedMap<K, ()>); impl<K, V> Default for SortedMap<K, V> { fn default() -> Self { Self::new() } } impl<K> Default for SortedSet<K> { fn default() -> Self { Self::new() } } impl<K, V> SortedMap<K, V> { pub fn new() -> Self { Self(Vec::new()) } pub fn clear(&mut self) { self.0.clear(); } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.len() == 0 } pub fn capacity(&self) -> usize { self.0.capacity() } pub fn iter(&self) -> impl Iterator<Item = &(K, V)> { self.0.iter() } pub fn values(&self) -> impl Iterator<Item = &V> { self.0.iter().map(|(_, v)| v) } pub fn values_mut(&mut self) -> impl Iterator<Item = &mut V> { self.0.iter_mut().map(|(_, v)| v) } pub fn first(&self) -> Option<&(K, V)> { self.0.first() } pub fn first_value(&self) -> Option<&V> { self.first().map(|(_k, v)| v) } pub fn last(&self) -> Option<&(K, V)> { self.0.last() } pub fn last_value(&self) -> Option<&V> { self.last().map(|(_k, v)| v) } } impl<K: Ord, V> SortedMap<K, V> { pub fn index_of(&self, k: &K) -> Option<usize> { self.0.binary_search_by_key(&k, |(k, _v)| k).ok() } pub fn get(&self, k: &K) -> Option<&V> { let index = self.0.binary_search_by_key(&k, |(k, _v)| k).ok()?; Some(&self.0[index].1) } pub fn insert(&mut self, key: K, value: V) -> Option<V> { match self.0.binary_search_by_key(&&key, |(k, _v)| k) { Ok(index) => Some(std::mem::replace(&mut self.0[index].1, value)), Err(index) => { self.0.insert(index, (key, value)); None } } } pub fn entry(&mut self, key: K) -> sorted_map::Entry<'_, K, V> { match self.0.binary_search_by_key(&&key, |(k, _v)| k) { Ok(index) => { sorted_map::Entry::Occupied(sorted_map::OccupiedEntry { map: self, index }) } Err(index) => sorted_map::Entry::Vacant(sorted_map::VacantEntry { map: self, key, index, }), } } } #[allow(clippy::module_inception)] pub mod sorted_map { pub enum Entry<'a, K: 'a, V: 'a> { 
Occupied(OccupiedEntry<'a, K, V>), Vacant(VacantEntry<'a, K, V>), } pub struct OccupiedEntry<'a, K: 'a, V: 'a> { pub(super) map: &'a mut super::SortedMap<K, V>, pub(super) index: usize, } pub struct VacantEntry<'a, K: 'a, V: 'a> { pub(super) map: &'a mut super::SortedMap<K, V>, pub(super) key: K, pub(super) index: usize, } impl<'a, K: 'a, V: 'a> Entry<'a, K, V> { #[inline] pub fn or_insert(self, default: V) -> &'a mut V { match self { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(default), } } #[inline] pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V { match self { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(default()), } } } impl<'a, K, V: Default> Entry<'a, K, V> { #[inline]
} impl<'a, K: 'a, V: 'a> OccupiedEntry<'a, K, V> { pub fn into_mut(self) -> &'a mut V { &mut self.map.0[self.index].1 } pub fn get_mut(&mut self) -> &mut V { &mut self.map.0[self.index].1 } } impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> { pub fn insert(self, value: V) -> &'a mut V { self.map.0.insert(self.index, (self.key, value)); &mut self.map.0[self.index].1 } } } impl<K> SortedSet<K> { pub fn new() -> Self { Self(SortedMap::new()) } pub fn clear(&mut self) { self.0.clear(); } pub fn len(&self) -> usize { self.0.len() } pub fn is_empty(&self) -> bool { self.0.len() == 0 } pub fn capacity(&self) -> usize { self.0.capacity() } pub fn iter(&self) -> impl Iterator<Item = &K> { self.0.iter().map(|(k, ())| k) } pub fn first(&self) -> Option<&K> { self.0.first().map(|(k, ())| k) } pub fn last(&self) -> Option<&K> { self.0.last().map(|(k, ())| k) } } impl<K: Ord> SortedSet<K> { pub fn index_of(&self, k: &K) -> Option<usize> { self.0.index_of(k) } pub fn insert(&mut self, key: K) -> bool { self.0.insert(key, ()).is_none() } pub fn contains(&self, k: &K) -> bool { self.0.get(k).is_some() } }
pub fn or_default(self) -> &'a mut V { match self { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(Default::default()), } }
function_block-full_function
[ { "content": "pub fn align_up(value: u32, alignment: u32) -> u32 {\n\n ((value + alignment - 1) / alignment) * alignment\n\n}\n", "file_path": "planus-cli/src/util/mod.rs", "rank": 0, "score": 163890.73865225195 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"fib 20\", |b| b.iter(|| 2 + 2));\n\n}\n\n\n", "file_path": "test/rust/benches/benchmark.rs", "rank": 1, "score": 161532.84141183394 }, { "content": "fn vtable_index_to_offset(vtable_index: usize) -> usize {\n\n 2 * vtable_index // 2 bytes per index, skip the vtable size and object size\n\n}\n", "file_path": "planus/src/table_writer.rs", "rank": 2, "score": 141689.5809479158 }, { "content": "pub fn sp(l: ByteIndex, r: ByteIndex) -> Span {\n\n Span::new(l, r)\n\n}\n", "file_path": "planus-cli/src/grammar_helper.rs", "rank": 3, "score": 141090.7850796772 }, { "content": "fn array_from_buffer(\n\n buffer: crate::slice_helpers::SliceWithStartOffset<'_>,\n\n offset: usize,\n\n) -> core::result::Result<\n\n (crate::slice_helpers::SliceWithStartOffset<'_>, usize),\n\n crate::errors::ErrorKind,\n\n> {\n\n let value: u32 = crate::traits::TableRead::from_buffer(buffer, offset)?;\n\n let vector_offset = offset\n\n .checked_add(value as usize)\n\n .ok_or(crate::errors::ErrorKind::InvalidOffset)?;\n\n let buffer = buffer.advance(vector_offset)?;\n\n let len: u32 = crate::traits::TableRead::from_buffer(buffer, 0)?;\n\n Ok((buffer.advance(4)?, len as usize))\n\n}\n", "file_path": "planus/src/impls/mod.rs", "rank": 4, "score": 138074.72733546776 }, { "content": "pub fn normalize_path(path: &Path) -> PathBuf {\n\n let mut components = path.components().peekable();\n\n let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {\n\n components.next();\n\n PathBuf::from(c.as_os_str())\n\n } else {\n\n PathBuf::new()\n\n };\n\n\n\n for component in components {\n\n match component {\n\n Component::Prefix(..) 
=> unreachable!(),\n\n Component::RootDir => {\n\n ret.push(component.as_os_str());\n\n }\n\n Component::CurDir => {}\n\n Component::ParentDir => {\n\n if let Some(Component::Normal(_)) = ret.components().last() {\n\n ret.pop();\n\n } else {\n", "file_path": "planus-cli/src/util/mod.rs", "rank": 5, "score": 133737.5944145324 }, { "content": "pub fn translate_files<P: AsRef<std::path::Path>>(\n\n ctx: &mut crate::ctx::Ctx,\n\n input_files: &[P],\n\n) -> types::Declarations {\n\n let mut ast_map = crate::ast_map::AstMap::default();\n\n for file in input_files {\n\n if let Some(file_id) = ctx.add_file(&file, []) {\n\n ast_map.add_files_recursively(ctx, file_id);\n\n }\n\n }\n\n\n\n let mut translator = translation::Translator::new(ctx, ast_map.reachability());\n\n for schema in ast_map.iter() {\n\n translator.add_schema(schema);\n\n }\n\n\n\n translator.finish()\n\n}\n", "file_path": "planus-cli/src/intermediate_language/mod.rs", "rank": 6, "score": 125528.32666223033 }, { "content": "pub trait WriteAsDefault<P: Primitive, D: ?Sized> {\n\n #[doc(hidden)]\n\n type Prepared: WriteAsPrimitive<P>;\n\n #[doc(hidden)]\n\n fn prepare(&self, builder: &mut Builder, default: &D) -> Option<Self::Prepared>;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 7, "score": 119805.54191800131 }, { "content": "pub fn main() {\n\n let path = std::env::args().nth(1).unwrap();\n\n let data = std::fs::read(path).unwrap();\n\n let monster = MonsterRef::read_as_root(&data).unwrap();\n\n println!(\"{:#?}\", monster);\n\n}\n", "file_path": "examples/rust/examples/print.rs", "rank": 8, "score": 118822.89960222285 }, { "content": "pub fn main() {\n\n let input_path = std::env::args()\n\n .nth(1)\n\n .expect(\"Usage: from_json (input file) (output file)\");\n\n let output_path = std::env::args()\n\n .nth(2)\n\n .expect(\"Usage: from_json (input file) (output file)\");\n\n let input_data = std::fs::read_to_string(input_path).unwrap();\n\n\n\n let monster: Monster = 
serde_json::from_str(&input_data).unwrap();\n\n let mut builder = Builder::new();\n\n let output_data = builder.finish(monster, None);\n\n\n\n std::fs::write(output_path, output_data).unwrap();\n\n}\n", "file_path": "examples/rust/examples/from_json.rs", "rank": 9, "score": 118822.89960222285 }, { "content": "pub fn main() {\n\n let path = std::env::args().nth(1).unwrap();\n\n let data = std::fs::read(path).unwrap();\n\n let monster: Monster = MonsterRef::read_as_root(&data).unwrap().try_into().unwrap();\n\n\n\n println!(\"{}\", serde_json::to_string_pretty(&monster).unwrap());\n\n}\n", "file_path": "examples/rust/examples/to_json.rs", "rank": 10, "score": 118822.89960222285 }, { "content": "fn bench_serialize(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Serialize\");\n\n for i in [10000].into_iter() {\n\n let mut builder = Builder::new();\n\n group.bench_with_input(BenchmarkId::new(\"planus\", i), &i, |b, i| {\n\n b.iter(|| serialize_planus(&mut builder, *i))\n\n });\n\n let mut builder = flatbuffers::FlatBufferBuilder::new();\n\n group.bench_with_input(BenchmarkId::new(\"flatbuffers\", i), &i, |b, i| {\n\n b.iter(|| serialize_flatbuffers(&mut builder, *i))\n\n });\n\n }\n\n group.finish();\n\n}\n\n\n", "file_path": "test/rust/benches/benchmark.rs", "rank": 11, "score": 112276.8200822997 }, { "content": "struct FormattedRelativeNamespace<'a, B: ?Sized + Backend, F> {\n\n super_name: &'a str,\n\n self_name: Option<&'a str>,\n\n separator: &'a str,\n\n value: &'a RelativeNamespace<'a, B>,\n\n output_shared_ancestor: bool,\n\n name: F,\n\n trailing_part: &'a str,\n\n}\n\n\n\nimpl<'a, B: ?Sized + Backend> RelativeNamespace<'a, B> {\n\n pub fn format(\n\n &'a self,\n\n output_shared_ancestor: bool,\n\n super_name: &'a str,\n\n self_name: Option<&'a str>,\n\n separator: &'a str,\n\n name: impl 'a + Fn(&B::NamespaceInfo) -> &str,\n\n trailing_part: &'a str,\n\n ) -> impl 'a + std::fmt::Display {\n", "file_path": "planus-cli/src/codegen/backend.rs", 
"rank": 12, "score": 105697.14975552817 }, { "content": "fn indent_string(do_indent: bool) -> &'static str {\n\n if do_indent {\n\n INDENT_STRING\n\n } else {\n\n \"\"\n\n }\n\n}\n\n\n\npub struct PrettyPrinter<'writer, 'src, T> {\n\n is_at_new_paragraph: bool,\n\n writer: &'writer mut T,\n\n source: &'src str,\n\n}\n\n\n\nimpl<'writer, 'src, T: std::fmt::Write> PrettyPrinter<'writer, 'src, T> {\n\n fn write_standalone_comment(\n\n &mut self,\n\n indent: bool,\n\n comment: &Comment<'_>,\n\n ) -> std::fmt::Result {\n", "file_path": "planus-cli/src/cst/pretty_print.rs", "rank": 13, "score": 103266.02021518956 }, { "content": "fn serialize_planus(builder: &mut Builder, iterations: u64) {\n\n for _ in 0..iterations {\n\n builder.clear();\n\n let table3: &[Offset<MyTable3>] = &[MyTable3::create(builder, 4)];\n\n let w = MyTable3::create(builder, 1337);\n\n let w = HelloUnion::create_y(builder, w);\n\n let offset = MyTable::create(builder, 3, true, MyEnumse::Banaaaaaaaan, table3, Some(w));\n\n let offset = MyTable2::create(\n\n builder,\n\n 1,\n\n MyStruct {\n\n foo: 2,\n\n bar: true,\n\n baz: MyEnumse::Apple,\n\n },\n\n offset,\n\n );\n\n builder.finish(offset, None);\n\n }\n\n}\n\n\n\ncriterion_group!(benches, bench_serialize);\n\ncriterion_main!(benches);\n\n */\n", "file_path": "test/rust/benches/benchmark.rs", "rank": 14, "score": 102421.91841705807 }, { "content": "pub fn generate_code<P: AsRef<Path>>(\n\n input_files: &[P],\n\n output_filename: &str,\n\n) -> anyhow::Result<()> {\n\n let mut ctx = Ctx::default();\n\n let declarations = crate::intermediate_language::translate_files(&mut ctx, input_files);\n\n\n\n if ctx.has_errors() {\n\n anyhow::bail!(\"Bailing because of errors\")\n\n }\n\n\n\n let output = super::backend_translation::run_backend(&mut RustBackend, &declarations);\n\n\n\n let res = super::templates::rust::Namespace(&output).render().unwrap();\n\n let mut file = std::fs::File::create(&output_filename)?;\n\n file.write_all(res.as_bytes())?;\n\n 
file.flush()?;\n\n\n\n format_file(output_filename)?;\n\n Ok(())\n\n}\n", "file_path": "planus-cli/src/codegen/rust.rs", "rank": 15, "score": 101753.82607046464 }, { "content": "pub fn hexdump_flatbuffer_table(buf: &[u8]) -> String {\n\n let mut out = String::new();\n\n let obj_start = u32::from_le_bytes(buf[..4].try_into().unwrap()) as usize;\n\n let vtable_offset =\n\n i32::from_le_bytes(buf[obj_start..obj_start + 4].try_into().unwrap()) as isize as usize;\n\n let vtable_start = obj_start.wrapping_sub(vtable_offset);\n\n assert!(vtable_start % 2 == 0);\n\n let vtable_size =\n\n u16::from_le_bytes(buf[vtable_start..vtable_start + 2].try_into().unwrap()) as usize;\n\n let obj_size =\n\n u16::from_le_bytes(buf[vtable_start + 2..vtable_start + 4].try_into().unwrap()) as usize;\n\n assert!(vtable_size >= 4 && vtable_size % 2 == 0);\n\n assert!(obj_size >= 4);\n\n\n\n let vtable_end = vtable_start.checked_add(vtable_size).unwrap();\n\n let obj_end = obj_start.checked_add(obj_size).unwrap();\n\n\n\n assert!(vtable_end <= obj_start || obj_end <= vtable_start);\n\n\n\n writeln!(out, \"obj @ 0x{:02x}..0x{:02x}\", obj_start, obj_end).unwrap();\n", "file_path": "test/rust/src/hexdump.rs", "rank": 16, "score": 101753.82607046464 }, { "content": "pub fn run_backend<B: ?Sized + Backend>(\n\n backend: &mut B,\n\n declarations: &Declarations,\n\n) -> BackendNamespace<B> {\n\n let keywords: Keywords = B::KEYWORDS.iter().copied().collect();\n\n let mut global_names = Names::new(&keywords);\n\n let global_names = &mut global_names;\n\n let mut namespace_names = (0..declarations.namespaces.len())\n\n .map(|_| Names::new(&keywords))\n\n .collect::<Vec<_>>();\n\n let mut declaration_names = (0..declarations.declarations.len())\n\n .map(|_| Names::new(&keywords))\n\n .collect::<Vec<_>>();\n\n let translated_namespaces = declarations\n\n .namespaces\n\n .iter()\n\n .zip(&mut namespace_names)\n\n .map(|((namespace_name, namespace), namespace_names)| {\n\n backend.generate_namespace(\n\n 
&mut NamespaceNames {\n", "file_path": "planus-cli/src/codegen/backend_translation.rs", "rank": 17, "score": 100131.73378875152 }, { "content": "#[doc(hidden)]\n\npub trait Primitive {\n\n const ALIGNMENT: usize;\n\n const ALIGNMENT_MASK: usize = Self::ALIGNMENT - 1;\n\n const SIZE: usize;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 18, "score": 99654.94193820748 }, { "content": "pub fn pretty_print<W: std::fmt::Write>(\n\n source: &str,\n\n schema: &Schema<'_>,\n\n writer: &mut W,\n\n) -> std::fmt::Result {\n\n let mut printer = PrettyPrinter {\n\n is_at_new_paragraph: true,\n\n writer,\n\n source,\n\n };\n\n\n\n let mut last_declaration_requires_paragraph = false;\n\n for decl in &schema.declarations {\n\n if last_declaration_requires_paragraph {\n\n printer.begin_new_paragraph()?;\n\n }\n\n last_declaration_requires_paragraph = printer.write_declaration(decl)?;\n\n }\n\n printer.write_token_meta(false, true, &schema.end_of_stream.token_metadata)?;\n\n Ok(())\n\n}\n", "file_path": "planus-cli/src/cst/pretty_print.rs", "rank": 19, "score": 96641.81905821638 }, { "content": "fn serialize_flatbuffers(builder: &mut flatbuffers::FlatBufferBuilder, iterations: u64) {\n\n for _ in 0..iterations {\n\n builder.reset();\n\n let offset = flatc::MyTable3::create(builder, &flatc::MyTable3Args { x: 4 });\n\n let offset = builder.create_vector(&[offset]);\n\n let w_offset =\n\n flatc::MyTable3::create(builder, &flatc::MyTable3Args { x: 1337 }).as_union_value();\n\n let offset = flatc::MyTable::create(\n\n builder,\n\n &flatc::MyTableArgs {\n\n x: 3,\n\n y: true,\n\n z: Some(offset),\n\n numse: flatc::MyEnumse::Banaaaaaaaan,\n\n w_type: flatc::HelloUnion::y,\n\n w: Some(w_offset),\n\n },\n\n );\n\n let offset = flatc::MyTable2::create(\n\n builder,\n", "file_path": "test/rust/benches/benchmark.rs", "rank": 20, "score": 95900.77542524462 }, { "content": "pub trait VectorRead<'buf> {\n\n #[doc(hidden)]\n\n const STRIDE: usize;\n\n #[doc(hidden)]\n\n unsafe fn 
from_buffer(buffer: SliceWithStartOffset<'buf>, offset: usize) -> Self;\n\n}\n\n\n\n/// This trait is a hack to get around the coherence restriction.\n\n/// Ideally we would want to be able to do an `impl VectorRead<'buf> for planus::Result<MyType>`\n\n/// in our generated code, however instead we do something like this:\n\n/// impl<T: VectorReadInner<'buf>, E> VectorRead<'buf> for Result<T, E>\n", "file_path": "planus/src/traits/mod.rs", "rank": 21, "score": 94047.93584581275 }, { "content": "pub trait VectorWrite<P> {\n\n #[doc(hidden)]\n\n const STRIDE: usize;\n\n #[doc(hidden)]\n\n type Value: WriteAsPrimitive<P> + Sized;\n\n #[doc(hidden)]\n\n fn prepare(&self, builder: &mut Builder) -> Self::Value;\n\n #[doc(hidden)]\n\n unsafe fn write_values(\n\n values: &[Self::Value],\n\n bytes: *mut MaybeUninit<u8>,\n\n buffer_position: u32,\n\n );\n\n}\n", "file_path": "planus/src/traits/mod.rs", "rank": 22, "score": 94047.93584581275 }, { "content": "#[doc(hidden)]\n\npub trait WriteAsPrimitive<P> {\n\n fn write<const N: usize>(&self, cursor: Cursor<'_, N>, buffer_position: u32);\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 23, "score": 94047.93584581275 }, { "content": "pub trait ReadAsRoot<'a>: Sized {\n\n fn read_as_root(slice: &'a [u8]) -> Result<Self>;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 24, "score": 92725.35002727329 }, { "content": "pub trait WriteAs<P: Primitive> {\n\n #[doc(hidden)]\n\n type Prepared: WriteAsPrimitive<P>;\n\n #[doc(hidden)]\n\n fn prepare(&self, builder: &mut Builder) -> Self::Prepared;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 25, "score": 92725.35002727329 }, { "content": "pub fn check_ast(ctx: &Ctx, schema: &ast::Schema) {\n\n for native_include in &schema.native_includes {\n\n ctx.emit_error(\n\n ErrorKind::NOT_SUPPORTED,\n\n [Label::primary(schema.file_id, native_include.span)],\n\n Some(\"Native includes are not supported\"),\n\n );\n\n }\n\n\n\n for attribute in 
&schema.attributes {\n\n ctx.emit_error(\n\n ErrorKind::NOT_SUPPORTED,\n\n [Label::primary(schema.file_id, attribute.span)],\n\n Some(\"User attributes are not supported\"),\n\n );\n\n }\n\n\n\n for decl in schema.type_declarations.values() {\n\n match &decl.kind {\n\n ast::TypeDeclarationKind::RpcService(_) => {\n", "file_path": "planus-cli/src/intermediate_language/checks/compatibility.rs", "rank": 26, "score": 92025.05842850388 }, { "content": "fn reserve_rust_enum_variant_name(\n\n path: &str,\n\n binding_kind: &'static str,\n\n declaration_names: &mut DeclarationNames<'_, '_>,\n\n) -> String {\n\n let name = path.to_upper_camel_case().into();\n\n declaration_names\n\n .declaration_names\n\n .try_reserve_repeat(binding_kind, name, '_')\n\n .into()\n\n}\n\n\n", "file_path": "planus-cli/src/codegen/rust.rs", "rank": 27, "score": 91507.31568943878 }, { "content": "pub trait WriteAsOffset<T: ?Sized> {\n\n #[doc(hidden)]\n\n fn prepare(&self, builder: &mut Builder) -> Offset<T>;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 28, "score": 90840.34692963466 }, { "content": "pub trait WriteAsOptional<P: Primitive> {\n\n #[doc(hidden)]\n\n type Prepared: WriteAsPrimitive<P>;\n\n #[doc(hidden)]\n\n fn prepare(&self, builder: &mut Builder) -> Option<Self::Prepared>;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 29, "score": 90840.34692963466 }, { "content": "pub trait WriteAsUnion<T: ?Sized> {\n\n #[doc(hidden)]\n\n fn prepare(&self, builder: &mut Builder) -> UnionOffset<T>;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 30, "score": 90840.34692963466 }, { "content": "#[doc(hidden)]\n\npub trait TableRead<'buf>: Sized {\n\n fn from_buffer(\n\n buffer: SliceWithStartOffset<'buf>,\n\n offset: usize,\n\n ) -> core::result::Result<Self, ErrorKind>;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 31, "score": 90840.34692963466 }, { "content": "fn reserve_module_name(path: &str, namespace_names: &mut NamespaceNames<'_, 
'_>) -> String {\n\n let name = path.to_snake_case().into();\n\n namespace_names\n\n .namespace_names\n\n .try_reserve_repeat(BINDING_KIND_TYPES, name, '_')\n\n .into()\n\n}\n\n\n", "file_path": "planus-cli/src/codegen/rust.rs", "rank": 32, "score": 89965.40234629714 }, { "content": "fn reserve_type_name(path: &str, declaration_names: &mut DeclarationNames<'_, '_>) -> String {\n\n let name = path.to_upper_camel_case().into();\n\n declaration_names\n\n .declaration_names\n\n .try_reserve_repeat(BINDING_KIND_TYPES, name, '_')\n\n .into()\n\n}\n\n\n", "file_path": "planus-cli/src/codegen/rust.rs", "rank": 33, "score": 89965.40234629714 }, { "content": "fn check<T, const X: usize, const Y: usize, const Z: usize>() {\n\n let _: () = SizeCheck::<T, X, Y, Z>::CHECK;\n\n // Do the same checks at run-time, so even if rustc changes to allow ignore\n\n // our compile-time errors, we will at least not create UB\n\n check_impl::<T, X, Y, Z>();\n\n}\n\n\n\npub(crate) fn split_mut<'a, T, const X: usize, const Y: usize, const Z: usize>(\n\n x: &'a mut [T; X],\n\n) -> (&'a mut [T; Y], &'a mut [T; Z]) {\n\n check::<T, X, Y, Z>();\n\n\n\n let wrapper: &'a mut Wrapper<T, Y, Z> = unsafe { core::mem::transmute(x) };\n\n (&mut wrapper.0, &mut wrapper.1)\n\n}\n", "file_path": "array-init-cursor/src/util.rs", "rank": 34, "score": 89740.91700003613 }, { "content": "#[doc(hidden)]\n\npub trait VectorReadInner<'buf>: Sized {\n\n #[doc(hidden)]\n\n type Error: Sized;\n\n #[doc(hidden)]\n\n const STRIDE: usize;\n\n #[doc(hidden)]\n\n unsafe fn from_buffer(\n\n buffer: SliceWithStartOffset<'buf>,\n\n offset: usize,\n\n ) -> core::result::Result<Self, Self::Error>;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 35, "score": 89048.74232225743 }, { "content": "pub trait WriteAsOptionalUnion<T: ?Sized> {\n\n #[doc(hidden)]\n\n fn prepare(&self, builder: &mut Builder) -> Option<UnionOffset<T>>;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 36, "score": 89048.74232225743 
}, { "content": "#[doc(hidden)]\n\npub trait TableReadUnion<'buf>: Sized {\n\n fn from_buffer(\n\n buffer: SliceWithStartOffset<'buf>,\n\n offset: usize,\n\n tag: u8,\n\n ) -> core::result::Result<Self, ErrorKind>;\n\n}\n\n\n", "file_path": "planus/src/traits/mod.rs", "rank": 37, "score": 89048.74232225743 }, { "content": "pub fn format_file<P: AsRef<Path>>(path: P) -> std::io::Result<()> {\n\n let output = Command::new(\"rustfmt\")\n\n .args(&[path.as_ref().as_os_str()])\n\n .output()?;\n\n\n\n if !output.stderr.is_empty() {\n\n println!(\"{}\", String::from_utf8(output.stderr).unwrap());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "planus-cli/src/codegen/rust.rs", "rank": 38, "score": 86479.5998001495 }, { "content": "#[repr(C)]\n\nstruct Wrapper<T, const Y: usize, const Z: usize>([T; Y], [T; Z]);\n\n\n\nconst fn check_impl<T, const X: usize, const Y: usize, const Z: usize>() {\n\n // Check that the array sizes match\n\n assert!(X == Y + Z, \"Array cannot be split: sizes don't match\");\n\n assert!(\n\n usize::checked_add(Y, Z).is_some(),\n\n \"Array cannot be split: length would overflow\"\n\n );\n\n // Make doubly sure that nothing funky is going on with the memory representations\n\n assert!(core::mem::size_of::<Wrapper<T, Y, Z>>() == core::mem::size_of::<[T; X]>());\n\n assert!(core::mem::align_of::<Wrapper<T, Y, Z>>() == core::mem::align_of::<[T; X]>());\n\n}\n\n\n", "file_path": "array-init-cursor/src/util.rs", "rank": 39, "score": 85635.33642635116 }, { "content": "pub fn convert(ctx: &Ctx, file_id: FileId, schema: cst::Schema<'_>) -> Schema {\n\n let mut converter = CstConverter {\n\n schema: Schema::new(file_id),\n\n ctx,\n\n current_span: schema.span,\n\n };\n\n for decl in &schema.declarations {\n\n converter.convert_declaration(decl);\n\n }\n\n converter.schema\n\n}\n\n\n\nimpl<'ctx> CstConverter<'ctx> {\n\n fn add_error(&self, error_type: ErrorKind) {\n\n self.schema\n\n .errors_seen\n\n .set(self.schema.errors_seen.get() | error_type);\n\n }\n\n 
fn emit_error(\n\n &self,\n", "file_path": "planus-cli/src/ast/convert.rs", "rank": 40, "score": 84041.32939801604 }, { "content": "fn translate_type_index<'a, B: ?Sized + Backend>(\n\n translation_context: &DeclarationTranslationContext<'a, '_, B>,\n\n declarations: &'a Declarations,\n\n full_translated_decls: &'a VecMap<BackendDeclaration<B>>,\n\n index: usize,\n\n current_namespace_path: &AbsolutePath,\n\n) -> ResolvedType<'a, B> {\n\n let (path, decl) = &translation_context.translated_decls[index];\n\n let relative_path: RelativeNamespace<B> = RelativeNamespace::new(\n\n current_namespace_path,\n\n &path.clone_pop(),\n\n translation_context.translated_namespaces,\n\n declarations,\n\n );\n\n match decl {\n\n DeclInfo::Table(translated_decl, decl) => {\n\n ResolvedType::Table(decl, translated_decl, relative_path)\n\n }\n\n DeclInfo::Struct(translated_decl, decl) => {\n\n ResolvedType::Struct(decl, translated_decl, relative_path)\n", "file_path": "planus-cli/src/codegen/backend_translation.rs", "rank": 41, "score": 82982.37319159487 }, { "content": " *buffer.as_slice().get_unchecked(offset) != 0\n\n }\n\n}\n\n\n\nimpl VectorWrite<bool> for bool {\n\n const STRIDE: usize = 1;\n\n\n\n type Value = bool;\n\n\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder) -> Self::Value {\n\n *self\n\n }\n\n\n\n #[inline]\n\n unsafe fn write_values(\n\n values: &[Self::Value],\n\n bytes: *mut MaybeUninit<u8>,\n\n buffer_position: u32,\n\n ) {\n\n let bytes = bytes as *mut [MaybeUninit<u8>; 1];\n\n for (i, v) in values.iter().enumerate() {\n\n v.write(Cursor::new(&mut *bytes.add(i)), buffer_position - i as u32);\n\n }\n\n }\n\n}\n", "file_path": "planus/src/impls/bool_.rs", "rank": 42, "score": 82939.67589916452 }, { "content": " fn prepare(&self, _builder: &mut Builder) -> Self {\n\n *self\n\n }\n\n}\n\n\n\nimpl WriteAsDefault<bool, bool> for bool {\n\n type Prepared = Self;\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder, default: &bool) -> Option<bool> {\n\n 
if self == default {\n\n None\n\n } else {\n\n Some(*self)\n\n }\n\n }\n\n}\n\n\n\nimpl WriteAsOptional<bool> for bool {\n\n type Prepared = Self;\n\n #[inline]\n", "file_path": "planus/src/impls/bool_.rs", "rank": 43, "score": 82939.59210527212 }, { "content": " fn prepare(&self, _builder: &mut Builder) -> Option<Self> {\n\n Some(*self)\n\n }\n\n}\n\n\n\nimpl<'buf> TableRead<'buf> for bool {\n\n #[inline]\n\n fn from_buffer(\n\n buffer: SliceWithStartOffset<'buf>,\n\n offset: usize,\n\n ) -> core::result::Result<bool, ErrorKind> {\n\n Ok(buffer.advance_as_array::<1>(offset)?.as_array()[0] != 0)\n\n }\n\n}\n\n\n\nimpl<'buf> VectorRead<'buf> for bool {\n\n const STRIDE: usize = 1;\n\n\n\n #[inline]\n\n unsafe fn from_buffer(buffer: SliceWithStartOffset<'buf>, offset: usize) -> bool {\n", "file_path": "planus/src/impls/bool_.rs", "rank": 44, "score": 82932.05397662806 }, { "content": "use crate::{\n\n builder::Builder, errors::ErrorKind, slice_helpers::SliceWithStartOffset, traits::*, Cursor,\n\n};\n\nuse core::mem::MaybeUninit;\n\n\n\nimpl Primitive for bool {\n\n const ALIGNMENT: usize = 1;\n\n const SIZE: usize = 1;\n\n}\n\n\n\nimpl WriteAsPrimitive<bool> for bool {\n\n #[inline]\n\n fn write<const N: usize>(&self, cursor: Cursor<'_, N>, _buffer_position: u32) {\n\n cursor.assert_size().finish([if *self { 1 } else { 0 }]);\n\n }\n\n}\n\n\n\nimpl WriteAs<bool> for bool {\n\n type Prepared = Self;\n\n #[inline]\n", "file_path": "planus/src/impls/bool_.rs", "rank": 45, "score": 82930.1029070743 }, { "content": "mod array;\n\nmod bool_;\n\nmod box_;\n\nmod byte_slice;\n\nmod offset;\n\nmod option;\n\nmod planus_vectors;\n\nmod primitives;\n\nmod ref_;\n\nmod result;\n\nmod slice;\n\nmod str;\n\nmod string;\n\nmod union_offset;\n\nmod unit;\n\nmod vec;\n\n\n", "file_path": "planus/src/impls/mod.rs", "rank": 46, "score": 82707.53310001732 }, { "content": "#[derive(Clone)]\n\nenum TypeDescription {\n\n Table,\n\n Struct { size: u32, alignment: u32 },\n\n Enum(Enum),\n\n 
Union,\n\n RpcService,\n\n}\n\n\n\n// do not start translating any declarations, until all declarations have been collected\n\n// then:\n\n// 1) get names and type descriptions for all declarations\n\n// - enums are translated immediately since they are self-contained and are needed\n\n// to translate table fields\n\n// 2) do preliminary translation with wrong sizes which resolves\n\n// what each ast::NamespacePath points to\n\n// 3) do topological sort of all structs to get sizes\n\n// 4) use this information to update all sizes\n\n\n\nimpl<'a> Translator<'a> {\n\n pub fn new(ctx: &'a Ctx, reachability: SortedMap<FileId, SortedSet<FileId>>) -> Self {\n", "file_path": "planus-cli/src/intermediate_language/translation.rs", "rank": 47, "score": 64385.12669937835 }, { "content": "fn main() {\n\n lalrpop::process_root().unwrap();\n\n}\n", "file_path": "planus-cli/build.rs", "rank": 48, "score": 62196.729637219374 }, { "content": "struct CstConverter<'ctx> {\n\n pub schema: Schema,\n\n pub ctx: &'ctx Ctx,\n\n pub current_span: Span,\n\n}\n\n\n", "file_path": "planus-cli/src/ast/convert.rs", "rank": 49, "score": 62113.3232917486 }, { "content": "fn main() {}\n\n\n\n/*\n\nuse criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};\n\nuse planus::{Builder, Offset};\n\n\n", "file_path": "test/rust/benches/benchmark.rs", "rank": 50, "score": 61028.70320563901 }, { "content": "fn main() {\n\n let path = std::env::args()\n\n .nth(1)\n\n .expect(\"Usage: api_example (output file)\");\n\n let mut builder = Builder::new();\n\n\n\n // Create an owned version of the monster to serialize\n\n let monster = Monster {\n\n pos: Some(Vec3 {\n\n x: 1.0,\n\n y: 2.0,\n\n z: 3.0,\n\n }),\n\n mana: 150,\n\n hp: 80,\n\n name: Some(\"Orc\".to_string()),\n\n inventory: Some(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]),\n\n color: Color::Red,\n\n weapons: Some(vec![\n\n Weapon {\n", "file_path": "examples/rust/examples/api_example.rs", "rank": 51, "score": 59924.916270192516 }, { "content": 
"fn generate_test_code(\n\n in_dir: &str,\n\n out_dir: &str,\n\n template: Option<&str>,\n\n generate_flatc: bool,\n\n) -> Result<()> {\n\n fs::create_dir_all(out_dir).with_context(|| format_err!(\"Cannot create dir: {}\", out_dir))?;\n\n\n\n let mut mod_code = String::new();\n\n\n\n // We want the same generated files as here in rust-build, but not the tests.\n\n // Symlinking the relevant files and adding this check was the least bad option\n\n // I could think of, but it's still not pretty.\n\n let is_main_crate = std::env::var(\"CARGO_PKG_NAME\").unwrap() == \"rust_test\";\n\n\n\n for entry in\n\n std::fs::read_dir(in_dir).with_context(|| format_err!(\"Cannot read dir: {}\", in_dir))?\n\n {\n\n let entry = entry.context(\"Error doing readdir\")?;\n\n let file_path = entry.path();\n", "file_path": "test/rust/build.rs", "rank": 52, "score": 59924.916270192516 }, { "content": "pub trait Backend {\n\n type NamespaceInfo: std::fmt::Debug + Clone;\n\n type TableInfo: std::fmt::Debug + Clone;\n\n type StructInfo: std::fmt::Debug + Clone;\n\n type EnumInfo: std::fmt::Debug + Clone;\n\n type UnionInfo: std::fmt::Debug + Clone;\n\n type RpcServiceInfo: std::fmt::Debug + Clone;\n\n type TableFieldInfo: std::fmt::Debug + Clone;\n\n type StructFieldInfo: std::fmt::Debug + Clone;\n\n type EnumVariantInfo: std::fmt::Debug + Clone;\n\n type UnionVariantInfo: std::fmt::Debug + Clone;\n\n type RpcMethodInfo: std::fmt::Debug + Clone;\n\n\n\n const KEYWORDS: &'static [&'static str];\n\n\n\n fn generate_namespace(\n\n &mut self,\n\n namespace_names: &mut NamespaceNames<'_, '_>,\n\n namespace_name: &AbsolutePath,\n\n namespace: &Namespace,\n", "file_path": "planus-cli/src/codegen/backend.rs", "rank": 53, "score": 59456.95833621783 }, { "content": "fn main() -> Result<()> {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n\n\n // Create API tests\n\n let planus_api_dir = format!(\"{}/planus_api\", out_dir);\n\n 
generate_test_code(\"api_files\", &planus_api_dir, None, false)?;\n\n\n\n // Create serialize/deserialize tests\n\n let planus_test_dir = format!(\"{}/planus_test\", out_dir);\n\n let serialize_template = std::fs::read_to_string(\"src/test_template.rs\").ok();\n\n generate_test_code(\n\n \"test_files\",\n\n &planus_test_dir,\n\n serialize_template.as_deref(),\n\n true,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "test/rust/build.rs", "rank": 54, "score": 59422.403259030005 }, { "content": "#[test]\n\nfn test_deserialize() {\n\n if let Ok(refs_dir) = std::fs::read_dir(format!(\"{}/{}\", FILE_PATH, \"deserialize\")) {\n\n let should_regenerate = std::env::var(\"PLANUS_REGENERATE\").is_ok();\n\n for entry in refs_dir {\n\n let entry = entry.unwrap();\n\n let file_path = entry.path();\n\n if file_path\n\n .extension()\n\n .map_or(false, |extension| extension == \"bin\")\n\n {\n\n let data = std::fs::read(&file_path).unwrap();\n\n\n\n let root_ref = RootRef::read_as_root(&data).unwrap();\n\n\n\n let mut debug_path = file_path.clone();\n\n debug_path.set_extension(\"txt\");\n\n let root_dbg = format!(\"{:#?}\", root_ref);\n\n\n\n crate::tests::compare_regenerate_file_str(\n\n &debug_path,\n\n &root_dbg,\n\n should_regenerate,\n\n )\n\n .unwrap();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "test/rust/src/test_template.rs", "rank": 55, "score": 58880.21106810047 }, { "content": "#[test]\n\nfn test_serialize() {\n\n let should_regenerate = std::env::var(\"PLANUS_REGENERATE\").is_ok();\n\n for entry in std::fs::read_dir(format!(\"{}/{}\", FILE_PATH, \"serialize\")).unwrap() {\n\n let entry = entry.unwrap();\n\n let file_path = entry.path();\n\n if !file_path.is_dir()\n\n && file_path\n\n .extension()\n\n .map_or(false, |extension| extension == \"json\")\n\n {\n\n let json = std::fs::read_to_string(&file_path).unwrap();\n\n let root: Root = serde_json::from_str(&json).unwrap();\n\n\n\n let mut builder = planus::Builder::new();\n\n let offset = root.prepare(&mut 
builder);\n\n let data = builder.finish(offset, None);\n\n\n\n let root_ref = RootRef::read_as_root(data).unwrap();\n\n let root2 = Root::try_from(root_ref).unwrap();\n\n similar_asserts::assert_eq!(root, root2);\n", "file_path": "test/rust/src/test_template.rs", "rank": 56, "score": 58880.21106810047 }, { "content": "fn generate_test_code(\n\n in_dir: &str,\n\n out_dir: &str,\n\n template: Option<&str>,\n\n generate_flatc: bool,\n\n) -> Result<()> {\n\n fs::create_dir_all(out_dir).with_context(|| format_err!(\"Cannot create dir: {}\", out_dir))?;\n\n\n\n let mut mod_code = String::new();\n\n\n\n // We want the same generated files as here in rust-build, but not the tests.\n\n // Symlinking the relevant files and adding this check was the least bad option\n\n // I could think of, but it's still not pretty.\n\n let is_main_crate = std::env::var(\"CARGO_PKG_NAME\").unwrap() == \"rust_test\";\n\n\n\n for entry in\n\n std::fs::read_dir(in_dir).with_context(|| format_err!(\"Cannot read dir: {}\", in_dir))?\n\n {\n\n let entry = entry.context(\"Error doing readdir\")?;\n\n let file_path = entry.path();\n", "file_path": "test/rust-compat/build.rs", "rank": 57, "score": 58880.21106810047 }, { "content": "pub trait PrettyPrint {\n\n fn print(&self, ctx: &Ctx);\n\n}\n\n\n\nimpl PrettyPrint for Schema {\n\n fn print(&self, ctx: &Ctx) {\n\n println!(\"file {}:\", ctx.get_filename(self.file_id).display());\n\n if !self.native_includes.is_empty() {\n\n println!(\n\n \" native includes: {:?}\",\n\n self.native_includes\n\n .iter()\n\n .map(|include| &include.value)\n\n .collect::<Vec<_>>()\n\n )\n\n }\n\n if !self.includes.is_empty() {\n\n println!(\n\n \" includes: {:?}\",\n\n self.includes\n", "file_path": "planus-cli/src/ast/print.rs", "rank": 58, "score": 58464.231695247705 }, { "content": "pub trait CstNode {\n\n fn span(&self) -> Span;\n\n}\n\n\n\nmacro_rules! 
cst_node {\n\n ($t:ident) => {\n\n impl CstNode for $t {\n\n fn span(&self) -> Span {\n\n self.span\n\n }\n\n }\n\n };\n\n ($t:ident <'input>) => {\n\n impl<'input> CstNode for $t<'input> {\n\n fn span(&self) -> Span {\n\n self.span\n\n }\n\n }\n\n };\n\n}\n", "file_path": "planus-cli/src/cst/types.rs", "rank": 59, "score": 58464.231695247705 }, { "content": "fn main() -> Result<()> {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n\n\n // Create API tests\n\n let planus_api_dir = format!(\"{}/planus_api\", out_dir);\n\n generate_test_code(\"api_files\", &planus_api_dir, None, false)?;\n\n\n\n // Create serialize/deserialize tests\n\n let planus_test_dir = format!(\"{}/planus_test\", out_dir);\n\n let serialize_template = std::fs::read_to_string(\"src/test_template.rs\").ok();\n\n generate_test_code(\n\n \"test_files\",\n\n &planus_test_dir,\n\n serialize_template.as_deref(),\n\n true,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "test/rust-compat/build.rs", "rank": 60, "score": 58318.61632358351 }, { "content": "fn reserve_field_name(\n\n path: &str,\n\n binding_kind: &'static str,\n\n declaration_names: &mut DeclarationNames<'_, '_>,\n\n) -> String {\n\n let name = path.to_snake_case().into();\n\n declaration_names\n\n .declaration_names\n\n .try_reserve_repeat(binding_kind, name, '_')\n\n .into()\n\n}\n\n\n", "file_path": "planus-cli/src/codegen/rust.rs", "rank": 61, "score": 57889.96759726422 }, { "content": "fn format_relative_namespace<'a>(\n\n relative_namespace: &'a RelativeNamespace<'a, RustBackend>,\n\n trailing_part: &'a str,\n\n) -> impl 'a + std::fmt::Display {\n\n relative_namespace.format(\n\n false,\n\n \"super\",\n\n Some(\"self::\"),\n\n \"::\",\n\n |info| &info.name,\n\n trailing_part,\n\n )\n\n}\n\n\n\nimpl Backend for RustBackend {\n\n type NamespaceInfo = Namespace;\n\n type TableInfo = Table;\n\n type TableFieldInfo = TableField;\n\n type StructInfo = Struct;\n\n type 
StructFieldInfo = StructField;\n", "file_path": "planus-cli/src/codegen/rust.rs", "rank": 62, "score": 56283.66765065522 }, { "content": "fn make_recursive_structure<B: ?Sized + Backend>(\n\n declarations: &Declarations,\n\n translated_namespaces: &mut VecMap<B::NamespaceInfo>,\n\n translated_decls: &mut VecMap<BackendDeclaration<B>>,\n\n current_namespace_index: NamespaceIndex,\n\n) -> BackendNamespace<B> {\n\n let (_, current_namespace) = declarations.get_namespace(current_namespace_index);\n\n let current_translated_namespace = translated_namespaces\n\n .remove(current_namespace_index.0)\n\n .unwrap();\n\n let translated_declarations: Vec<BackendDeclaration<B>> = current_namespace\n\n .declaration_ids\n\n .values()\n\n .map(|id| translated_decls.remove(id.0).unwrap())\n\n .collect();\n\n\n\n let children = current_namespace\n\n .child_namespaces\n\n .values()\n\n .map(|id| {\n", "file_path": "planus-cli/src/codegen/backend_translation.rs", "rank": 63, "score": 50219.88863680487 }, { "content": "fn translate_type<'a, B: ?Sized + Backend>(\n\n translation_context: &DeclarationTranslationContext<'a, '_, B>,\n\n declarations: &'a Declarations,\n\n full_translated_decls: &'a VecMap<BackendDeclaration<B>>,\n\n type_: &'a Type,\n\n current_namespace_path: &AbsolutePath,\n\n) -> ResolvedType<'a, B> {\n\n match &type_.kind {\n\n TypeKind::Table(index)\n\n | TypeKind::Union(index)\n\n | TypeKind::SimpleType(SimpleType::Struct(index))\n\n | TypeKind::SimpleType(SimpleType::Enum(index)) => translate_type_index(\n\n translation_context,\n\n declarations,\n\n full_translated_decls,\n\n index.0,\n\n current_namespace_path,\n\n ),\n\n TypeKind::SimpleType(type_) => translate_simple_type(\n\n translation_context,\n", "file_path": "planus-cli/src/codegen/backend_translation.rs", "rank": 64, "score": 49985.665105190594 }, { "content": "fn integer_type(type_: &IntegerType) -> &'static str {\n\n match &type_ {\n\n IntegerType::U8 => \"u8\",\n\n IntegerType::I8 => \"i8\",\n\n 
IntegerType::U16 => \"u16\",\n\n IntegerType::I16 => \"i16\",\n\n IntegerType::U32 => \"u32\",\n\n IntegerType::I32 => \"i32\",\n\n IntegerType::U64 => \"u64\",\n\n IntegerType::I64 => \"i64\",\n\n }\n\n}\n\n\n", "file_path": "planus-cli/src/codegen/rust.rs", "rank": 65, "score": 49175.63459515074 }, { "content": "fn translate_simple_type<'a, B: ?Sized + Backend>(\n\n translation_context: &DeclarationTranslationContext<'a, '_, B>,\n\n declarations: &'a Declarations,\n\n full_translated_decls: &'a VecMap<BackendDeclaration<B>>,\n\n type_: &'a SimpleType,\n\n current_namespace_path: &AbsolutePath,\n\n) -> ResolvedType<'a, B> {\n\n match type_ {\n\n SimpleType::Struct(index) | SimpleType::Enum(index) => translate_type_index(\n\n translation_context,\n\n declarations,\n\n full_translated_decls,\n\n index.0,\n\n current_namespace_path,\n\n ),\n\n SimpleType::Bool => ResolvedType::Bool,\n\n SimpleType::Integer(typ) => ResolvedType::Integer(*typ),\n\n SimpleType::Float(typ) => ResolvedType::Float(*typ),\n\n }\n\n}\n\n\n", "file_path": "planus-cli/src/codegen/backend_translation.rs", "rank": 66, "score": 49175.63459515074 }, { "content": "fn float_type(type_: &FloatType) -> &'static str {\n\n match &type_ {\n\n FloatType::F32 => \"f32\",\n\n FloatType::F64 => \"f64\",\n\n }\n\n}\n\n\n", "file_path": "planus-cli/src/codegen/rust.rs", "rank": 67, "score": 49175.63459515074 }, { "content": "trait SizeCheck<T, const X: usize, const Y: usize, const Z: usize> {\n\n const CHECK: Self;\n\n}\n\n\n\nimpl<T, const X: usize, const Y: usize, const Z: usize> SizeCheck<T, X, Y, Z> for () {\n\n const CHECK: () = check_impl::<T, X, Y, Z>();\n\n}\n\n\n", "file_path": "array-init-cursor/src/util.rs", "rank": 68, "score": 47203.90884142924 }, { "content": "fn print_equipment(monster: MonsterRef<'_>) -> Result<(), planus::Error> {\n\n // All accessors on tables return Result<_, planus::Error>\n\n // If the field is optional, then an Result<Option<_>, planus::Error>.\n\n if let Some(equipped) = 
monster.equipped()? {\n\n // Unions translate to rust enums with data\n\n match equipped {\n\n EquipmentRef::Weapon(weapon) => {\n\n // All generated types implement Debug\n\n println!(\"{:?}\", weapon);\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "examples/rust/examples/api_example.rs", "rank": 69, "score": 46662.444856915215 }, { "content": "use crate::{\n\n builder::Builder, errors::ErrorKind, slice_helpers::SliceWithStartOffset, Cursor, Offset,\n\n Result, UnionOffset,\n\n};\n\nuse core::mem::MaybeUninit;\n\n\n\n#[doc(hidden)]\n", "file_path": "planus/src/traits/mod.rs", "rank": 70, "score": 41429.19494663114 }, { "content": "}\n\n\n\nimpl VectorWrite<Offset<str>> for String {\n\n type Value = Offset<str>;\n\n\n\n const STRIDE: usize = 4;\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Self::Value {\n\n WriteAs::prepare(self, builder)\n\n }\n\n\n\n #[inline]\n\n unsafe fn write_values(\n\n values: &[Offset<str>],\n\n bytes: *mut MaybeUninit<u8>,\n\n buffer_position: u32,\n\n ) {\n\n let bytes = bytes as *mut [MaybeUninit<u8>; 4];\n\n for (i, v) in values.iter().enumerate() {\n\n v.write(\n\n Cursor::new(&mut *bytes.add(i)),\n\n buffer_position - (4 * i) as u32,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "planus/src/impls/string.rs", "rank": 71, "score": 41293.66193167111 }, { "content": " type Prepared = Self;\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder) -> Self {\n\n *self\n\n }\n\n }\n\n\n\n impl WriteAsDefault<$ty, $ty> for $ty {\n\n type Prepared = Self;\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder, default: &$ty) -> Option<Self> {\n\n #[allow(clippy::float_cmp)]\n\n if self == default {\n\n None\n\n } else {\n\n Some(*self)\n\n }\n\n }\n\n }\n\n\n", "file_path": "planus/src/impls/primitives.rs", "rank": 72, "score": 41293.144364515145 }, { "content": " const STRIDE: usize = 4;\n\n type Value = Offset<T>;\n\n\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder) -> Self::Value {\n\n *self\n\n }\n\n\n\n 
#[inline]\n\n unsafe fn write_values(\n\n values: &[Offset<T>],\n\n bytes: *mut MaybeUninit<u8>,\n\n buffer_position: u32,\n\n ) {\n\n let bytes = bytes as *mut [MaybeUninit<u8>; 4];\n\n for (i, v) in values.iter().enumerate() {\n\n v.write(\n\n Cursor::new(&mut *bytes.add(i)),\n\n buffer_position - (Self::STRIDE * i) as u32,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "planus/src/impls/offset.rs", "rank": 73, "score": 41292.80618784468 }, { "content": " const STRIDE: usize = $size;\n\n #[inline]\n\n unsafe fn from_buffer(buffer: SliceWithStartOffset<'buf>, offset: usize) -> $ty {\n\n let buffer = buffer.unchecked_advance_as_array(offset).as_array();\n\n <$ty>::from_le_bytes(*buffer)\n\n }\n\n }\n\n\n\n impl VectorWrite<$ty> for $ty {\n\n const STRIDE: usize = $size;\n\n type Value = $ty;\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder) -> Self::Value {\n\n *self\n\n }\n\n\n\n #[inline]\n\n unsafe fn write_values(\n\n values: &[$ty],\n\n bytes: *mut MaybeUninit<u8>,\n", "file_path": "planus/src/impls/primitives.rs", "rank": 74, "score": 41292.057620020336 }, { "content": " .as_slice()\n\n .get(..len)\n\n .ok_or(ErrorKind::InvalidLength)\n\n .map_err(add_context)?;\n\n let str = core::str::from_utf8(slice)\n\n .map_err(|source| ErrorKind::InvalidUtf8 { source })\n\n .map_err(add_context)?;\n\n Ok(str)\n\n }\n\n}\n\n\n\nimpl VectorWrite<Offset<str>> for str {\n\n type Value = Offset<str>;\n\n\n\n const STRIDE: usize = 4;\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Self::Value {\n\n WriteAs::prepare(self, builder)\n\n }\n\n\n", "file_path": "planus/src/impls/str.rs", "rank": 75, "score": 41291.52443573014 }, { "content": "use crate::{\n\n builder::Builder,\n\n traits::{Primitive, WriteAsOptional, WriteAsOptionalUnion, WriteAsPrimitive},\n\n Cursor, UnionOffset, Void,\n\n};\n\n\n\nimpl<P: Primitive> WriteAsPrimitive<P> for Void {\n\n #[inline]\n\n fn write<const N: usize>(&self, _cursor: Cursor<'_, N>, _buffer_position: u32) {\n\n match *self 
{}\n\n }\n\n}\n\n\n\nimpl<P: Primitive> WriteAsOptional<P> for () {\n\n type Prepared = Void;\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder) -> Option<Void> {\n\n None\n\n }\n\n}\n\n\n\nimpl<T: ?Sized> WriteAsOptionalUnion<T> for () {\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder) -> Option<UnionOffset<T>> {\n\n None\n\n }\n\n}\n", "file_path": "planus/src/impls/unit.rs", "rank": 76, "score": 41289.57058010541 }, { "content": "\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Offset<[P]> {\n\n WriteAsOffset::prepare(self.as_slice(), builder)\n\n }\n\n}\n\n\n\nimpl<T, P> WriteAsDefault<Offset<[P]>, ()> for Vec<T>\n\nwhere\n\n P: Primitive,\n\n T: VectorWrite<P>,\n\n{\n\n type Prepared = Offset<[P]>;\n\n\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder, _default: &()) -> Option<Offset<[P]>> {\n\n if self.is_empty() {\n\n None\n\n } else {\n\n Some(WriteAsOffset::prepare(self.as_slice(), builder))\n", "file_path": "planus/src/impls/vec.rs", "rank": 77, "score": 41287.02125269437 }, { "content": " impl WriteAsOptional<$ty> for $ty {\n\n type Prepared = Self;\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder) -> Option<Self> {\n\n Some(*self)\n\n }\n\n }\n\n\n\n impl<'buf> TableRead<'buf> for $ty {\n\n #[inline]\n\n fn from_buffer(\n\n buffer: SliceWithStartOffset<'buf>,\n\n offset: usize,\n\n ) -> core::result::Result<$ty, ErrorKind> {\n\n let buffer = buffer.advance_as_array(offset)?.as_array();\n\n Ok(<$ty>::from_le_bytes(*buffer))\n\n }\n\n }\n\n\n\n impl<'buf> VectorRead<'buf> for $ty {\n", "file_path": "planus/src/impls/primitives.rs", "rank": 78, "score": 41286.86662046246 }, { "content": " WriteAsOffset::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl WriteAsOptional<Offset<str>> for str {\n\n type Prepared = Offset<str>;\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Option<Offset<str>> {\n\n Some(WriteAsOffset::prepare(self, builder))\n\n }\n\n}\n\n\n\nimpl WriteAsDefault<Offset<str>, str> 
for str {\n\n type Prepared = Offset<str>;\n\n\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder, default: &str) -> Option<Offset<str>> {\n\n if self == default {\n\n None\n\n } else {\n", "file_path": "planus/src/impls/str.rs", "rank": 79, "score": 41286.85778161186 }, { "content": "\n\nimpl<'a, T1: ?Sized, T2: ?Sized + WriteAsUnion<T1>> WriteAsUnion<T1> for &'a T2 {\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> UnionOffset<T1> {\n\n T2::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl<'a, T1: ?Sized, T2: ?Sized + WriteAsOptionalUnion<T1>> WriteAsOptionalUnion<T1> for &'a T2 {\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Option<UnionOffset<T1>> {\n\n T2::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl<'a, P: Primitive, T: ?Sized + VectorWrite<P>> VectorWrite<P> for &'a T {\n\n const STRIDE: usize = T::STRIDE;\n\n type Value = T::Value;\n\n\n\n #[inline]\n", "file_path": "planus/src/impls/ref_.rs", "rank": 80, "score": 41286.79292204393 }, { "content": "impl WriteAsDefault<Offset<str>, str> for String {\n\n type Prepared = Offset<str>;\n\n\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder, default: &str) -> Option<Offset<str>> {\n\n if self == default {\n\n None\n\n } else {\n\n Some(WriteAsOffset::prepare(self.as_str(), builder))\n\n }\n\n }\n\n}\n\n\n\nimpl WriteAsOptional<Offset<str>> for String {\n\n type Prepared = Offset<str>;\n\n\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Option<Offset<str>> {\n\n Some(WriteAsOffset::prepare(self.as_str(), builder))\n\n }\n", "file_path": "planus/src/impls/string.rs", "rank": 81, "score": 41286.63353706437 }, { "content": " }\n\n}\n\n\n\nimpl<T: ?Sized> WriteAs<Offset<T>> for Offset<T> {\n\n type Prepared = Self;\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder) -> Self {\n\n *self\n\n }\n\n}\n\n\n\nimpl<T: ?Sized> WriteAsOptional<Offset<T>> for Offset<T> {\n\n type Prepared = Self;\n\n #[inline]\n\n fn prepare(&self, _builder: &mut Builder) -> 
Option<Self> {\n\n Some(*self)\n\n }\n\n}\n\n\n\nimpl<T: ?Sized> VectorWrite<Offset<T>> for Offset<T> {\n", "file_path": "planus/src/impls/offset.rs", "rank": 82, "score": 41286.554589963336 }, { "content": " fn prepare(&self, builder: &mut Builder) -> Self::Value {\n\n T::prepare(self, builder)\n\n }\n\n\n\n #[inline]\n\n unsafe fn write_values(\n\n values: &[Self::Value],\n\n bytes: *mut MaybeUninit<u8>,\n\n buffer_position: u32,\n\n ) {\n\n T::write_values(values, bytes, buffer_position);\n\n }\n\n}\n", "file_path": "planus/src/impls/ref_.rs", "rank": 83, "score": 41286.42599342506 }, { "content": "use crate::traits::{VectorRead, VectorReadInner};\n\n\n\n/*\n\nimpl<T: ToOwned, E> ToOwned for core::result::Result<T, E>\n\nwhere\n\n errors::Error: From<E>,\n\n{\n\n type Value = T::Value;\n\n\n\n #[inline]\n\n fn to_owned(self) -> crate::Result<Self::Value> {\n\n self?.to_owned()\n\n }\n\n}\n\n */\n\n\n\nimpl<'buf, T: VectorReadInner<'buf>, E> VectorRead<'buf> for Result<T, E>\n\nwhere\n\n E: core::convert::From<T::Error>,\n\n{\n\n const STRIDE: usize = T::STRIDE;\n\n\n\n unsafe fn from_buffer(buffer: crate::SliceWithStartOffset<'buf>, offset: usize) -> Self {\n\n Ok(T::from_buffer(buffer, offset)?)\n\n }\n\n}\n", "file_path": "planus/src/impls/result.rs", "rank": 84, "score": 41286.32257097726 }, { "content": " P::ALIGNMENT_MASK.max(3),\n\n |buffer_position, bytes| {\n\n let bytes = bytes.as_mut_ptr();\n\n\n\n (self.len() as u32).write(\n\n Cursor::new(&mut *(bytes as *mut [MaybeUninit<u8>; 4])),\n\n buffer_position,\n\n );\n\n\n\n T::write_values(&tmp, bytes.add(4), buffer_position - 4);\n\n },\n\n )\n\n };\n\n builder.current_offset()\n\n }\n\n}\n\n\n\nimpl<T, P, const N: usize> WriteAs<Offset<[P]>> for [T; N]\n\nwhere\n\n P: Primitive,\n", "file_path": "planus/src/impls/array.rs", "rank": 85, "score": 41285.09859488609 }, { "content": " #[inline]\n\n unsafe fn write_values(\n\n values: &[Offset<str>],\n\n bytes: *mut MaybeUninit<u8>,\n\n buffer_position: 
u32,\n\n ) {\n\n let bytes = bytes as *mut [MaybeUninit<u8>; 4];\n\n for (i, v) in values.iter().enumerate() {\n\n v.write(\n\n Cursor::new(&mut *bytes.add(i)),\n\n buffer_position - (4 * i) as u32,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "planus/src/impls/str.rs", "rank": 86, "score": 41285.08109078621 }, { "content": " fn prepare(&self, builder: &mut Builder) -> T::Prepared {\n\n T::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl<'a, P: Primitive, D: ?Sized, T: ?Sized + WriteAsDefault<P, D>> WriteAsDefault<P, D> for &'a T {\n\n type Prepared = T::Prepared;\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder, default: &D) -> Option<T::Prepared> {\n\n T::prepare(self, builder, default)\n\n }\n\n}\n\n\n\nimpl<'a, P: Primitive, T: ?Sized + WriteAsOptional<P>> WriteAsOptional<P> for &'a T {\n\n type Prepared = T::Prepared;\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Option<T::Prepared> {\n\n T::prepare(self, builder)\n\n }\n\n}\n", "file_path": "planus/src/impls/ref_.rs", "rank": 87, "score": 41283.80263855206 }, { "content": "use crate::{builder::Builder, traits::*, Offset, UnionOffset};\n\nuse alloc::boxed::Box;\n\n\n\nimpl<P, T: ?Sized + WriteAsOffset<P>> WriteAsOffset<P> for Box<T> {\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Offset<P> {\n\n T::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl<P: Primitive, T: ?Sized + WriteAs<P>> WriteAs<P> for Box<T> {\n\n type Prepared = T::Prepared;\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> T::Prepared {\n\n T::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl<P: Primitive, D: ?Sized, T: ?Sized + WriteAsDefault<P, D>> WriteAsDefault<P, D> for Box<T> {\n\n type Prepared = T::Prepared;\n", "file_path": "planus/src/impls/box_.rs", "rank": 88, "score": 41283.163730422835 }, { "content": " P: Primitive,\n\n T: VectorWrite<P>,\n\n{\n\n type Prepared = Offset<[P]>;\n\n\n\n fn prepare(&self, builder: &mut Builder) -> Offset<[P]> {\n\n WriteAsOffset::prepare(&self, builder)\n\n 
}\n\n}\n\n\n\nimpl<T, P> WriteAsDefault<Offset<[P]>, ()> for [T]\n\nwhere\n\n P: Primitive,\n\n T: VectorWrite<P>,\n\n{\n\n type Prepared = Offset<[P]>;\n\n\n\n fn prepare(&self, builder: &mut Builder, _default: &()) -> Option<Offset<[P]>> {\n\n if self.is_empty() {\n\n None\n", "file_path": "planus/src/impls/slice.rs", "rank": 89, "score": 41282.86877257752 }, { "content": "use crate::{builder::Builder, traits::*, Cursor, Offset};\n\nuse core::mem::MaybeUninit;\n\n\n\nimpl<T: ?Sized> Primitive for Offset<T> {\n\n const ALIGNMENT: usize = 4;\n\n const SIZE: usize = 4;\n\n}\n\n\n\nimpl<T: ?Sized> WriteAsPrimitive<Offset<T>> for Offset<T> {\n\n #[inline]\n\n fn write<const N: usize>(&self, cursor: Cursor<'_, N>, buffer_position: u32) {\n\n cursor\n\n .assert_size()\n\n .finish(u32::to_le_bytes(buffer_position - self.offset));\n\n }\n\n}\n\n\n\nimpl<T: ?Sized> WriteAsOffset<T> for Offset<T> {\n\n fn prepare(&self, _builder: &mut Builder) -> Offset<T> {\n\n *self\n", "file_path": "planus/src/impls/offset.rs", "rank": 90, "score": 41282.49172750946 }, { "content": " .unwrap(),\n\n P::ALIGNMENT_MASK.max(u32::ALIGNMENT_MASK),\n\n |buffer_position, bytes| {\n\n let bytes = bytes.as_mut_ptr();\n\n\n\n (self.len() as u32).write(\n\n Cursor::new(&mut *(bytes as *mut [MaybeUninit<u8>; 4])),\n\n buffer_position,\n\n );\n\n\n\n T::write_values(&tmp, bytes.add(4), buffer_position - 4);\n\n },\n\n )\n\n }\n\n builder.current_offset()\n\n }\n\n}\n\n\n\nimpl<T, P> WriteAs<Offset<[P]>> for [T]\n\nwhere\n", "file_path": "planus/src/impls/slice.rs", "rank": 91, "score": 41282.154619764224 }, { "content": "use crate::{builder::Builder, traits::*, Cursor, Offset, UnionOffset};\n\nuse core::mem::MaybeUninit;\n\n\n\nimpl<'a, P: Primitive, T: ?Sized + WriteAsPrimitive<P>> WriteAsPrimitive<P> for &'a T {\n\n #[inline]\n\n fn write<const N: usize>(&self, cursor: Cursor<'_, N>, buffer_position: u32) {\n\n T::write(*self, cursor, buffer_position)\n\n }\n\n}\n\n\n\nimpl<'a, T1: ?Sized, T2: 
?Sized + WriteAsOffset<T1>> WriteAsOffset<T1> for &'a T2 {\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Offset<T1> {\n\n T2::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl<'a, P: Primitive, T: ?Sized + WriteAs<P>> WriteAs<P> for &'a T {\n\n type Prepared = T::Prepared;\n\n #[inline]\n", "file_path": "planus/src/impls/ref_.rs", "rank": 92, "score": 41281.73148728389 }, { "content": " T: VectorWrite<P>,\n\n{\n\n type Prepared = Offset<[P]>;\n\n\n\n fn prepare(&self, builder: &mut Builder) -> Offset<[P]> {\n\n WriteAsOffset::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl<T, P, const N: usize> WriteAsOptional<Offset<[P]>> for [T; N]\n\nwhere\n\n P: Primitive,\n\n T: VectorWrite<P>,\n\n{\n\n type Prepared = Offset<[P]>;\n\n\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Option<Offset<[P]>> {\n\n Some(WriteAsOffset::prepare(self, builder))\n\n }\n\n}\n", "file_path": "planus/src/impls/array.rs", "rank": 93, "score": 41281.512720505285 }, { "content": " buffer_position,\n\n );\n\n core::ptr::copy_nonoverlapping(\n\n self.as_bytes().as_ptr() as *const MaybeUninit<u8>,\n\n bytes.add(4),\n\n self.len(),\n\n );\n\n bytes.add(4 + self.len()).write(MaybeUninit::new(0));\n\n },\n\n )\n\n }\n\n builder.current_offset()\n\n }\n\n}\n\n\n\nimpl WriteAs<Offset<str>> for str {\n\n type Prepared = Offset<str>;\n\n\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Offset<str> {\n", "file_path": "planus/src/impls/str.rs", "rank": 94, "score": 41281.48801963768 }, { "content": " #[inline]\n\n fn prepare(&self, builder: &mut Builder, default: &D) -> Option<T::Prepared> {\n\n T::prepare(self, builder, default)\n\n }\n\n}\n\n\n\nimpl<P: Primitive, T: ?Sized + WriteAsOptional<P>> WriteAsOptional<P> for Box<T> {\n\n type Prepared = T::Prepared;\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Option<T::Prepared> {\n\n T::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl<T1: ?Sized, T2: ?Sized + WriteAsUnion<T1>> WriteAsUnion<T1> for Box<T2> 
{\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> UnionOffset<T1> {\n\n T2::prepare(self, builder)\n\n }\n\n}\n\n\n\nimpl<T1: ?Sized, T2: ?Sized + WriteAsOptionalUnion<T1>> WriteAsOptionalUnion<T1> for Box<T2> {\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Option<UnionOffset<T1>> {\n\n T2::prepare(self, builder)\n\n }\n\n}\n", "file_path": "planus/src/impls/box_.rs", "rank": 95, "score": 41281.04706785885 }, { "content": "use crate::{builder::Builder, traits::*, Cursor, Offset};\n\nuse core::mem::MaybeUninit;\n\n\n\nimpl<T, P, const N: usize> WriteAsOffset<[P]> for [T; N]\n\nwhere\n\n P: Primitive,\n\n T: VectorWrite<P>,\n\n{\n\n fn prepare(&self, builder: &mut Builder) -> Offset<[P]> {\n\n let mut tmp: [MaybeUninit<T::Value>; N] = unsafe { MaybeUninit::uninit().assume_init() };\n\n for (t, v) in tmp.iter_mut().zip(self.iter()) {\n\n t.write(v.prepare(builder));\n\n }\n\n // TODO: replace with std::mem::MaybeUninit::array_assume_init when it becomes stable\n\n // https://github.com/rust-lang/rust/issues/80908\n\n let tmp =\n\n unsafe { (&tmp as *const [MaybeUninit<T::Value>; N] as *const [T::Value; N]).read() };\n\n unsafe {\n\n builder.write_with(\n\n 4 + T::STRIDE.checked_mul(self.len()).unwrap(),\n", "file_path": "planus/src/impls/array.rs", "rank": 96, "score": 41280.59033462235 }, { "content": "use crate::{\n\n builder::Builder, errors::ErrorKind, slice_helpers::SliceWithStartOffset, traits::*, Cursor,\n\n Offset,\n\n};\n\nuse core::mem::MaybeUninit;\n\n\n\nimpl WriteAsOffset<str> for str {\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Offset<str> {\n\n let size_including_len_and_null = self.len().checked_add(5).unwrap();\n\n // SAFETY: We make sure to write the 4+len+1 bytes inside the closure.\n\n unsafe {\n\n builder.write_with(\n\n size_including_len_and_null,\n\n u32::ALIGNMENT_MASK,\n\n |buffer_position, bytes| {\n\n let bytes = bytes.as_mut_ptr();\n\n\n\n (self.len() as u32).write(\n\n Cursor::new(&mut 
*(bytes as *mut [MaybeUninit<u8>; 4])),\n", "file_path": "planus/src/impls/str.rs", "rank": 97, "score": 41280.45372695068 }, { "content": "use crate::{builder::Builder, traits::*, Cursor, Offset};\n\nuse alloc::string::String;\n\nuse core::mem::MaybeUninit;\n\n\n\nimpl WriteAsOffset<str> for String {\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Offset<str> {\n\n WriteAsOffset::prepare(self.as_str(), builder)\n\n }\n\n}\n\n\n\nimpl WriteAs<Offset<str>> for String {\n\n type Prepared = Offset<str>;\n\n\n\n #[inline]\n\n fn prepare(&self, builder: &mut Builder) -> Offset<str> {\n\n WriteAsOffset::prepare(self.as_str(), builder)\n\n }\n\n}\n\n\n", "file_path": "planus/src/impls/string.rs", "rank": 98, "score": 41280.388373256625 }, { "content": " }\n\n}\n\n\n\nimpl<'buf> VectorReadInner<'buf> for &'buf str {\n\n type Error = crate::errors::Error;\n\n\n\n const STRIDE: usize = 4;\n\n #[inline]\n\n unsafe fn from_buffer(\n\n buffer: SliceWithStartOffset<'buf>,\n\n offset: usize,\n\n ) -> crate::Result<&'buf str> {\n\n let add_context =\n\n |e: ErrorKind| e.with_error_location(\"[str]\", \"get\", buffer.offset_from_start);\n\n let (slice, len) = super::array_from_buffer(buffer, offset).map_err(add_context)?;\n\n #[cfg(feature = \"extra-validation\")]\n\n if slice.as_slice().get(len) != Some(&0) {\n\n return Err(add_context(ErrorKind::MissingNullTerminator));\n\n }\n\n let slice = slice\n", "file_path": "planus/src/impls/str.rs", "rank": 99, "score": 41280.23112113298 } ]
Rust
tokera/src/api/bag.rs
tokera-com/ate
42c4ce5a0c0aef47aeb4420cc6dc788ef6ee8804
#![allow(unused_imports)] use ate::prelude::*; use error_chain::bail; use fxhash::FxHashSet; use std::ops::Deref; use std::sync::Arc; use tracing::{debug, error, info, trace, warn}; use crate::api::TokApi; use crate::error::*; use crate::model::*; impl TokApi { pub(super) async fn __get_bag( &mut self, denomination: Denomination, ) -> Result<Option<DaoMut<BagOfCoins>>, WalletError> { let ret = self.wallet.as_mut().bags.get_mut(&denomination).await?; Ok(ret) } pub(super) async fn __get_or_create_bag( &mut self, denomination: Denomination, ) -> Result<DaoMut<BagOfCoins>, WalletError> { let ret = self .wallet .as_mut() .bags .get_or_default(denomination) .await?; Ok(ret) } pub async fn add_coin_to_wallet(&mut self, coin: CarvedCoin) -> Result<(), WalletError> { let lock = self.wallet.try_lock_with_timeout(self.lock_timeout).await?; if lock == false { bail!(WalletErrorKind::WalletLocked); } self.__add_coin_to_wallet(coin).await?; self.wallet.unlock().await?; Ok(()) } pub async fn add_coins_to_wallet( &mut self, coins: impl IntoIterator<Item = CarvedCoin>, ) -> Result<(), WalletError> { let lock = self.wallet.try_lock_with_timeout(self.lock_timeout).await?; if lock == false { bail!(WalletErrorKind::WalletLocked); } for coin in coins { self.__add_coin_to_wallet(coin).await?; } self.wallet.unlock().await?; Ok(()) } pub(super) async fn __add_coin_to_wallet( &mut self, coin: CarvedCoin, ) -> Result<(), WalletError> { let mut bag = self .__get_or_create_bag(Denomination { value: coin.value, currency: coin.currency, }) .await?; let mut active_bag = bag.as_mut(); if active_bag.coins.iter().any(|c| c.coin == coin.coin) { trace!( "ignoing coin (value={}{}) - already in wallet", coin.value, coin.currency ); return Ok(()); } trace!( "adding coin to wallet (value={}{})", coin.value, coin.currency ); active_bag.coins.push(coin); Ok(()) } pub async fn remove_coin_from_wallet( &mut self, denomination: Denomination, ) -> Result<(), WalletError> { let lock = 
self.wallet.try_lock_with_timeout(self.lock_timeout).await?; if lock == false { bail!(WalletErrorKind::WalletLocked); } self.__remove_coin_from_wallet(denomination).await?; self.wallet.unlock().await?; Ok(()) } pub(super) async fn __remove_coin_from_wallet( &mut self, denomination: Denomination, ) -> Result<Option<CarvedCoin>, WalletError> { let mut bag = match self.__get_bag(denomination).await? { Some(a) => a, None => { return Ok(None); } }; let mut bag = bag.as_mut(); let ret = bag.coins.pop(); Ok(ret) } }
#![allow(unused_imports)] use ate::prelude::*; use error_chain::bail; use fxhash::FxHashSet; use std::ops::Deref; use std::sync::Arc; use tracing::{debug, error, info, trace, warn}; use crate::api::TokApi; use crate::error::*; use crate::model::*; impl TokApi {
pub(super) async fn __get_or_create_bag( &mut self, denomination: Denomination, ) -> Result<DaoMut<BagOfCoins>, WalletError> { let ret = self .wallet .as_mut() .bags .get_or_default(denomination) .await?; Ok(ret) } pub async fn add_coin_to_wallet(&mut self, coin: CarvedCoin) -> Result<(), WalletError> { let lock = self.wallet.try_lock_with_timeout(self.lock_timeout).await?; if lock == false { bail!(WalletErrorKind::WalletLocked); } self.__add_coin_to_wallet(coin).await?; self.wallet.unlock().await?; Ok(()) } pub async fn add_coins_to_wallet( &mut self, coins: impl IntoIterator<Item = CarvedCoin>, ) -> Result<(), WalletError> { let lock = self.wallet.try_lock_with_timeout(self.lock_timeout).await?; if lock == false { bail!(WalletErrorKind::WalletLocked); } for coin in coins { self.__add_coin_to_wallet(coin).await?; } self.wallet.unlock().await?; Ok(()) } pub(super) async fn __add_coin_to_wallet( &mut self, coin: CarvedCoin, ) -> Result<(), WalletError> { let mut bag = self .__get_or_create_bag(Denomination { value: coin.value, currency: coin.currency, }) .await?; let mut active_bag = bag.as_mut(); if active_bag.coins.iter().any(|c| c.coin == coin.coin) { trace!( "ignoing coin (value={}{}) - already in wallet", coin.value, coin.currency ); return Ok(()); } trace!( "adding coin to wallet (value={}{})", coin.value, coin.currency ); active_bag.coins.push(coin); Ok(()) } pub async fn remove_coin_from_wallet( &mut self, denomination: Denomination, ) -> Result<(), WalletError> { let lock = self.wallet.try_lock_with_timeout(self.lock_timeout).await?; if lock == false { bail!(WalletErrorKind::WalletLocked); } self.__remove_coin_from_wallet(denomination).await?; self.wallet.unlock().await?; Ok(()) } pub(super) async fn __remove_coin_from_wallet( &mut self, denomination: Denomination, ) -> Result<Option<CarvedCoin>, WalletError> { let mut bag = match self.__get_bag(denomination).await? 
{ Some(a) => a, None => { return Ok(None); } }; let mut bag = bag.as_mut(); let ret = bag.coins.pop(); Ok(ret) } }
pub(super) async fn __get_bag( &mut self, denomination: Denomination, ) -> Result<Option<DaoMut<BagOfCoins>>, WalletError> { let ret = self.wallet.as_mut().bags.get_mut(&denomination).await?; Ok(ret) }
function_block-full_function
[ { "content": "fn conv_err(err: FsError) -> Box<dyn std::error::Error> {\n\n error!(\"{}\", err);\n\n let err: std::io::Error = err.into();\n\n err.into()\n\n}\n", "file_path": "wasm-bus-fuse/examples/find.rs", "rank": 0, "score": 91894.57967854003 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n wasm_bus::task::block_on(main_async())\n\n}\n\n\n\nasync fn main_async() -> Result<(), Box<dyn std::error::Error>> {\n\n SubscriberBuilder::default()\n\n .with_writer(std::io::stderr)\n\n .with_max_level(LevelFilter::DEBUG)\n\n //.with_env_filter(EnvFilter::from_default_env())\n\n .init();\n\n\n\n let args: Vec<String> = std::env::args().collect();\n\n let program = args[0].clone();\n\n\n\n if args.len() != 3 && args.len() != 5 {\n\n eprintln!(\"usage: {} <db-name> <filename> [instance] [access-code]\", program);\n\n return Ok(());\n\n }\n\n let name = args[1].clone();\n\n let file = args[2].clone();\n", "file_path": "wasm-bus-fuse/examples/find.rs", "rank": 1, "score": 88644.58597797382 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n\n wasm_bus::task::block_on(main_async())\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\n#[tokio::main(flavor = \"multi_thread\")]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n main_async().await?;\n\n std::process::exit(0);\n\n}\n\n\n\nasync fn main_async() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut stdin = Tty::stdin().await?;\n\n let mut stdout = Tty::stdout().await?;\n\n loop {\n\n if let Some(data) = stdin.read().await {\n\n if data.len() == 1 && data[0] == 120u8 {\n\n break;\n\n }\n\n stdout.write(data).await?;\n\n stdout.flush().await?;\n\n } else {\n\n break;\n\n }\n\n }\n\n Ok(())\n\n}", "file_path": "wasm-bus-tty/examples/passthru.rs", "rank": 2, "score": 88644.58597797382 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\nfn main() -> Result<(), Box<dyn std::error::Error>> {\n\n 
wasm_bus::task::block_on(main_async())\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\n#[tokio::main(flavor = \"current_thread\")]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n main_async().await?;\n\n std::process::exit(0);\n\n}\n\n\n\nasync fn main_async() -> Result<(), Box<dyn std::error::Error>> {\n\n let opts: Opts = Opts::parse();\n\n \n\n let destination = opts.destination;\n\n let count = opts.count as u64;\n\n let interval = Duration::from_millis(opts.interval);\n\n let timeout = Duration::from_millis(opts.timeout);\n\n\n\n let mut dups = HashMap::<u64, u32>::new();\n", "file_path": "wasm-bus-mio/examples/ping.rs", "rank": 3, "score": 88644.58597797382 }, { "content": "#[test]\n\nfn test_secure_data() -> Result<(), Box<dyn std::error::Error>> {\n\n crate::utils::bootstrap_test_env();\n\n\n\n static KEY_SIZES: [KeySize; 3] = [KeySize::Bit128, KeySize::Bit192, KeySize::Bit256];\n\n for key_size in KEY_SIZES.iter() {\n\n let client1 = EncryptKey::generate(key_size.clone());\n\n\n\n let plain_text1 = \"the cat ran up the wall\".to_string();\n\n let cipher = EncryptedSecureData::new(&client1, plain_text1.clone())?;\n\n\n\n let plain_text2 = cipher.unwrap(&client1).expect(\"Should have decrypted.\");\n\n assert_eq!(plain_text1, plain_text2);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crypto/src/crypto/tests.rs", "rank": 4, "score": 87683.84012176715 }, { "content": "#[test]\n\nfn test_ntru_encrypt() -> Result<(), Box<dyn std::error::Error>> {\n\n crate::utils::bootstrap_test_env();\n\n\n\n static KEY_SIZES: [KeySize; 3] = [KeySize::Bit128, KeySize::Bit192, KeySize::Bit256];\n\n for key_size in KEY_SIZES.iter() {\n\n let sk = PrivateEncryptKey::generate(key_size.clone());\n\n let pk = sk.as_public_key();\n\n\n\n let plain_text1 = \"the cat ran up the wall\".to_string();\n\n let cipher_text = pk.encrypt(plain_text1.as_bytes());\n\n let plain_test2 =\n\n String::from_utf8(sk.decrypt(&cipher_text.iv, 
&cipher_text.data)?).unwrap();\n\n\n\n assert_eq!(plain_text1, plain_test2);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crypto/src/crypto/tests.rs", "rank": 5, "score": 87683.84012176715 }, { "content": "#[test]\n\nfn test_derived_keys() -> Result<(), Box<dyn std::error::Error>> {\n\n static KEY_SIZES: [KeySize; 3] = [KeySize::Bit128, KeySize::Bit192, KeySize::Bit256];\n\n for key_size1 in KEY_SIZES.iter() {\n\n for key_size2 in KEY_SIZES.iter() {\n\n // Generate a derived key and encryption key\n\n let key2 = EncryptKey::generate(*key_size1);\n\n let mut key1 = DerivedEncryptKey::new(&key2);\n\n\n\n // Encrypt some data\n\n let plain_text1 = \"the cat ran up the wall\".to_string();\n\n let encrypted_text1 = key1.transmute(&key2)?.encrypt(plain_text1.as_bytes());\n\n\n\n // Check that it decrypts properly\n\n let plain_text2 = String::from_utf8(\n\n key1.transmute(&key2)?\n\n .decrypt(&encrypted_text1.iv, &encrypted_text1.data[..]),\n\n )\n\n .unwrap();\n\n assert_eq!(plain_text1, plain_text2);\n\n\n", "file_path": "crypto/src/crypto/tests.rs", "rank": 6, "score": 87683.84012176715 }, { "content": "#[test]\n\nfn test_multi_encrypt() -> Result<(), Box<dyn std::error::Error>> {\n\n crate::utils::bootstrap_test_env();\n\n\n\n static KEY_SIZES: [KeySize; 3] = [KeySize::Bit128, KeySize::Bit192, KeySize::Bit256];\n\n for key_size in KEY_SIZES.iter() {\n\n let client1 = PrivateEncryptKey::generate(key_size.clone());\n\n let client2 = PrivateEncryptKey::generate(key_size.clone());\n\n let client3 = PrivateEncryptKey::generate(key_size.clone());\n\n\n\n let plain_text1 = \"the cat ran up the wall\".to_string();\n\n let mut multi = MultiEncryptedSecureData::new(\n\n &client1.as_public_key(),\n\n \"meta\".to_string(),\n\n plain_text1.clone(),\n\n )?;\n\n multi.add(\n\n &client2.as_public_key(),\n\n \"another_meta\".to_string(),\n\n &client1,\n\n )?;\n", "file_path": "crypto/src/crypto/tests.rs", "rank": 7, "score": 87683.84012176715 }, { "content": "#[test]\n\nfn 
test_public_secure_data() -> Result<(), Box<dyn std::error::Error>> {\n\n crate::utils::bootstrap_test_env();\n\n\n\n #[derive(Debug, Serialize, Deserialize, Clone)]\n\n struct TestClass {\n\n data: String,\n\n }\n\n\n\n static KEY_SIZES: [KeySize; 3] = [KeySize::Bit128, KeySize::Bit192, KeySize::Bit256];\n\n for key_size in KEY_SIZES.iter() {\n\n let key = PrivateEncryptKey::generate(key_size.clone());\n\n let container = PublicEncryptedSecureData::<TestClass>::new(key.as_public_key(), TestClass {\n\n data: \"the cat ran up the wall\".to_string()\n\n }).unwrap();\n\n\n\n let out = container.unwrap(&key).unwrap();\n\n assert_eq!(out.data.as_str(), \"the cat ran up the wall\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crypto/src/crypto/tests.rs", "rank": 8, "score": 86755.17436382962 }, { "content": "#[test]\n\nfn test_signed_protected_data() -> Result<(), Box<dyn std::error::Error>> {\n\n let sign_key = PrivateSignKey::generate(KeySize::Bit256);\n\n let data = \"test data\".to_string();\n\n\n\n let test = SignedProtectedData::new(&sign_key, data)?;\n\n assert!(\n\n test.verify(&sign_key.as_public_key())?,\n\n \"Failed to verify the protected data\"\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "crypto/src/crypto/tests.rs", "rank": 9, "score": 86755.17436382962 }, { "content": "fn conv_err(err: smoltcp::Error) -> SocketErrorKind {\n\n use smoltcp::Error::*;\n\n match err {\n\n Exhausted => SocketErrorKind::StorageFull,\n\n Illegal => SocketErrorKind::PermissionDenied,\n\n Unaddressable => SocketErrorKind::AddrNotAvailable,\n\n Finished => SocketErrorKind::ResourceBusy,\n\n Truncated => SocketErrorKind::InvalidData,\n\n Checksum => SocketErrorKind::InvalidData,\n\n Unrecognized => SocketErrorKind::InvalidData,\n\n Fragmented => SocketErrorKind::InvalidData,\n\n Malformed => SocketErrorKind::InvalidData,\n\n Dropped => SocketErrorKind::InvalidData,\n\n NotSupported => SocketErrorKind::Unsupported,\n\n _ => SocketErrorKind::Unsupported\n\n }\n\n}\n\n\n", 
"file_path": "atenet/src/port.rs", "rank": 10, "score": 82503.92724364965 }, { "content": "fn conv_err2(err: api::MioError) -> std::io::Error {\n\n err.into()\n\n}", "file_path": "wasm-bus-mio/src/mio.rs", "rank": 11, "score": 76109.88811826144 }, { "content": "fn conv_io_err(err: wasm_bus_fuse::api::FsError) -> io::Error {\n\n err.into()\n\n}\n\n\n", "file_path": "atesess/src/adapter.rs", "rank": 12, "score": 75211.72207229857 }, { "content": "fn conv_err(err: wasm_bus::abi::CallError) -> std::io::Error {\n\n err.into_io_error()\n\n}\n\n\n", "file_path": "wasm-bus-mio/src/mio.rs", "rank": 13, "score": 72933.87753585748 }, { "content": "pub fn fault(handle: CallHandle, error: u32) {\n\n unsafe {\n\n raw::fault(handle.id, error);\n\n }\n\n}\n\n\n", "file_path": "wasm-bus/src/abi/syscall.rs", "rank": 14, "score": 65241.90255671516 }, { "content": "#[allow(unused_imports)]\n\nuse tracing::{debug, error, info, instrument, span, trace, warn, Level};\n\n\n\nuse super::*;\n\n\n\n#[derive(Debug, Default)]\n\npub struct ProcessError {\n\n pub sink_errors: Vec<SinkError>,\n\n pub validation_errors: Vec<ValidationError>,\n\n}\n\n\n\nimpl ProcessError {\n\n pub fn has_errors(&self) -> bool {\n\n if self.sink_errors.is_empty() == false {\n\n return true;\n\n }\n\n if self.validation_errors.is_empty() == false {\n\n return true;\n\n }\n\n false\n", "file_path": "lib/src/error/process_error.rs", "rank": 15, "score": 64398.095898534244 }, { "content": "use error_chain::error_chain;\n\n\n\nerror_chain! 
{\n\n types {\n\n CryptoError, CryptoErrorKind, ResultExt, Result;\n\n }\n\n errors {\n\n NoIvPresent {\n\n description(\"no initialization vector\")\n\n display(\"no initialization vector\")\n\n }\n\n }\n\n}\n\n\n\nimpl From<CryptoError> for std::io::Error {\n\n fn from(error: CryptoError) -> Self {\n\n match error {\n\n CryptoError(CryptoErrorKind::NoIvPresent, _) => std::io::Error::new(\n\n std::io::ErrorKind::Other,\n\n \"The metadata does not have IV component present\",\n\n ),\n\n _ => std::io::Error::new(\n\n std::io::ErrorKind::Other,\n\n \"An unknown error occured while performing ate crypto\",\n\n ),\n\n }\n\n }\n\n}\n", "file_path": "crypto/src/error/crypto_error.rs", "rank": 16, "score": 64381.83772573374 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::model::*;\n\nuse crate::request::*;\n\nuse ate::prelude::*;\n\n\n\nuse super::*;\n\n\n\nerror_chain! {\n\n types {\n\n ContractError, ContractErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n CoreError(super::CoreError, super::CoreErrorKind);\n\n QueryError(super::QueryError, super::QueryErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n\n errors {\n", "file_path": "tokera/src/error/contract_error.rs", "rank": 17, "score": 64381.423901355745 }, { "content": "use error_chain::error_chain;\n\nuse rmp_serde::decode::Error as RmpDecodeError;\n\nuse rmp_serde::encode::Error as RmpEncodeError;\n\nuse serde_json::Error as JsonError;\n\n\n\nuse crate::spec::PrimaryKey;\n\n\n\nerror_chain! 
{\n\n types {\n\n SerializationError, SerializationErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n EncodeError(RmpEncodeError);\n\n DecodeError(RmpDecodeError);\n\n JsonError(JsonError);\n\n BincodeError(bincode::Error);\n\n }\n", "file_path": "crypto/src/error/serialization_error.rs", "rank": 18, "score": 64381.30981282583 }, { "content": "use error_chain::error_chain;\n\nuse rmp_serde::decode::Error as RmpDecodeError;\n\nuse rmp_serde::encode::Error as RmpEncodeError;\n\n\n\nuse crate::crypto::AteHash;\n\nuse crate::header::PrimaryKey;\n\n\n\nerror_chain! {\n\n types {\n\n LoadError, LoadErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n SerializationError(super::SerializationError, super::SerializationErrorKind);\n\n TransformationError(super::TransformError, super::TransformErrorKind);\n\n }\n\n errors {\n\n IO(err: String) {\n\n description(\"IO error\")\n\n display(\"{}\", err)\n\n }\n", "file_path": "lib/src/error/load_error.rs", "rank": 19, "score": 64381.18893611481 }, { "content": "use error_chain::error_chain;\n\nuse rmp_serde::decode::Error as RmpDecodeError;\n\nuse serde_json::Error as JsonError;\n\nuse tokio::sync::mpsc;\n\n\n\nuse crate::crypto::KeySize;\n\n\n\nerror_chain! {\n\n types {\n\n CommsError, CommsErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n SerializationError(super::SerializationError, super::SerializationErrorKind);\n\n ValidationError(super::ValidationError, super::ValidationErrorKind);\n\n LoadError(super::LoadError, super::LoadErrorKind);\n\n }\n\n foreign_links {\n\n IO(::tokio::io::Error);\n\n JoinError(::tokio::task::JoinError);\n\n UrlError(::url::ParseError);\n", "file_path": "lib/src/error/comms_error.rs", "rank": 20, "score": 64381.06224644564 }, { "content": "use chrono::DateTime;\n\nuse chrono::Utc;\n\nuse error_chain::error_chain;\n\nuse std::time::SystemTime;\n\n\n\nuse std::time::SystemTimeError;\n\n\n\nerror_chain! 
{\n\n types {\n\n TimeError, TimeErrorKind, ResultExt, Result;\n\n }\n\n foreign_links {\n\n IO(std::io::Error);\n\n SystemTimeError(SystemTimeError);\n\n }\n\n errors {\n\n BeyondTolerance(tolerance: u32) {\n\n description(\"the network latency is beyond tolerance to synchronize the clocks\"),\n\n display(\"the network latency is beyond tolerance ({}) to synchronize the clocks\", tolerance.to_string()),\n\n }\n", "file_path": "lib/src/error/time_error.rs", "rank": 21, "score": 64380.90850484404 }, { "content": "use error_chain::error_chain;\n\nuse std::time::Duration;\n\n\n\nuse crate::request::*;\n\nuse ::ate::prelude::*;\n\n\n\nerror_chain! {\n\n types {\n\n LoginError, LoginErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n AteError(::ate::error::AteError, ::ate::error::AteErrorKind);\n\n ChainCreationError(::ate::error::ChainCreationError, ::ate::error::ChainCreationErrorKind);\n\n SerializationError(::ate::error::SerializationError, ::ate::error::SerializationErrorKind);\n\n InvokeError(::ate::error::InvokeError, ::ate::error::InvokeErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n\n errors {\n", "file_path": "auth/src/error/login_error.rs", "rank": 22, "score": 64380.84255776358 }, { "content": "\n\nimpl From<::ate::error::InvokeError> for ContractError {\n\n fn from(err: ::ate::error::InvokeError) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::InvokeError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::InvokeErrorKind> for ContractErrorKind {\n\n fn from(err: ::ate::error::InvokeErrorKind) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::InvokeError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::TimeError> for ContractError {\n\n fn from(err: ::ate::error::TimeError) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::TimeError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::TimeErrorKind> for ContractErrorKind {\n", "file_path": "tokera/src/error/contract_error.rs", "rank": 23, "score": 
64380.82820443507 }, { "content": "impl From<::ate::error::CommitError> for CoinError {\n\n fn from(err: ::ate::error::CommitError) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::CommitError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::CommitErrorKind> for CoinErrorKind {\n\n fn from(err: ::ate::error::CommitErrorKind) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::CommitError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LockError> for CoinError {\n\n fn from(err: ::ate::error::LockError) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::LockError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LockErrorKind> for CoinErrorKind {\n\n fn from(err: ::ate::error::LockErrorKind) -> Self {\n", "file_path": "tokera/src/error/coin_error.rs", "rank": 24, "score": 64380.719399508 }, { "content": "impl From<::ate::error::SerializationError> for WalletError {\n\n fn from(err: ::ate::error::SerializationError) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::SerializationError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::SerializationErrorKind> for WalletErrorKind {\n\n fn from(err: ::ate::error::SerializationErrorKind) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::SerializationError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::InvokeError> for WalletError {\n\n fn from(err: ::ate::error::InvokeError) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::InvokeError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::InvokeErrorKind> for WalletErrorKind {\n\n fn from(err: ::ate::error::InvokeErrorKind) -> Self {\n", "file_path": "tokera/src/error/wallet_error.rs", "rank": 25, "score": 64380.719399508 }, { "content": "use error_chain::error_chain;\n\nuse std::time::Duration;\n\n\n\nuse crate::request::*;\n\nuse ::ate::prelude::*;\n\n\n\nerror_chain! 
{\n\n types {\n\n SudoError, SudoErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n AteError(::ate::error::AteError, ::ate::error::AteErrorKind);\n\n ChainCreationError(::ate::error::ChainCreationError, ::ate::error::ChainCreationErrorKind);\n\n SerializationError(::ate::error::SerializationError, ::ate::error::SerializationErrorKind);\n\n InvokeError(::ate::error::InvokeError, ::ate::error::InvokeErrorKind);\n\n LoginError(super::LoginError, super::LoginErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n", "file_path": "auth/src/error/sudo_error.rs", "rank": 26, "score": 64380.714565072594 }, { "content": "impl From<::ate::error::ChainCreationError> for CoinError {\n\n fn from(err: ::ate::error::ChainCreationError) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::ChainCreationError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::ChainCreationErrorKind> for CoinErrorKind {\n\n fn from(err: ::ate::error::ChainCreationErrorKind) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::ChainCreationError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::SerializationError> for CoinError {\n\n fn from(err: ::ate::error::SerializationError) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::SerializationError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::SerializationErrorKind> for CoinError {\n\n fn from(err: ::ate::error::SerializationErrorKind) -> Self {\n", "file_path": "tokera/src/error/coin_error.rs", "rank": 27, "score": 64380.62765971578 }, { "content": "impl From<::ate::error::LoadErrorKind> for InstanceErrorKind {\n\n fn from(err: ::ate::error::LoadErrorKind) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::LoadError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::CommitError> for InstanceError {\n\n fn from(err: ::ate::error::CommitError) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::CommitError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::CommitErrorKind> for InstanceErrorKind {\n\n 
fn from(err: ::ate::error::CommitErrorKind) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::CommitError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LockError> for InstanceError {\n\n fn from(err: ::ate::error::LockError) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::LockError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LockErrorKind> for InstanceErrorKind {\n\n fn from(err: ::ate::error::LockErrorKind) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::LockError(err))\n\n }\n\n}", "file_path": "tokera/src/error/instance_error.rs", "rank": 28, "score": 64380.61703483445 }, { "content": "impl From<::ate::error::AteErrorKind> for InstanceErrorKind {\n\n fn from(err: ::ate::error::AteErrorKind) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::AteError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::ChainCreationError> for InstanceError {\n\n fn from(err: ::ate::error::ChainCreationError) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::ChainCreationError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::ChainCreationErrorKind> for InstanceErrorKind {\n\n fn from(err: ::ate::error::ChainCreationErrorKind) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::ChainCreationError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::SerializationError> for InstanceError {\n\n fn from(err: ::ate::error::SerializationError) -> Self {\n", "file_path": "tokera/src/error/instance_error.rs", "rank": 29, "score": 64380.609547873755 }, { "content": " }\n\n}\n\n\n\nimpl From<tokio::io::Error> for LoadError {\n\n fn from(err: tokio::io::Error) -> LoadError {\n\n LoadErrorKind::IO(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<RmpEncodeError> for LoadError {\n\n fn from(err: RmpEncodeError) -> LoadError {\n\n LoadErrorKind::SerializationError(super::SerializationErrorKind::EncodeError(err).into())\n\n .into()\n\n }\n\n}\n\n\n\nimpl From<RmpDecodeError> for LoadError {\n\n fn from(err: RmpDecodeError) -> 
LoadError {\n\n LoadErrorKind::SerializationError(super::SerializationErrorKind::DecodeError(err).into())\n\n .into()\n", "file_path": "lib/src/error/load_error.rs", "rank": 30, "score": 64380.59244057255 }, { "content": "}\n\n\n\nimpl From<super::ChainCreationError> for CommsError {\n\n fn from(err: super::ChainCreationError) -> CommsError {\n\n CommsErrorKind::FatalError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<super::ChainCreationErrorKind> for CommsError {\n\n fn from(err: super::ChainCreationErrorKind) -> CommsError {\n\n CommsErrorKind::FatalError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<bincode::Error> for CommsError {\n\n fn from(err: bincode::Error) -> CommsError {\n\n CommsErrorKind::SerializationError(super::SerializationErrorKind::BincodeError(err).into())\n\n .into()\n\n }\n\n}\n", "file_path": "lib/src/error/comms_error.rs", "rank": 31, "score": 64380.57112228899 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::request::*;\n\n\n\nuse super::*;\n\n\n\nerror_chain! 
{\n\n types {\n\n CoinError, CoinErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n CoreError(super::CoreError, super::CoreErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n\n errors {\n\n InvalidReference(reference_number: String) {\n\n description(\"invalid reference number\"),\n\n display(\"invalid reference number ({})\", reference_number),\n", "file_path": "tokera/src/error/coin_error.rs", "rank": 32, "score": 64380.54844745843 }, { "content": " }\n\n}\n\n\n\nimpl From<bincode::Error> for LoadError {\n\n fn from(err: bincode::Error) -> LoadError {\n\n LoadErrorKind::SerializationError(super::SerializationErrorKind::BincodeError(err).into())\n\n .into()\n\n }\n\n}\n\n\n\nimpl From<super::ChainCreationError> for LoadError {\n\n fn from(err: super::ChainCreationError) -> LoadError {\n\n LoadErrorKind::ChainCreationError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<super::ChainCreationErrorKind> for LoadError {\n\n fn from(err: super::ChainCreationErrorKind) -> LoadError {\n\n LoadErrorKind::ChainCreationError(err.to_string()).into()\n\n }\n\n}\n", "file_path": "lib/src/error/load_error.rs", "rank": 33, "score": 64380.52124635191 }, { "content": "}\n\n\n\nimpl From<::ate::error::TimeError> for InstanceError {\n\n fn from(err: ::ate::error::TimeError) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::TimeError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::TimeErrorKind> for InstanceErrorKind {\n\n fn from(err: ::ate::error::TimeErrorKind) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::TimeError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LoadError> for InstanceError {\n\n fn from(err: ::ate::error::LoadError) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::LoadError(err.0)).into()\n\n }\n\n}\n\n\n", "file_path": "tokera/src/error/instance_error.rs", "rank": 34, "score": 64380.47931355617 }, { "content": " }\n\n}\n\n\n\nimpl From<::ate::error::CommitErrorKind> for ContractErrorKind {\n\n fn 
from(err: ::ate::error::CommitErrorKind) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::CommitError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LockError> for ContractError {\n\n fn from(err: ::ate::error::LockError) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::LockError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LockErrorKind> for ContractErrorKind {\n\n fn from(err: ::ate::error::LockErrorKind) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::LockError(err))\n\n }\n\n}\n", "file_path": "tokera/src/error/contract_error.rs", "rank": 35, "score": 64380.41186565498 }, { "content": "}\n\n\n\nimpl From<::ate::error::LoadErrorKind> for WalletErrorKind {\n\n fn from(err: ::ate::error::LoadErrorKind) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::LoadError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::CommitError> for WalletError {\n\n fn from(err: ::ate::error::CommitError) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::CommitError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::CommitErrorKind> for WalletErrorKind {\n\n fn from(err: ::ate::error::CommitErrorKind) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::CommitError(err))\n\n }\n\n}\n\n\n", "file_path": "tokera/src/error/wallet_error.rs", "rank": 36, "score": 64380.41186565498 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::request::*;\n\nuse ::ate::prelude::*;\n\n\n\nerror_chain! 
{\n\n types {\n\n QueryError, QueryErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n AteError(::ate::error::AteError, ::ate::error::AteErrorKind);\n\n ChainCreationError(::ate::error::ChainCreationError, ::ate::error::ChainCreationErrorKind);\n\n SerializationError(::ate::error::SerializationError, ::ate::error::SerializationErrorKind);\n\n InvokeError(::ate::error::InvokeError, ::ate::error::InvokeErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n\n errors {\n\n NotFound {\n", "file_path": "auth/src/error/query_error.rs", "rank": 37, "score": 64380.4115716688 }, { "content": "}\n\n\n\nimpl From<::ate::error::TimeErrorKind> for CoinErrorKind {\n\n fn from(err: ::ate::error::TimeErrorKind) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::TimeError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LoadError> for CoinError {\n\n fn from(err: ::ate::error::LoadError) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::LoadError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LoadErrorKind> for CoinErrorKind {\n\n fn from(err: ::ate::error::LoadErrorKind) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::LoadError(err))\n\n }\n\n}\n\n\n", "file_path": "tokera/src/error/coin_error.rs", "rank": 38, "score": 64380.41186565498 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<T> From<tokio::sync::mpsc::error::SendError<T>> for InvokeError {\n\n fn from(err: tokio::sync::mpsc::error::SendError<T>) -> InvokeError {\n\n InvokeErrorKind::PipeError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<tokio::time::error::Elapsed> for InvokeError {\n\n fn from(_elapsed: tokio::time::error::Elapsed) -> InvokeError {\n\n InvokeErrorKind::Timeout.into()\n\n }\n\n}\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nimpl From<wasm_bus_time::prelude::Elapsed> for InvokeError {\n\n fn from(_elapsed: wasm_bus_time::prelude::Elapsed) -> InvokeError {\n\n InvokeErrorKind::Timeout.into()\n\n }\n\n}\n", "file_path": "lib/src/error/invoke_error.rs", "rank": 39, 
"score": 64380.357558558004 }, { "content": " }\n\n}\n\n\n\nimpl From<::ate::error::ChainCreationErrorKind> for ContractErrorKind {\n\n fn from(err: ::ate::error::ChainCreationErrorKind) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::ChainCreationError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::SerializationError> for ContractError {\n\n fn from(err: ::ate::error::SerializationError) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::SerializationError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::SerializationErrorKind> for ContractErrorKind {\n\n fn from(err: ::ate::error::SerializationErrorKind) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::SerializationError(err))\n\n }\n\n}\n", "file_path": "tokera/src/error/contract_error.rs", "rank": 40, "score": 64380.34552868373 }, { "content": "use super::*;\n\nuse crate::request::*;\n\nuse error_chain::error_chain;\n\n\n\nerror_chain! {\n\n types {\n\n WalletError, WalletErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n CoreError(super::CoreError, super::CoreErrorKind);\n\n CoinError(super::CoinError, super::CoinErrorKind);\n\n GatherError(super::GatherError, super::GatherErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n\n errors {\n\n InvalidReference(reference_number: String) {\n\n description(\"invalid reference number\"),\n\n display(\"invalid reference number ({})\", reference_number),\n", "file_path": "tokera/src/error/wallet_error.rs", "rank": 41, "score": 64380.33437884574 }, { "content": "\n\nimpl From<RmpDecodeError> for CommsError {\n\n fn from(err: RmpDecodeError) -> CommsError {\n\n CommsErrorKind::SerializationError(super::SerializationErrorKind::DecodeError(err).into())\n\n .into()\n\n }\n\n}\n\n\n\nimpl From<JsonError> for CommsError {\n\n fn from(err: JsonError) -> CommsError {\n\n CommsErrorKind::SerializationError(super::SerializationErrorKind::JsonError(err).into())\n\n .into()\n\n }\n\n}\n", "file_path": 
"lib/src/error/comms_error.rs", "rank": 42, "score": 64380.33105746907 }, { "content": " CoinErrorKind::CoreError(CoreErrorKind::SerializationError(err)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::InvokeError> for CoinError {\n\n fn from(err: ::ate::error::InvokeError) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::InvokeError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::InvokeErrorKind> for CoinError {\n\n fn from(err: ::ate::error::InvokeErrorKind) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::InvokeError(err)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::TimeError> for CoinError {\n\n fn from(err: ::ate::error::TimeError) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::TimeError(err.0)).into()\n\n }\n", "file_path": "tokera/src/error/coin_error.rs", "rank": 43, "score": 64380.311712697854 }, { "content": " WalletErrorKind::CoreError(CoreErrorKind::InvokeError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::TimeError> for WalletError {\n\n fn from(err: ::ate::error::TimeError) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::TimeError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::TimeErrorKind> for WalletErrorKind {\n\n fn from(err: ::ate::error::TimeErrorKind) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::TimeError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LoadError> for WalletError {\n\n fn from(err: ::ate::error::LoadError) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::LoadError(err.0)).into()\n\n }\n", "file_path": "tokera/src/error/wallet_error.rs", "rank": 44, "score": 64380.290760306685 }, { "content": "}\n\n\n\nimpl From<::ate::error::AteErrorKind> for WalletErrorKind {\n\n fn from(err: ::ate::error::AteErrorKind) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::AteError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::ChainCreationError> for WalletError {\n\n fn from(err: ::ate::error::ChainCreationError) -> Self {\n\n 
WalletErrorKind::CoreError(CoreErrorKind::ChainCreationError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::ChainCreationErrorKind> for WalletErrorKind {\n\n fn from(err: ::ate::error::ChainCreationErrorKind) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::ChainCreationError(err))\n\n }\n\n}\n\n\n", "file_path": "tokera/src/error/wallet_error.rs", "rank": 45, "score": 64380.28027167433 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::request::*;\n\nuse ate::prelude::*;\n\n\n\nerror_chain! {\n\n types {\n\n CreateError, CreateErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n QueryError(super::QueryError, super::QueryErrorKind);\n\n AteError(::ate::error::AteError, ::ate::error::AteErrorKind);\n\n ChainCreationError(::ate::error::ChainCreationError, ::ate::error::ChainCreationErrorKind);\n\n SerializationError(::ate::error::SerializationError, ::ate::error::SerializationErrorKind);\n\n InvokeError(::ate::error::InvokeError, ::ate::error::InvokeErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n\n errors {\n", "file_path": "auth/src/error/create_error.rs", "rank": 46, "score": 64380.27765644745 }, { "content": "use error_chain::error_chain;\n\nuse tokio::sync::broadcast;\n\nuse tokio::sync::watch;\n\n\n\nerror_chain! 
{\n\n types {\n\n CompactError, CompactErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n SinkError(super::SinkError, super::SinkErrorKind);\n\n TimeError(super::TimeError, super::TimeErrorKind);\n\n SerializationError(super::SerializationError, super::SerializationErrorKind);\n\n LoadError(super::LoadError, super::LoadErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n\n errors {\n\n WatchError(err: String) {\n\n description(\"failed to compact the chain due to an error in watch notification\"),\n", "file_path": "lib/src/error/compact_error.rs", "rank": 47, "score": 64380.25816543366 }, { "content": " InstanceErrorKind::CoreError(CoreErrorKind::SerializationError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::SerializationErrorKind> for InstanceErrorKind {\n\n fn from(err: ::ate::error::SerializationErrorKind) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::SerializationError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::InvokeError> for InstanceError {\n\n fn from(err: ::ate::error::InvokeError) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::InvokeError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::InvokeErrorKind> for InstanceErrorKind {\n\n fn from(err: ::ate::error::InvokeErrorKind) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::InvokeError(err))\n\n }\n", "file_path": "tokera/src/error/instance_error.rs", "rank": 48, "score": 64380.22858095652 }, { "content": " fn from(err: ContractError) -> AteError {\n\n AteErrorKind::ServiceError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::AteError> for ContractError {\n\n fn from(err: ::ate::error::AteError) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::AteError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::AteErrorKind> for ContractErrorKind {\n\n fn from(err: ::ate::error::AteErrorKind) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::AteError(err))\n\n }\n\n}\n\n\n\nimpl 
From<::ate::error::ChainCreationError> for ContractError {\n\n fn from(err: ::ate::error::ChainCreationError) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::ChainCreationError(err.0)).into()\n", "file_path": "tokera/src/error/contract_error.rs", "rank": 49, "score": 64380.208077000214 }, { "content": " fn from(err: ::ate::error::TimeErrorKind) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::TimeError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LoadError> for ContractError {\n\n fn from(err: ::ate::error::LoadError) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::LoadError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LoadErrorKind> for ContractErrorKind {\n\n fn from(err: ::ate::error::LoadErrorKind) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::LoadError(err))\n\n }\n\n}\n\n\n\nimpl From<::ate::error::CommitError> for ContractError {\n\n fn from(err: ::ate::error::CommitError) -> Self {\n\n ContractErrorKind::CoreError(CoreErrorKind::CommitError(err.0)).into()\n", "file_path": "tokera/src/error/contract_error.rs", "rank": 50, "score": 64380.17058694093 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::request::*;\n\nuse ::ate::prelude::*;\n\n\n\nerror_chain! 
{\n\n types {\n\n ResetError, ResetErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n AteError(::ate::error::AteError, ::ate::error::AteErrorKind);\n\n ChainCreationError(::ate::error::ChainCreationError, ::ate::error::ChainCreationErrorKind);\n\n SerializationError(::ate::error::SerializationError, ::ate::error::SerializationErrorKind);\n\n InvokeError(::ate::error::InvokeError, ::ate::error::InvokeErrorKind);\n\n LoginError(super::LoginError, super::LoginErrorKind);\n\n SudoError(super::SudoError, super::SudoErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n", "file_path": "auth/src/error/reset_error.rs", "rank": 51, "score": 64380.1377196859 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::request::*;\n\nuse ::ate::prelude::*;\n\n\n\nerror_chain! {\n\n types {\n\n GatherError, GatherErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n AteError(::ate::error::AteError, ::ate::error::AteErrorKind);\n\n ChainCreationError(::ate::error::ChainCreationError, ::ate::error::ChainCreationErrorKind);\n\n SerializationError(::ate::error::SerializationError, ::ate::error::SerializationErrorKind);\n\n InvokeError(::ate::error::InvokeError, ::ate::error::InvokeErrorKind);\n\n LoginError(super::LoginError, super::LoginErrorKind);\n\n SudoError(super::SudoError, super::SudoErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n", "file_path": "auth/src/error/gather_error.rs", "rank": 52, "score": 64380.1377196859 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::crypto::AteHash;\n\n\n\nerror_chain! 
{\n\n types {\n\n SinkError, SinkErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n TrustError(super::TrustError, super::TrustErrorKind);\n\n }\n\n errors {\n\n MissingPublicKey(hash: AteHash) {\n\n description(\"the public key for signature could not be found in the chain-of-trust\"),\n\n display(\"the public key ({}) for signature could not be found in the chain-of-trust\", hash.to_string()),\n\n }\n\n InvalidSignature(hash: AteHash, err: Option<pqcrypto_traits_wasi::Error>) {\n\n description(\"failed verification of hash while using public key\"),\n\n display(\"failed verification of hash while using public key ({}) - {}\", hash.to_string(), err.map(|a| a.to_string()).unwrap_or_else(|| \"unknown reason\".to_string()))\n\n }\n\n }\n\n}\n", "file_path": "lib/src/error/sink_error.rs", "rank": 53, "score": 64380.13287859435 }, { "content": " description(\"the dIO that created this object has gone out of scope\")\n\n display(\"the dIO that created this object has gone out of scope\")\n\n }\n\n }\n\n}\n\n\n\nimpl From<super::CommitError> for LockError {\n\n fn from(err: super::CommitError) -> LockError {\n\n LockErrorKind::CommitError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<super::CommitErrorKind> for LockError {\n\n fn from(err: super::CommitErrorKind) -> LockError {\n\n LockErrorKind::CommitError(err.to_string()).into()\n\n }\n\n}\n", "file_path": "lib/src/error/lock_error.rs", "rank": 54, "score": 64380.0494870037 }, { "content": "\n\nimpl From<serde_json::Error> for AteError {\n\n fn from(err: serde_json::Error) -> AteError {\n\n AteErrorKind::SerializationError(\n\n super::SerializationErrorKind::SerdeError(err.to_string()).into(),\n\n )\n\n .into()\n\n }\n\n}\n\n\n\nimpl From<tokio::sync::watch::error::RecvError> for AteError {\n\n fn from(err: tokio::sync::watch::error::RecvError) -> AteError {\n\n AteErrorKind::IO(tokio::io::Error::new(\n\n tokio::io::ErrorKind::Other,\n\n err.to_string(),\n\n ))\n\n .into()\n\n }\n\n}\n\n\n", "file_path": 
"lib/src/error/ate_error.rs", "rank": 55, "score": 64380.00407127906 }, { "content": "impl From<::ate::error::LockError> for WalletError {\n\n fn from(err: ::ate::error::LockError) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::LockError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::LockErrorKind> for WalletErrorKind {\n\n fn from(err: ::ate::error::LockErrorKind) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::LockError(err))\n\n }\n\n}\n\n\n\nimpl From<CancelDepositFailed> for WalletError {\n\n fn from(err: CancelDepositFailed) -> WalletError {\n\n match err {\n\n CancelDepositFailed::AuthenticationFailed => {\n\n WalletErrorKind::CoreError(CoreErrorKind::AuthenticationFailed).into()\n\n }\n\n CancelDepositFailed::AlreadyPaid => WalletErrorKind::AlreadyPaid.into(),\n\n CancelDepositFailed::InvalidCommodity => {\n", "file_path": "tokera/src/error/wallet_error.rs", "rank": 56, "score": 64379.9949582786 }, { "content": "impl From<tokio_tungstenite::tungstenite::Error> for CommsError {\n\n fn from(err: tokio_tungstenite::tungstenite::Error) -> CommsError {\n\n CommsErrorKind::WebSocketError(err.to_string()).into()\n\n }\n\n}\n\n\n\n#[cfg(feature = \"enable_full\")]\n\nimpl From<tokio_tungstenite::tungstenite::http::uri::InvalidUri> for CommsError {\n\n fn from(err: tokio_tungstenite::tungstenite::http::uri::InvalidUri) -> CommsError {\n\n CommsErrorKind::WebSocketInternalError(format!(\n\n \"Failed to establish websocket due to an invalid URI - {}\",\n\n err.to_string()\n\n ))\n\n .into()\n\n }\n\n}\n\n\n\nimpl<T> From<tokio::sync::broadcast::error::SendError<T>> for CommsError {\n\n fn from(err: tokio::sync::broadcast::error::SendError<T>) -> CommsError {\n\n CommsErrorKind::SendError(err.to_string()).into()\n", "file_path": "lib/src/error/comms_error.rs", "rank": 57, "score": 64379.94626852392 }, { "content": " }\n\n}\n\n\n\nimpl From<tokio::sync::broadcast::error::RecvError> for CommsError {\n\n fn from(err: 
tokio::sync::broadcast::error::RecvError) -> CommsError {\n\n CommsErrorKind::ReceiveError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<super::CommitError> for CommsError {\n\n fn from(err: super::CommitError) -> CommsError {\n\n match err {\n\n super::CommitError(super::CommitErrorKind::ValidationError(errs), _) => {\n\n CommsErrorKind::ValidationError(errs).into()\n\n }\n\n err => {\n\n CommsErrorKind::InternalError(format!(\"commit-failed - {}\", err.to_string())).into()\n\n }\n\n }\n\n }\n", "file_path": "lib/src/error/comms_error.rs", "rank": 58, "score": 64379.79175796133 }, { "content": "where\n\n T: std::fmt::Debug,\n\n{\n\n fn from(err: watch::error::SendError<T>) -> CompactError {\n\n CompactErrorKind::WatchError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<broadcast::error::RecvError> for CompactError {\n\n fn from(err: broadcast::error::RecvError) -> CompactError {\n\n CompactErrorKind::BroadcastError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl<T> From<broadcast::error::SendError<T>> for CompactError\n\nwhere\n\n T: std::fmt::Debug,\n\n{\n\n fn from(err: broadcast::error::SendError<T>) -> CompactError {\n\n CompactErrorKind::BroadcastError(err.to_string()).into()\n\n }\n\n}\n", "file_path": "lib/src/error/compact_error.rs", "rank": 59, "score": 64379.78157579797 }, { "content": " }\n\n\n\n pub fn as_result(self) -> Result<(), ProcessError> {\n\n match self.has_errors() {\n\n true => Err(self),\n\n false => Ok(()),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for ProcessError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let mut err = \"Processing error - \".to_string();\n\n for sink in self.sink_errors.iter() {\n\n err = err + &sink.to_string()[..] + \" - \";\n\n }\n\n for validation in self.validation_errors.iter() {\n\n err = err + &validation.to_string()[..] 
+ \" - \";\n\n }\n\n write!(f, \"{}\", err)\n\n }\n\n}\n\n\n\nimpl std::error::Error for ProcessError {}\n", "file_path": "lib/src/error/process_error.rs", "rank": 60, "score": 64379.7155765826 }, { "content": " display(\"failed to compact the chain due to an error in watch notification - {}\", err),\n\n }\n\n BroadcastError(err: String) {\n\n description(\"failed to compact the chain due to an error in broadcast notification\"),\n\n display(\"failed to compact the chain due to an error in broadcast notification - {}\", err)\n\n }\n\n Aborted {\n\n description(\"compacting has been aborted\")\n\n display(\"compacting has been aborted\")\n\n }\n\n }\n\n}\n\n\n\nimpl From<watch::error::RecvError> for CompactError {\n\n fn from(err: watch::error::RecvError) -> CompactError {\n\n CompactErrorKind::WatchError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl<T> From<watch::error::SendError<T>> for CompactError\n", "file_path": "lib/src/error/compact_error.rs", "rank": 61, "score": 64379.63999317191 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<GatherError> for AteError {\n\n fn from(err: GatherError) -> AteError {\n\n AteErrorKind::ServiceError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<GatherFailed> for GatherError {\n\n fn from(err: GatherFailed) -> GatherError {\n\n match err {\n\n GatherFailed::GroupNotFound(group) => GatherErrorKind::NotFound(group).into(),\n\n GatherFailed::NoAccess => GatherErrorKind::NoAccess.into(),\n\n GatherFailed::NoMasterKey => GatherErrorKind::NoMasterKey.into(),\n\n GatherFailed::InternalError(code) => GatherErrorKind::InternalError(code).into(),\n\n }\n\n }\n\n}\n", "file_path": "auth/src/error/gather_error.rs", "rank": 62, "score": 64379.62854410519 }, { "content": " }\n\n InvalidAmount {\n\n description(\"the coin is not big enough to be carved by this amount of the carvng amount is invalid\"),\n\n display(\"the coin is not big enough to be carved by this amount of the carvng amount is invalid\"),\n\n }\n\n }\n\n}\n\n\n\nimpl 
From<::ate::error::AteError> for CoinError {\n\n fn from(err: ::ate::error::AteError) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::AteError(err.0)).into()\n\n }\n\n}\n\n\n\nimpl From<::ate::error::AteErrorKind> for CoinErrorKind {\n\n fn from(err: ::ate::error::AteErrorKind) -> Self {\n\n CoinErrorKind::CoreError(CoreErrorKind::AteError(err))\n\n }\n\n}\n\n\n", "file_path": "tokera/src/error/coin_error.rs", "rank": 63, "score": 64379.62655158824 }, { "content": "use error_chain::error_chain;\n\n\n\nuse super::*;\n\n\n\nerror_chain! {\n\n types {\n\n InstanceError, InstanceErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n CoreError(super::CoreError, super::CoreErrorKind);\n\n QueryError(super::QueryError, super::QueryErrorKind);\n\n ContractError(super::ContractError, super::ContractErrorKind);\n\n FileSystemError(ate_files::error::FileSystemError, ate_files::error::FileSystemErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n MioError(wasm_bus_mio::api::MioError);\n\n }\n\n errors {\n\n Unauthorized {\n", "file_path": "tokera/src/error/instance_error.rs", "rank": 64, "score": 64379.566111130676 }, { "content": "}\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nimpl From<wasm_bus_time::prelude::Elapsed> for CommsError {\n\n fn from(_err: wasm_bus_time::prelude::Elapsed) -> CommsError {\n\n CommsErrorKind::IO(std::io::Error::new(\n\n std::io::ErrorKind::TimedOut,\n\n format!(\"Timeout while waiting for communication channel\").to_string(),\n\n ))\n\n .into()\n\n }\n\n}\n\n\n\nimpl<T> From<mpsc::error::SendError<T>> for CommsError {\n\n fn from(err: mpsc::error::SendError<T>) -> CommsError {\n\n CommsErrorKind::SendError(err.to_string()).into()\n\n }\n\n}\n\n\n\n#[cfg(feature = \"enable_full\")]\n", "file_path": "lib/src/error/comms_error.rs", "rank": 65, "score": 64379.55208667192 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::crypto::AteHash;\n\n\n\nerror_chain! 
{\n\n types {\n\n LintError, LintErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n TrustError(super::TrustError, super::TrustErrorKind);\n\n TimeError(super::TimeError, super::TimeErrorKind);\n\n SerializationError(super::SerializationError, super::SerializationErrorKind);\n\n }\n\n foreign_links {\n\n IO(std::io::Error);\n\n }\n\n errors {\n\n MissingWriteKey(hash: AteHash) {\n\n description(\"could not find the write public key in the session\"),\n\n display(\"could not find the write public key ({}) in the session\", hash.to_string()),\n\n }\n\n }\n\n}\n", "file_path": "lib/src/error/lint_error.rs", "rank": 66, "score": 64379.487448275046 }, { "content": "impl From<CoinCollectFailed> for CoinError {\n\n fn from(err: CoinCollectFailed) -> CoinError {\n\n match err {\n\n CoinCollectFailed::AuthenticationFailed => {\n\n CoinErrorKind::CoreError(CoreErrorKind::AuthenticationFailed).into()\n\n }\n\n CoinCollectFailed::InvalidCommodity => CoinErrorKind::InvalidCommodity.into(),\n\n CoinCollectFailed::InvalidCoin => CoinErrorKind::InvalidCoin.into(),\n\n CoinCollectFailed::OperatorBanned => {\n\n CoinErrorKind::CoreError(CoreErrorKind::OperatorBanned).into()\n\n }\n\n CoinCollectFailed::InternalError(code) => {\n\n CoinErrorKind::CoreError(CoreErrorKind::InternalError(code)).into()\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl From<CoinRotateFailed> for CoinError {\n\n fn from(err: CoinRotateFailed) -> CoinError {\n", "file_path": "tokera/src/error/coin_error.rs", "rank": 67, "score": 64379.03191547115 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::request::*;\n\n\n\nerror_chain! 
{\n\n types {\n\n CoreError, CoreErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n AteError(::ate::error::AteError, ::ate::error::AteErrorKind);\n\n ChainCreationError(::ate::error::ChainCreationError, ::ate::error::ChainCreationErrorKind);\n\n SerializationError(::ate::error::SerializationError, ::ate::error::SerializationErrorKind);\n\n InvokeError(::ate::error::InvokeError, ::ate::error::InvokeErrorKind);\n\n TimeError(::ate::error::TimeError, ::ate::error::TimeErrorKind);\n\n LoadError(::ate::error::LoadError, ::ate::error::LoadErrorKind);\n\n CommitError(::ate::error::CommitError, ::ate::error::CommitErrorKind);\n\n LockError(::ate::error::LockError, ::ate::error::LockErrorKind);\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n", "file_path": "tokera/src/error/core_error.rs", "rank": 68, "score": 64379.0099746642 }, { "content": "use error_chain::error_chain;\n\n\n\nerror_chain! {\n\n types {\n\n BusError, BusErrorKind, ResultExt, Result;\n\n }\n\n foreign_links {\n\n IO(tokio::io::Error);\n\n }\n\n errors {\n\n LoginFailed {\n\n description(\"failed to login with the supplied token\"),\n\n display(\"failed to login with the supplied token\"),\n\n }\n\n }\n\n}\n", "file_path": "tokera/src/error/bus_error.rs", "rank": 69, "score": 64378.979504399584 }, { "content": "impl From<SudoError> for AteError {\n\n fn from(err: SudoError) -> AteError {\n\n AteErrorKind::ServiceError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<SudoFailed> for SudoError {\n\n fn from(err: SudoFailed) -> SudoError {\n\n match err {\n\n SudoFailed::AccountLocked(duration) => SudoErrorKind::AccountLocked(duration).into(),\n\n SudoFailed::MissingToken => SudoErrorKind::MissingToken.into(),\n\n SudoFailed::NoMasterKey => SudoErrorKind::NoMasterKey.into(),\n\n SudoFailed::Unverified(username) => SudoErrorKind::Unverified(username).into(),\n\n SudoFailed::UserNotFound(username) => SudoErrorKind::NotFound(username).into(),\n\n SudoFailed::WrongCode => 
SudoErrorKind::WrongCode.into(),\n\n SudoFailed::InternalError(code) => SudoErrorKind::InternalError(code).into(),\n\n }\n\n }\n\n}\n", "file_path": "auth/src/error/sudo_error.rs", "rank": 70, "score": 64378.93776793961 }, { "content": "impl<T> From<tokio::sync::watch::error::SendError<T>> for AteError\n\nwhere\n\n T: std::fmt::Debug,\n\n{\n\n fn from(err: tokio::sync::watch::error::SendError<T>) -> AteError {\n\n AteErrorKind::IO(tokio::io::Error::new(\n\n tokio::io::ErrorKind::Other,\n\n err.to_string(),\n\n ))\n\n .into()\n\n }\n\n}\n", "file_path": "lib/src/error/ate_error.rs", "rank": 71, "score": 64378.93077450907 }, { "content": " description(\"failed to commit the data due to an error receiving the result in the interprocess pipe\"),\n\n display(\"failed to commit the data due to an error receiving the result in the interprocess pipe - {}\", err.to_string()),\n\n }\n\n RootError(err: String) {\n\n description(\"failed to commit the data due to an error at the root server while processing the events\"),\n\n display(\"failed to commit the data due to an error at the root server while processing the events - {}\", err.to_string()),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> From<tokio::sync::mpsc::error::SendError<T>> for CommitError {\n\n fn from(err: tokio::sync::mpsc::error::SendError<T>) -> CommitError {\n\n CommitErrorKind::PipeError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl<T> From<tokio::sync::broadcast::error::SendError<T>> for CommitError {\n\n fn from(err: tokio::sync::broadcast::error::SendError<T>) -> CommitError {\n\n CommitErrorKind::PipeError(err.to_string()).into()\n\n }\n\n}\n", "file_path": "lib/src/error/commit_error.rs", "rank": 72, "score": 64378.913797611836 }, { "content": " description(\"an internal error has occured\")\n\n display(\"an internal error has occured - code={}\", code)\n\n }\n\n NoInput {\n\n description(\"no input was supplied to the command\")\n\n display(\"no input was supplied to the command\")\n\n }\n\n Unsupported {\n\n 
description(\"the operation is not yet supported\")\n\n display(\"the operation is not yet supported\")\n\n }\n\n }\n\n}\n\n\n\nimpl From<::ate::error::AteError> for InstanceError {\n\n fn from(err: ::ate::error::AteError) -> Self {\n\n InstanceErrorKind::CoreError(CoreErrorKind::AteError(err.0)).into()\n\n }\n\n}\n\n\n", "file_path": "tokera/src/error/instance_error.rs", "rank": 73, "score": 64378.523798629176 }, { "content": " AteErrorKind::ServiceError(err.to_string()).into()\n\n }\n\n}\n\n\n\nimpl From<QueryFailed> for QueryError {\n\n fn from(err: QueryFailed) -> QueryError {\n\n match err {\n\n QueryFailed::Banned => QueryErrorKind::Banned.into(),\n\n QueryFailed::NotFound => QueryErrorKind::NotFound.into(),\n\n QueryFailed::Suspended => QueryErrorKind::Suspended.into(),\n\n QueryFailed::InternalError(code) => QueryErrorKind::InternalError(code).into(),\n\n }\n\n }\n\n}\n", "file_path": "auth/src/error/query_error.rs", "rank": 74, "score": 64378.4986264734 }, { "content": "use error_chain::error_chain;\n\n\n\nerror_chain! {\n\n types {\n\n ValidationError, ValidationErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n TrustError(super::TrustError, super::TrustErrorKind);\n\n TimeError(super::TimeError, super::TimeErrorKind);\n\n }\n\n errors {\n\n Denied(reason: String) {\n\n description(\"the data was rejected\"),\n\n display(\"the data was rejected - {}\", reason),\n\n }\n\n Many(errors: Vec<ValidationError>) {\n\n description(\"the data was rejected by one (or more) of the validators\"),\n\n display(\"the data was rejected by {} of the validators\", errors.len()),\n\n }\n\n AllAbstained {\n", "file_path": "lib/src/error/validation_error.rs", "rank": 75, "score": 64378.348891213966 }, { "content": "use error_chain::error_chain;\n\n\n\nerror_chain! 
{\n\n types {\n\n TransformError, TransformErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n CryptoError(super::CryptoError, super::CryptoErrorKind);\n\n TrustError(super::TrustError, super::TrustErrorKind);\n\n }\n\n foreign_links {\n\n IO(std::io::Error);\n\n }\n\n errors {\n\n #[cfg(feature = \"enable_openssl\")]\n\n EncryptionError(stack: openssl::error::ErrorStack) {\n\n description(\"encryption error while transforming event data\"),\n\n display(\"encryption error while transforming event data - {}\", err),\n\n }\n\n MissingData {\n", "file_path": "lib/src/error/transform_error.rs", "rank": 76, "score": 64378.29284412872 }, { "content": "use error_chain::error_chain;\n\n\n\nuse crate::header::PrimaryKey;\n\n\n\nerror_chain! {\n\n types {\n\n TrustError, TrustErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n TimeError(super::TimeError, super::TimeErrorKind);\n\n }\n\n errors {\n\n NoAuthorizationWrite(type_code: String, key: PrimaryKey, write: crate::meta::WriteOption) {\n\n description(\"data object with key could not be written as the current session has no signature key for this authorization\"),\n\n display(\"data object of type ({}) with key ({}) could not be written as the current session has no signature key for this authorization ({})\", type_code, key.as_hex_string(), write),\n\n }\n\n NoAuthorizationRead(type_code: String, key: PrimaryKey, read: crate::meta::ReadOption) {\n\n description(\"data object with key could not be read as the current session has no encryption key for this authorization\"),\n\n display(\"data object of type ({}) with key ({}) could not be read as the current session has no encryption key for this authorization ({})\", type_code, key.as_hex_string(), read),\n\n }\n", "file_path": "lib/src/error/trust_error.rs", "rank": 77, "score": 64378.267086320324 }, { "content": " }\n\n WebSocketInternalError(err: String) {\n\n description(\"web socket internal error\"),\n\n display(\"web socket internal error - {}\", err),\n\n }\n\n 
UnsupportedProtocolError(proto: String) {\n\n description(\"unsupported wire protocol\"),\n\n display(\"unsupported wire protocol ({})\", proto),\n\n }\n\n }\n\n}\n\n\n\nimpl From<tokio::time::error::Elapsed> for CommsError {\n\n fn from(_err: tokio::time::error::Elapsed) -> CommsError {\n\n CommsErrorKind::IO(std::io::Error::new(\n\n std::io::ErrorKind::TimedOut,\n\n format!(\"Timeout while waiting for communication channel\").to_string(),\n\n ))\n\n .into()\n\n }\n", "file_path": "lib/src/error/comms_error.rs", "rank": 78, "score": 64378.10270869878 }, { "content": "use error_chain::error_chain;\n\n\n\nerror_chain! {\n\n types {\n\n CommitError, CommitErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n CommsError(super::CommsError, super::CommsErrorKind);\n\n ValidationError(super::ValidationError, super::ValidationErrorKind);\n\n TransformError(super::TransformError, super::TransformErrorKind);\n\n LintError(super::LintError, super::LintErrorKind);\n\n TimeError(super::TimeError, super::TimeErrorKind);\n\n SinkError(super::SinkError, super::SinkErrorKind);\n\n SerializationError(super::SerializationError, super::SerializationErrorKind);\n\n }\n\n foreign_links {\n\n IO(::tokio::io::Error);\n\n }\n\n errors {\n\n Aborted {\n", "file_path": "lib/src/error/commit_error.rs", "rank": 79, "score": 64377.98389887577 }, { "content": "use error_chain::error_chain;\n\n\n\nerror_chain! 
{\n\n types {\n\n InvokeError, InvokeErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n LoadError(super::LoadError, super::LoadErrorKind);\n\n SerializationError(super::SerializationError, super::SerializationErrorKind);\n\n CommitError(super::CommitError, super::CommitErrorKind);\n\n TransformError(super::TransformError, super::TransformErrorKind);\n\n LockError(super::LockError, super::LockErrorKind);\n\n }\n\n foreign_links {\n\n IO(std::io::Error);\n\n }\n\n errors {\n\n PipeError(err: String) {\n\n description(\"command failed due to pipe error\"),\n\n display(\"command failed due to pipe error - {}\", err)\n", "file_path": "lib/src/error/invoke_error.rs", "rank": 80, "score": 64377.98150806224 }, { "content": " WalletErrorKind::CoinError(CoinErrorKind::InvalidCommodity).into()\n\n }\n\n CancelDepositFailed::InvalidCoin => {\n\n WalletErrorKind::CoinError(CoinErrorKind::InvalidCoin).into()\n\n }\n\n CancelDepositFailed::Forbidden => {\n\n WalletErrorKind::CoreError(CoreErrorKind::Forbidden).into()\n\n }\n\n CancelDepositFailed::InternalError(code) => {\n\n WalletErrorKind::CoreError(CoreErrorKind::InternalError(code)).into()\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl From<DepositFailed> for WalletError {\n\n fn from(err: DepositFailed) -> WalletError {\n\n match err {\n\n DepositFailed::OperatorBanned => {\n\n WalletErrorKind::CoreError(CoreErrorKind::OperatorBanned).into()\n", "file_path": "tokera/src/error/wallet_error.rs", "rank": 81, "score": 64377.95765662303 }, { "content": " description(\"missing data for this record\")\n\n display(\"missing data for this record\")\n\n }\n\n MissingReadKey(hash: String) {\n\n description(\"missing the read key needed to encrypt/decrypt this data object\"),\n\n display(\"missing the read key ({}) needed to encrypt/decrypt this data object\", hash)\n\n }\n\n UnspecifiedReadability {\n\n description(\"the readability for this data object has not been specified\")\n\n display(\"the readability for this data object has not been 
specified\")\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"enable_openssl\")]\n\nimpl From<openssl::error::ErrorStack> for Error {\n\n fn from(err: openssl::error::ErrorStack) -> Error {\n\n ErrorKind::EncryptionError(err).into()\n\n }\n\n}\n", "file_path": "lib/src/error/transform_error.rs", "rank": 82, "score": 64377.9479321225 }, { "content": " }\n\n NotDeposited {\n\n description(\"the funds do not exist as the deposit was never completed\")\n\n display(\"the funds do not exist as the deposit was never completed\")\n\n }\n\n WalletLocked {\n\n description(\"the wallet is currently locked for modification due to a concurrent operation\"),\n\n display(\"the wallet is currently locked for modification due to a concurrent operation\"),\n\n }\n\n EmailError(err: String) {\n\n description(\"failed to send email\"),\n\n display(\"failed to send email - {}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl From<::ate::error::AteError> for WalletError {\n\n fn from(err: ::ate::error::AteError) -> Self {\n\n WalletErrorKind::CoreError(CoreErrorKind::AteError(err.0)).into()\n\n }\n", "file_path": "tokera/src/error/wallet_error.rs", "rank": 83, "score": 64377.89916824109 }, { "content": " }\n\n InvalidAuthenticatorCode {\n\n description(\"one or more of the supplied authenticator codes was not valid\")\n\n display(\"one or more of the supplied authenticator codes was not valid\")\n\n }\n\n NoMasterKey {\n\n description(\"reset failed as the server has not been properly initialized\")\n\n display(\"reset failed as the server has not been properly initialized\")\n\n }\n\n InternalError(code: u16) {\n\n description(\"reset failed as the server experienced an internal error\")\n\n display(\"reset failed as the server experienced an internal error - code={}\", code)\n\n }\n\n }\n\n}\n\n\n\nimpl From<ResetError> for AteError {\n\n fn from(err: ResetError) -> AteError {\n\n AteErrorKind::ServiceError(err.to_string()).into()\n\n }\n", "file_path": "auth/src/error/reset_error.rs", "rank": 84, 
"score": 64377.87692541713 }, { "content": " ContractCreateFailed::OperatorBanned => {\n\n ContractErrorKind::CoreError(CoreErrorKind::OperatorBanned).into()\n\n }\n\n ContractCreateFailed::OperatorNotFound => {\n\n ContractErrorKind::CoreError(CoreErrorKind::OperatorNotFound).into()\n\n }\n\n ContractCreateFailed::UnsupportedCurrency(currency) => {\n\n ContractErrorKind::UnsupportedCurrency(currency).into()\n\n }\n\n ContractCreateFailed::InternalError(code) => {\n\n ContractErrorKind::CoreError(CoreErrorKind::InternalError(code)).into()\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl From<ContractActionFailed> for ContractError {\n\n fn from(err: ContractActionFailed) -> ContractError {\n\n match err {\n\n ContractActionFailed::AccountSuspended => {\n", "file_path": "tokera/src/error/contract_error.rs", "rank": 85, "score": 64377.85176921561 }, { "content": " Unverified(username: String) {\n\n description(\"login failed as the account is not yet verified\")\n\n display(\"login failed for {} as the account is not yet verified\", username)\n\n }\n\n WrongPassword {\n\n description(\"login failed due to an incorrect password\")\n\n display(\"login failed due to an incorrect password\")\n\n }\n\n InternalError(code: u16) {\n\n description(\"login failed as the server experienced an internal error\")\n\n display(\"login failed as the server experienced an internal error - code={}\", code)\n\n }\n\n }\n\n}\n\n\n\nimpl From<LoginError> for AteError {\n\n fn from(err: LoginError) -> AteError {\n\n AteErrorKind::ServiceError(err.to_string()).into()\n\n }\n\n}\n", "file_path": "auth/src/error/login_error.rs", "rank": 86, "score": 64377.84339428835 }, { "content": "use error_chain::error_chain;\n\n\n\nerror_chain! 
{\n\n types {\n\n LockError, LockErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n SerializationError(super::SerializationError, super::SerializationErrorKind);\n\n LintError(super::LintError, super::LintErrorKind);\n\n }\n\n errors {\n\n CommitError(err: String) {\n\n description(\"failed to lock the data object due to issue committing the event to the pipe\"),\n\n display(\"failed to lock the data object due to issue committing the event to the pipe - {}\", err),\n\n }\n\n ReceiveError(err: String) {\n\n description(\"failed to lock the data object due to an error receiving on the pipe\"),\n\n display(\"failed to lock the data object due to an error receiving on the pipe - {}\", err),\n\n }\n\n WeakDio {\n", "file_path": "lib/src/error/lock_error.rs", "rank": 87, "score": 64377.844939365095 }, { "content": "\n\nimpl From<LoginFailed> for LoginError {\n\n fn from(err: LoginFailed) -> LoginError {\n\n match err {\n\n LoginFailed::AccountLocked(duration) => LoginErrorKind::AccountLocked(duration).into(),\n\n LoginFailed::NoMasterKey => LoginErrorKind::NoMasterKey.into(),\n\n LoginFailed::Unverified(username) => LoginErrorKind::Unverified(username).into(),\n\n LoginFailed::UserNotFound(username) => LoginErrorKind::NotFound(username).into(),\n\n LoginFailed::WrongPassword => LoginErrorKind::WrongPassword.into(),\n\n LoginFailed::InternalError(code) => LoginErrorKind::InternalError(code).into(),\n\n }\n\n }\n\n}\n", "file_path": "auth/src/error/login_error.rs", "rank": 88, "score": 64377.803715567454 }, { "content": " description(\"query failed as the user could not be found\")\n\n display(\"query failed as the user could not be found\")\n\n }\n\n Banned {\n\n description(\"query failed as the user has been banned\")\n\n display(\"query failed as the user has been banned\")\n\n }\n\n Suspended {\n\n description(\"query failed as the user has been suspended\")\n\n display(\"query failed as the user has been suspended\")\n\n }\n\n InternalError(code: u16) {\n\n 
description(\"query failed as the server experienced an internal error\")\n\n display(\"query failed as the server experienced an internal error - code={}\", code)\n\n }\n\n }\n\n}\n\n\n\nimpl From<QueryError> for AteError {\n\n fn from(err: QueryError) -> AteError {\n", "file_path": "auth/src/error/query_error.rs", "rank": 89, "score": 64377.78399727197 }, { "content": "use error_chain::error_chain;\n\n\n\nerror_chain! {\n\n types {\n\n BusError, BusErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n LoadError(super::LoadError, super::LoadErrorKind);\n\n SerializationError(super::SerializationError, super::SerializationErrorKind);\n\n LockError(super::LockError, super::LockErrorKind);\n\n TransformError(super::TransformError, super::TransformErrorKind);\n\n }\n\n errors {\n\n ReceiveError(err: String) {\n\n description(\"failed to receive event from bus due to an internal error\"),\n\n display(\"failed to receive event from bus due to an internal error: '{}'\", err),\n\n }\n\n ChannelClosed {\n\n description(\"failed to receive event from bus as the channel is closed\"),\n\n display(\"failed to receive event from bus as the channel is closed\"),\n", "file_path": "lib/src/error/bus_error.rs", "rank": 90, "score": 64377.78382679021 }, { "content": " CoinErrorKind::CoreError(CoreErrorKind::LockError(err))\n\n }\n\n}\n\n\n\nimpl From<CoinCarveFailed> for CoinError {\n\n fn from(err: CoinCarveFailed) -> CoinError {\n\n match err {\n\n CoinCarveFailed::AuthenticationFailed => {\n\n CoinErrorKind::CoreError(CoreErrorKind::AuthenticationFailed).into()\n\n }\n\n CoinCarveFailed::InvalidAmount => CoinErrorKind::InvalidAmount.into(),\n\n CoinCarveFailed::InvalidCommodity => CoinErrorKind::InvalidCommodity.into(),\n\n CoinCarveFailed::InvalidCoin => CoinErrorKind::InvalidCoin.into(),\n\n CoinCarveFailed::InternalError(code) => {\n\n CoinErrorKind::CoreError(CoreErrorKind::InternalError(code)).into()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "tokera/src/error/coin_error.rs", 
"rank": 91, "score": 64377.74584704138 }, { "content": "}\n\n\n\nimpl From<ResetFailed> for ResetError {\n\n fn from(err: ResetFailed) -> ResetError {\n\n match err {\n\n ResetFailed::InvalidEmail(email) => ResetErrorKind::NotFound(email).into(),\n\n ResetFailed::InvalidRecoveryCode => ResetErrorKind::InvalidRecoveryCode.into(),\n\n ResetFailed::InvalidAuthenticatorCode => {\n\n ResetErrorKind::InvalidAuthenticatorCode.into()\n\n }\n\n ResetFailed::RecoveryImpossible => ResetErrorKind::RecoveryImpossible.into(),\n\n ResetFailed::NoMasterKey => ResetErrorKind::NoMasterKey.into(),\n\n ResetFailed::InternalError(code) => ResetErrorKind::InternalError(code).into(),\n\n }\n\n }\n\n}\n", "file_path": "auth/src/error/reset_error.rs", "rank": 92, "score": 64377.741583915675 }, { "content": "}\n\n\n\nimpl From<WithdrawFailed> for WalletError {\n\n fn from(err: WithdrawFailed) -> WalletError {\n\n match err {\n\n WithdrawFailed::OperatorBanned => {\n\n WalletErrorKind::CoreError(CoreErrorKind::OperatorBanned).into()\n\n }\n\n WithdrawFailed::OperatorNotFound => {\n\n WalletErrorKind::CoreError(CoreErrorKind::OperatorNotFound).into()\n\n }\n\n WithdrawFailed::AuthenticationFailed => {\n\n WalletErrorKind::CoreError(CoreErrorKind::AuthenticationFailed).into()\n\n }\n\n WithdrawFailed::AccountSuspended => {\n\n WalletErrorKind::CoreError(CoreErrorKind::AccountSuspended).into()\n\n }\n\n WithdrawFailed::AlreadyWithdrawn => WalletErrorKind::AlreadyPaid.into(),\n\n WithdrawFailed::NotDeposited => WalletErrorKind::NotDeposited.into(),\n\n WithdrawFailed::TooSmall => WalletErrorKind::TooSmall.into(),\n", "file_path": "tokera/src/error/wallet_error.rs", "rank": 93, "score": 64377.65073049217 }, { "content": " }\n\n}\n\n\n\nimpl From<CreateUserFailed> for CreateError {\n\n fn from(err: CreateUserFailed) -> CreateError {\n\n match err {\n\n CreateUserFailed::AlreadyExists(msg) => CreateErrorKind::AlreadyExists(msg).into(),\n\n CreateUserFailed::InvalidEmail => 
CreateErrorKind::InvalidEmail.into(),\n\n CreateUserFailed::NoMasterKey => CreateErrorKind::NoMasterKey.into(),\n\n CreateUserFailed::NoMoreRoom => CreateErrorKind::NoMoreRoom.into(),\n\n CreateUserFailed::InternalError(code) => CreateErrorKind::InternalError(code).into(),\n\n CreateUserFailed::TermsAndConditions(terms) => {\n\n CreateErrorKind::TermsAndConditions(terms).into()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "auth/src/error/create_error.rs", "rank": 94, "score": 64377.59122376123 }, { "content": " NoMasterKey {\n\n description(\"create failed as the server has not been properly initialized\")\n\n display(\"create failed as the server has not been properly initialized\")\n\n }\n\n ValidationError(reason: String) {\n\n description(\"create failed as there was a validation error\")\n\n display(\"create failed as there was a validation error - {}\", reason)\n\n }\n\n TermsAndConditions(terms: String) {\n\n description(\"create failed as the caller did not agree to the terms and conditions\")\n\n display(\"create failed as the caller did not agree to the terms and conditions\")\n\n }\n\n InternalError(code: u16) {\n\n description(\"create failed as the server experienced an internal error\")\n\n display(\"create failed as the server experienced an internal error - code={}\", code)\n\n }\n\n }\n\n}\n\n\n\nimpl From<CreateError> for AteError {\n", "file_path": "auth/src/error/create_error.rs", "rank": 95, "score": 64377.56878928463 }, { "content": " display(\"this server has not been properly initialized (paypal config)\"),\n\n }\n\n SafetyCheckFailed {\n\n description(\"one of the saftey and security failsafes was triggered\"),\n\n display(\"one of the saftey and security failsafes was triggered\"),\n\n }\n\n InternalError(code: u16) {\n\n description(\"the server experienced an internal error\")\n\n display(\"the server experienced an internal error - code={}\", code)\n\n }\n\n Other(err: String) {\n\n description(\"this server experienced an error\"),\n\n 
display(\"this server experienced an error - {}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl From<ServiceFindFailed> for CoreError {\n\n fn from(err: ServiceFindFailed) -> CoreError {\n\n match err {\n\n ServiceFindFailed::Forbidden => CoreErrorKind::Forbidden.into(),\n\n ServiceFindFailed::InternalError(code) => CoreErrorKind::InternalError(code).into(),\n\n }\n\n }\n\n}", "file_path": "tokera/src/error/core_error.rs", "rank": 96, "score": 64377.55907859647 }, { "content": "\n\nimpl From<ContractCreateFailed> for ContractError {\n\n fn from(err: ContractCreateFailed) -> ContractError {\n\n match err {\n\n ContractCreateFailed::AccountSuspended => {\n\n ContractErrorKind::CoreError(CoreErrorKind::AccountSuspended).into()\n\n }\n\n ContractCreateFailed::AlreadyExists(msg) => {\n\n ContractErrorKind::AlreadyExists(msg).into()\n\n }\n\n ContractCreateFailed::AuthenticationFailed => {\n\n ContractErrorKind::CoreError(CoreErrorKind::AuthenticationFailed).into()\n\n }\n\n ContractCreateFailed::Forbidden => {\n\n ContractErrorKind::CoreError(CoreErrorKind::Forbidden).into()\n\n }\n\n ContractCreateFailed::InvalidService => ContractErrorKind::InvalidService.into(),\n\n ContractCreateFailed::NoMasterKey => {\n\n ContractErrorKind::CoreError(CoreErrorKind::NoMasterKey).into()\n\n }\n", "file_path": "tokera/src/error/contract_error.rs", "rank": 97, "score": 64377.54105876721 }, { "content": " }\n\n}\n\n\n\nimpl From<CoinCombineFailed> for CoinError {\n\n fn from(err: CoinCombineFailed) -> CoinError {\n\n match err {\n\n CoinCombineFailed::AuthenticationFailed => {\n\n CoinErrorKind::CoreError(CoreErrorKind::AuthenticationFailed).into()\n\n }\n\n CoinCombineFailed::OperatorBanned => {\n\n CoinErrorKind::CoreError(CoreErrorKind::OperatorBanned).into()\n\n }\n\n CoinCombineFailed::InvalidCommodity => CoinErrorKind::InvalidCommodity.into(),\n\n CoinCombineFailed::InvalidCoin => CoinErrorKind::InvalidCoin.into(),\n\n CoinCombineFailed::InvalidRequest(err) => CoinErrorKind::CoreError(\n\n 
CoreErrorKind::InternalError(ate::utils::obscure_error_str(&err)),\n\n )\n\n .into(),\n\n CoinCombineFailed::InternalError(code) => {\n\n CoinErrorKind::CoreError(CoreErrorKind::InternalError(code)).into()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "tokera/src/error/coin_error.rs", "rank": 98, "score": 64377.4895207001 }, { "content": "use error_chain::error_chain;\n\n\n\nerror_chain! {\n\n types {\n\n AteError, AteErrorKind, ResultExt, Result;\n\n }\n\n links {\n\n BusError(super::BusError, super::BusErrorKind);\n\n ChainCreationError(super::ChainCreationError, super::ChainCreationErrorKind);\n\n CommitError(super::CommitError, super::CommitErrorKind);\n\n CommsError(super::CommsError, super::CommsErrorKind);\n\n CompactError(super::CompactError, super::CompactErrorKind);\n\n CryptoError(super::CryptoError, super::CryptoErrorKind);\n\n InvokeError(super::InvokeError, super::InvokeErrorKind);\n\n LintError(super::LintError, super::LintErrorKind);\n\n LoadError(super::LoadError, super::LoadErrorKind);\n\n LockError(super::LockError, super::LockErrorKind);\n\n SerializationError(super::SerializationError, super::SerializationErrorKind);\n\n SinkError(super::SinkError, super::SinkErrorKind);\n\n TimeError(super::TimeError, super::TimeErrorKind);\n", "file_path": "lib/src/error/ate_error.rs", "rank": 99, "score": 64377.3071953701 } ]
Rust
src/kernel/src/log.rs
ariadiamond/twizzler-Rust
5f5d01bac9127ca1d64bb8aa472a04f6634fc3a9
use core::{ fmt::Write, sync::atomic::{AtomicU64, Ordering}, }; use twizzler_abi::syscall::{ KernelConsoleReadBufferError, KernelConsoleReadError, KernelConsoleReadFlags, }; use crate::{interrupt, spinlock::Spinlock}; const KEC_BUFFER_LEN: usize = 4096; const MAX_SINGLE_WRITE: usize = KEC_BUFFER_LEN / 2; struct KernelConsoleInner { state: AtomicU64, buffer: core::cell::UnsafeCell<[u8; KEC_BUFFER_LEN]>, } unsafe impl Sync for KernelConsoleInner {} pub trait MessageLevel {} pub struct EmergencyMessage; impl MessageLevel for EmergencyMessage {} pub struct NormalMessage; impl MessageLevel for NormalMessage {} pub struct ConsoleWriteError; const INPUT_BUFFER_SIZE: usize = 1024; pub struct KernelConsoleReadBuffer { buf: [u8; INPUT_BUFFER_SIZE], pos: usize, } impl KernelConsoleReadBuffer { const fn new() -> Self { Self { buf: [0; INPUT_BUFFER_SIZE], pos: 0, } } pub fn push_input_byte(&mut self, byte: u8) { if self.pos == INPUT_BUFFER_SIZE { return; } self.buf[self.pos] = byte; self.pos += 1; } pub fn read_byte(&mut self) -> Option<u8> { if self.pos == 0 { return None; } let byte = self.buf[0]; self.buf.copy_within(1.., 0); self.pos -= 1; Some(byte) } } pub struct KernelConsole<T: KernelConsoleHardware, Level: MessageLevel> { inner: &'static KernelConsoleInner, hardware: T, lock: Spinlock<()>, read_lock: Spinlock<KernelConsoleReadBuffer>, _pd: core::marker::PhantomData<Level>, } unsafe impl<T: KernelConsoleHardware, Level: MessageLevel> Sync for KernelConsole<T, Level> {} static KERNEL_CONSOLE_MAIN: KernelConsoleInner = KernelConsoleInner { state: AtomicU64::new(0), buffer: core::cell::UnsafeCell::new([0; KEC_BUFFER_LEN]), }; pub trait KernelConsoleHardware { fn write(&self, data: &[u8], flags: KernelConsoleWriteFlags); } impl<T: KernelConsoleHardware> core::fmt::Write for KernelConsole<T, EmergencyMessage> { fn write_str(&mut self, s: &str) -> core::fmt::Result { let _ = self.write(s.as_bytes(), KernelConsoleWriteFlags::empty()); Ok(()) } } impl<T: KernelConsoleHardware> 
core::fmt::Write for KernelConsole<T, NormalMessage> { fn write_str(&mut self, s: &str) -> core::fmt::Result { let _ = self.write(s.as_bytes(), KernelConsoleWriteFlags::empty()); Ok(()) } } bitflags::bitflags! { pub struct KernelConsoleWriteFlags: u32 { const DISCARD_ON_FULL = 1; } } impl From<twizzler_abi::syscall::KernelConsoleWriteFlags> for KernelConsoleWriteFlags { fn from(x: twizzler_abi::syscall::KernelConsoleWriteFlags) -> Self { if x.contains(twizzler_abi::syscall::KernelConsoleWriteFlags::DISCARD_ON_FULL) { Self::DISCARD_ON_FULL } else { Self::empty() } } } fn write_head(s: u64) -> u64 { (s >> 32) & 0xffff } fn write_resv(s: u64) -> u64 { (s >> 16) & 0xffff } fn read_head(s: u64) -> u64 { s & 0xffff } fn new_state(rh: u64, wh: u64, wr: u64) -> u64 { ((rh % KEC_BUFFER_LEN as u64) & 0xffff) | (((wh % KEC_BUFFER_LEN as u64) & 0xffff) << 32) | (((wr % KEC_BUFFER_LEN as u64) & 0xffff) << 16) } fn did_pass(x: u64, y: u64, l: u64, n: u64) -> bool { assert!(l < n); let next_x = (x + l) % n; let did_wrap = next_x < x; if x < y { did_wrap || next_x >= y } else { next_x >= y && did_wrap } } fn reserve_write(state: u64, len: usize) -> u64 { let len = len as u64; let wr = write_resv(state); let mut wh = write_head(state); let mut rh = read_head(state); let passed_rh = did_pass(wr, rh, len, KEC_BUFFER_LEN as u64); let passed_wh = did_pass(wr, wh, len, KEC_BUFFER_LEN as u64); let wr = (wr + len) % KEC_BUFFER_LEN as u64; if passed_rh { rh = wr; } if passed_wh { wh = (wr - len) % KEC_BUFFER_LEN as u64; } new_state(rh, wh, wr) } fn commit_write(state: u64, len: usize) -> u64 { let wh = write_head(state); let wr = write_resv(state); new_state(read_head(state), wh + len as u64, wr) } fn reserve_space(state: u64, len: usize, toss: bool) -> (bool, u64, u64) { let new_state = reserve_write(state, len); ( read_head(state) == read_head(new_state) || !toss, new_state, write_head(state), ) } impl KernelConsoleInner { fn try_commit(&self, old: u64, new: u64) -> bool { self.state 
.compare_exchange(old, new, Ordering::SeqCst, Ordering::SeqCst) .is_ok() } fn write_buffer( &self, data: &[u8], flags: KernelConsoleWriteFlags, ) -> Result<(), ConsoleWriteError> { let data = &data[0..core::cmp::min(data.len(), MAX_SINGLE_WRITE)]; loop { let state = self.state.load(Ordering::SeqCst); let (ok, new_state, copy_offset) = reserve_space( state, data.len(), flags.contains(KernelConsoleWriteFlags::DISCARD_ON_FULL), ); if !ok { return Err(ConsoleWriteError {}); } if !self.try_commit(state, new_state) { continue; } let (first_len, second_len) = if copy_offset + data.len() as u64 > KEC_BUFFER_LEN as u64 { let first_len = KEC_BUFFER_LEN as u64 - copy_offset; (first_len, data.len() as u64 - first_len) } else { (data.len() as u64, 0) }; (&mut unsafe { *self.buffer.get() }) [copy_offset as usize..(copy_offset + first_len) as usize] .copy_from_slice(&data[0..first_len as usize]); (&mut unsafe { *self.buffer.get() })[0..second_len as usize] .copy_from_slice(&data[first_len as usize..(first_len + second_len) as usize]); let new_committed_state = commit_write(new_state, data.len()); if self.try_commit(new_state, new_committed_state) { break; } } Ok(()) } } impl<T: KernelConsoleHardware> KernelConsole<T, EmergencyMessage> { pub fn write( &self, data: &[u8], flags: KernelConsoleWriteFlags, ) -> Result<(), ConsoleWriteError> { self.hardware.write(data, flags); self.inner.write_buffer(data, flags) } } impl<T: KernelConsoleHardware> KernelConsole<T, NormalMessage> { pub fn write( &self, data: &[u8], flags: KernelConsoleWriteFlags, ) -> Result<(), ConsoleWriteError> { self.hardware.write(data, flags); self.inner.write_buffer(data, flags) } } impl<T: KernelConsoleHardware, M: MessageLevel> KernelConsole<T, M> { fn read_buffer_bytes(&self, _slice: &mut [u8]) -> Result<usize, KernelConsoleReadBufferError> { todo!() } fn read_bytes( &self, slice: &mut [u8], flags: KernelConsoleReadFlags, ) -> Result<usize, KernelConsoleReadError> { let mut i = 0; loop { if i == slice.len() { 
break; } let b = &mut slice[i]; if let Some(x) = self.read_lock.lock().read_byte() { *b = match x { 4 => return Ok(i), _ => x, }; i += 1; } else if flags.contains(KernelConsoleReadFlags::NONBLOCKING) || i > 0 { return Ok(i); } else { crate::sched::schedule(true); } } Ok(slice.len()) } } pub fn write_bytes(slice: &[u8], flags: KernelConsoleWriteFlags) -> Result<(), ConsoleWriteError> { unsafe { NORMAL_CONSOLE.write(slice, flags) } } pub fn read_bytes( slice: &mut [u8], flags: KernelConsoleReadFlags, ) -> Result<usize, KernelConsoleReadError> { unsafe { NORMAL_CONSOLE.read_bytes(slice, flags) } } pub fn read_buffer_bytes(slice: &mut [u8]) -> Result<usize, KernelConsoleReadBufferError> { unsafe { NORMAL_CONSOLE.read_buffer_bytes(slice) } } pub fn push_input_byte(byte: u8) { unsafe { let byte = match byte { 13 => 10, 127 => 8, x => x, }; NORMAL_CONSOLE.read_lock.lock().push_input_byte(byte); if byte == 8 { let _ = write_bytes(&[8, b' '], KernelConsoleWriteFlags::DISCARD_ON_FULL); } let _ = write_bytes(&[byte], KernelConsoleWriteFlags::DISCARD_ON_FULL); } } static mut EMERGENCY_CONSOLE: KernelConsole< crate::machine::MachineConsoleHardware, EmergencyMessage, > = KernelConsole { inner: &KERNEL_CONSOLE_MAIN, hardware: crate::machine::MachineConsoleHardware::new(), _pd: core::marker::PhantomData, lock: Spinlock::new(()), read_lock: Spinlock::new(KernelConsoleReadBuffer::new()), }; static mut NORMAL_CONSOLE: KernelConsole<crate::machine::MachineConsoleHardware, NormalMessage> = KernelConsole { inner: &KERNEL_CONSOLE_MAIN, hardware: crate::machine::MachineConsoleHardware::new(), _pd: core::marker::PhantomData, lock: Spinlock::new(()), read_lock: Spinlock::new(KernelConsoleReadBuffer::new()), }; #[doc(hidden)] pub fn _print_normal(args: ::core::fmt::Arguments) { let istate = interrupt::disable(); unsafe { let _guard = NORMAL_CONSOLE.lock.lock(); NORMAL_CONSOLE .write_fmt(args) .expect("printing to serial failed"); } interrupt::set(istate); } pub fn _print_emergency(args: 
::core::fmt::Arguments) { unsafe { EMERGENCY_CONSOLE .write_fmt(args) .expect("printing to serial failed"); } } #[macro_export] macro_rules! log { ($($arg:tt)*) => { $crate::log::_print_normal(format_args!($($arg)*)) }; } #[macro_export] macro_rules! logln { () => { $crate::log!("\n") }; ($fmt:expr) => { $crate::log!(concat!($fmt, "\n")) }; ($fmt:expr, $($arg:tt)*) => { $crate::log!(concat!($fmt, "\n"), $($arg)*) }; } #[macro_export] macro_rules! emerglog { ($($arg:tt)*) => { $crate::log::_print_emergency(format_args!($($arg)*)) }; } #[macro_export] macro_rules! emerglogln { () => { $crate::emerglog!("\n") }; ($fmt:expr) => { $crate::emerglog!(concat!($fmt, "\n")) }; ($fmt:expr, $($arg:tt)*) => { $crate::emerglog!(concat!($fmt, "\n"), $($arg)*) }; }
use core::{ fmt::Write, sync::atomic::{AtomicU64, Ordering}, }; use twizzler_abi::syscall::{ KernelConsoleReadBufferError, KernelConsoleReadError, KernelConsoleReadFlags, }; use crate::{interrupt, spinlock::Spinlock}; const KEC_BUFFER_LEN: usize = 4096; const MAX_SINGLE_WRITE: usize = KEC_BUFFER_LEN / 2; struct KernelConsoleInner { state: AtomicU64, buffer: core::cell::UnsafeCell<[u8; KEC_BUFFER_LEN]>, } unsafe impl Sync for KernelConsoleInner {} pub trait MessageLevel {} pub struct EmergencyMessage; impl MessageLevel for EmergencyMessage {} pub struct NormalMessage; impl MessageLevel for NormalMessage {} pub struct ConsoleWriteError; const INPUT_BUFFER_SIZE: usize = 1024; pub struct KernelConsoleReadBuffer { buf: [u8; INPUT_BUFFER_SIZE], pos: usize, } impl KernelConsoleReadBuffer { const fn new() -> Self { Self { buf: [0; INPUT_BUFFER_SIZE], pos: 0, } } pub fn push_input_byte(&mut self, byte: u8) { if self.pos == INPUT_BUFFER_SIZE { return; } self.buf[self.pos] = byte; self.pos += 1; } pub fn read_byte(&mut self) -> Option<u8> { if self.pos == 0 { return None; } let byte = self.buf[0]; self.buf.copy_within(1.., 0); self.pos -= 1; Some(byte) } } pub struct KernelConsole<T: KernelConsoleHardware, Level: MessageLevel> { inner: &'static KernelConsoleInner, hardware: T, lock: Spinlock<()>, read_lock: Spinlock<KernelConsoleReadBuffer>, _pd: core::marker::PhantomData<Level>, } unsafe impl<T: KernelConsoleHardware, Level: MessageLevel> Sync for KernelConsole<T, Level> {} static KERNEL_CONSOLE_MAIN: KernelConsoleInner = KernelConsoleInner { state: AtomicU64::new(0), buffer: core::cell::UnsafeCell::new([0; KEC_BUFFER_LEN]), }; pub trait KernelConsoleHardware { fn write(&self, data: &[u8], flags: KernelConsoleWriteFlags); } impl<T: KernelConsoleHardware> core::fmt::Write for KernelConsole<T, EmergencyMessage> { fn write_str(&mut self, s: &str) -> core::fmt::Result { let _ = self.write(s.as_bytes(), KernelConsoleWriteFlags::empty()); Ok(()) } } impl<T: KernelConsoleHardware> 
core::fmt::Write for KernelConsole<T, NormalMessage> { fn write_str(&mut self, s: &str) -> core::fmt::Result { let _ = self.write(s.as_bytes(), KernelConsoleWriteFlags::empty()); Ok(()) } } bitflags::bitflags! { pub struct KernelConsoleWriteFlags: u32 { const DISCARD_ON_FULL = 1; } } impl From<twizzler_abi::syscall::KernelConsoleWriteFlags> for KernelConsoleWriteFlags {
} fn write_head(s: u64) -> u64 { (s >> 32) & 0xffff } fn write_resv(s: u64) -> u64 { (s >> 16) & 0xffff } fn read_head(s: u64) -> u64 { s & 0xffff } fn new_state(rh: u64, wh: u64, wr: u64) -> u64 { ((rh % KEC_BUFFER_LEN as u64) & 0xffff) | (((wh % KEC_BUFFER_LEN as u64) & 0xffff) << 32) | (((wr % KEC_BUFFER_LEN as u64) & 0xffff) << 16) } fn did_pass(x: u64, y: u64, l: u64, n: u64) -> bool { assert!(l < n); let next_x = (x + l) % n; let did_wrap = next_x < x; if x < y { did_wrap || next_x >= y } else { next_x >= y && did_wrap } } fn reserve_write(state: u64, len: usize) -> u64 { let len = len as u64; let wr = write_resv(state); let mut wh = write_head(state); let mut rh = read_head(state); let passed_rh = did_pass(wr, rh, len, KEC_BUFFER_LEN as u64); let passed_wh = did_pass(wr, wh, len, KEC_BUFFER_LEN as u64); let wr = (wr + len) % KEC_BUFFER_LEN as u64; if passed_rh { rh = wr; } if passed_wh { wh = (wr - len) % KEC_BUFFER_LEN as u64; } new_state(rh, wh, wr) } fn commit_write(state: u64, len: usize) -> u64 { let wh = write_head(state); let wr = write_resv(state); new_state(read_head(state), wh + len as u64, wr) } fn reserve_space(state: u64, len: usize, toss: bool) -> (bool, u64, u64) { let new_state = reserve_write(state, len); ( read_head(state) == read_head(new_state) || !toss, new_state, write_head(state), ) } impl KernelConsoleInner { fn try_commit(&self, old: u64, new: u64) -> bool { self.state .compare_exchange(old, new, Ordering::SeqCst, Ordering::SeqCst) .is_ok() } fn write_buffer( &self, data: &[u8], flags: KernelConsoleWriteFlags, ) -> Result<(), ConsoleWriteError> { let data = &data[0..core::cmp::min(data.len(), MAX_SINGLE_WRITE)]; loop { let state = self.state.load(Ordering::SeqCst); let (ok, new_state, copy_offset) = reserve_space( state, data.len(), flags.contains(KernelConsoleWriteFlags::DISCARD_ON_FULL), ); if !ok { return Err(ConsoleWriteError {}); } if !self.try_commit(state, new_state) { continue; } let (first_len, second_len) = if copy_offset + 
data.len() as u64 > KEC_BUFFER_LEN as u64 { let first_len = KEC_BUFFER_LEN as u64 - copy_offset; (first_len, data.len() as u64 - first_len) } else { (data.len() as u64, 0) }; (&mut unsafe { *self.buffer.get() }) [copy_offset as usize..(copy_offset + first_len) as usize] .copy_from_slice(&data[0..first_len as usize]); (&mut unsafe { *self.buffer.get() })[0..second_len as usize] .copy_from_slice(&data[first_len as usize..(first_len + second_len) as usize]); let new_committed_state = commit_write(new_state, data.len()); if self.try_commit(new_state, new_committed_state) { break; } } Ok(()) } } impl<T: KernelConsoleHardware> KernelConsole<T, EmergencyMessage> { pub fn write( &self, data: &[u8], flags: KernelConsoleWriteFlags, ) -> Result<(), ConsoleWriteError> { self.hardware.write(data, flags); self.inner.write_buffer(data, flags) } } impl<T: KernelConsoleHardware> KernelConsole<T, NormalMessage> { pub fn write( &self, data: &[u8], flags: KernelConsoleWriteFlags, ) -> Result<(), ConsoleWriteError> { self.hardware.write(data, flags); self.inner.write_buffer(data, flags) } } impl<T: KernelConsoleHardware, M: MessageLevel> KernelConsole<T, M> { fn read_buffer_bytes(&self, _slice: &mut [u8]) -> Result<usize, KernelConsoleReadBufferError> { todo!() } fn read_bytes( &self, slice: &mut [u8], flags: KernelConsoleReadFlags, ) -> Result<usize, KernelConsoleReadError> { let mut i = 0; loop { if i == slice.len() { break; } let b = &mut slice[i]; if let Some(x) = self.read_lock.lock().read_byte() { *b = match x { 4 => return Ok(i), _ => x, }; i += 1; } else if flags.contains(KernelConsoleReadFlags::NONBLOCKING) || i > 0 { return Ok(i); } else { crate::sched::schedule(true); } } Ok(slice.len()) } } pub fn write_bytes(slice: &[u8], flags: KernelConsoleWriteFlags) -> Result<(), ConsoleWriteError> { unsafe { NORMAL_CONSOLE.write(slice, flags) } } pub fn read_bytes( slice: &mut [u8], flags: KernelConsoleReadFlags, ) -> Result<usize, KernelConsoleReadError> { unsafe { 
NORMAL_CONSOLE.read_bytes(slice, flags) } } pub fn read_buffer_bytes(slice: &mut [u8]) -> Result<usize, KernelConsoleReadBufferError> { unsafe { NORMAL_CONSOLE.read_buffer_bytes(slice) } } pub fn push_input_byte(byte: u8) { unsafe { let byte = match byte { 13 => 10, 127 => 8, x => x, }; NORMAL_CONSOLE.read_lock.lock().push_input_byte(byte); if byte == 8 { let _ = write_bytes(&[8, b' '], KernelConsoleWriteFlags::DISCARD_ON_FULL); } let _ = write_bytes(&[byte], KernelConsoleWriteFlags::DISCARD_ON_FULL); } } static mut EMERGENCY_CONSOLE: KernelConsole< crate::machine::MachineConsoleHardware, EmergencyMessage, > = KernelConsole { inner: &KERNEL_CONSOLE_MAIN, hardware: crate::machine::MachineConsoleHardware::new(), _pd: core::marker::PhantomData, lock: Spinlock::new(()), read_lock: Spinlock::new(KernelConsoleReadBuffer::new()), }; static mut NORMAL_CONSOLE: KernelConsole<crate::machine::MachineConsoleHardware, NormalMessage> = KernelConsole { inner: &KERNEL_CONSOLE_MAIN, hardware: crate::machine::MachineConsoleHardware::new(), _pd: core::marker::PhantomData, lock: Spinlock::new(()), read_lock: Spinlock::new(KernelConsoleReadBuffer::new()), }; #[doc(hidden)] pub fn _print_normal(args: ::core::fmt::Arguments) { let istate = interrupt::disable(); unsafe { let _guard = NORMAL_CONSOLE.lock.lock(); NORMAL_CONSOLE .write_fmt(args) .expect("printing to serial failed"); } interrupt::set(istate); } pub fn _print_emergency(args: ::core::fmt::Arguments) { unsafe { EMERGENCY_CONSOLE .write_fmt(args) .expect("printing to serial failed"); } } #[macro_export] macro_rules! log { ($($arg:tt)*) => { $crate::log::_print_normal(format_args!($($arg)*)) }; } #[macro_export] macro_rules! logln { () => { $crate::log!("\n") }; ($fmt:expr) => { $crate::log!(concat!($fmt, "\n")) }; ($fmt:expr, $($arg:tt)*) => { $crate::log!(concat!($fmt, "\n"), $($arg)*) }; } #[macro_export] macro_rules! 
emerglog { ($($arg:tt)*) => { $crate::log::_print_emergency(format_args!($($arg)*)) }; } #[macro_export] macro_rules! emerglogln { () => { $crate::emerglog!("\n") }; ($fmt:expr) => { $crate::emerglog!(concat!($fmt, "\n")) }; ($fmt:expr, $($arg:tt)*) => { $crate::emerglog!(concat!($fmt, "\n"), $($arg)*) }; }
fn from(x: twizzler_abi::syscall::KernelConsoleWriteFlags) -> Self { if x.contains(twizzler_abi::syscall::KernelConsoleWriteFlags::DISCARD_ON_FULL) { Self::DISCARD_ON_FULL } else { Self::empty() } }
function_block-function_prefix_line
[]
Rust
src/main.rs
boylede/aoc2020
397cc12bb13b7cefb04a151ce87d992b6b7afb6d
use aoc2020::{Day, RunError, Session, SessionError}; use clap::Clap; #[clap(version = "0.1.0", author = "Daniel Boyle")] #[derive(Debug, Clone, Clap)] pub struct Config { #[clap(short = 'd', long = "day", default_value = "1")] pub day: i32, #[clap(short = 'a', long = "all")] pub all: bool, #[clap(short = 'o', long = "offline")] pub offline: bool, #[clap(short = 's', long = "session")] pub session: Option<String>, #[clap(short = 'i', long = "input")] pub input: Option<String>, #[clap(short = 'e', long = "examples")] pub examples: bool, #[clap(long = "accept")] pub accept: bool, #[clap(long = "validate")] pub validate: bool, #[clap(long = "clear-cache")] pub clear: bool, } fn main() { let config = Config::parse(); let days = aoc2020::DAYS; if config.all { for day in days { run_day(day, &config); } } else { let index = (config.day - 1) as usize; if index < days.len() { let day = &days[index]; run_day(day, &config); } else { println!("Invalid day selection: {}", config.day); } } } fn run_day(day: &Day, config: &Config) { if config.clear { println!("Clearing cache..."); day.clear_cache(); } if config.examples { println!("running examples for day {}...", day.index); match day.run_with_examples() { Ok(_) => (), Err(e) => {print_error(e)}, } } else if !config.offline && config.input.is_none() { let session = if let Some(session) = &config.session { Session::new(&session) } else { Session::from_file("session.txt") }; if let Ok(session) = session { let output = day.cache_input_and_run(&session); match output { Ok(result) => { let next_output = if config.validate { day.validate_result(result) } else if config.accept { day.cache_result(result) } else { Ok(()) }; if let Err(e) = next_output { print_error(e); } } Err(e) => print_error(e), } } else { println!("Please create a session.txt file or provide --session on the command line."); } } else if config.input.is_none() { let output = day.run_with_cached_input(); match output { Ok(result) => { let next_output = if 
config.validate { day.validate_result(result) } else if config.accept { day.cache_result(result) } else { Ok(()) }; if let Err(e) = next_output { print_error(e); } } Err(e) => print_error(e), } } else { let input_filename = config.input.as_ref().expect("unreachable"); let output = day.run_with_test_input(&input_filename); if let Err(e) = output { print_error(e); } } } fn print_error(err: RunError) { use RunError::*; use SessionError::*; match err { SessionFailed(TokenFormat) => println!("Session token was unreadable."), SessionFailed(IoError(desc)) => println!("{}", desc), SessionFailed(NetworkError) => println!("Network request failed."), SessionFailed(BufferError) => println!("An error occured while writing memory."), SessionFailed(DomError) => println!("Unable to parse DOM."), CacheInError => println!("No cached input available."), CacheOutError => println!("No cached result available."), InputError => println!("Couldn't open test input file."), DayError(reason) => println!("Errors with this Day: {}", reason), } }
use aoc2020::{Day, RunError, Session, SessionError}; use clap::Clap; #[clap(version = "0.1.0", author = "Daniel Boyle")] #[derive(Debug, Clone, Clap)] pub struct Config { #[clap(short = 'd', long = "day", default_value = "1")] pub day: i32, #[clap(short = 'a', long = "all")] pub all: bool, #[clap(short = 'o', long = "offline")] pub offline: bool, #[clap(short = 's', long = "session")] pub session: Option<String>, #[clap(short = 'i', long = "input")] pub input: Option<String>, #[clap(short = 'e', long = "examples")] pub examples: bool, #[clap(long = "accept")] pub accept: bool, #[clap(long = "validate")] pub validate: bool, #[clap(long = "clear-cache")] pub clear: bool, } fn main() { let config = Config::parse(); let days = aoc2020::DAYS; if config.all { for day in day
fn run_day(day: &Day, config: &Config) { if config.clear { println!("Clearing cache..."); day.clear_cache(); } if config.examples { println!("running examples for day {}...", day.index); match day.run_with_examples() { Ok(_) => (), Err(e) => {print_error(e)}, } } else if !config.offline && config.input.is_none() { let session = if let Some(session) = &config.session { Session::new(&session) } else { Session::from_file("session.txt") }; if let Ok(session) = session { let output = day.cache_input_and_run(&session); match output { Ok(result) => { let next_output = if config.validate { day.validate_result(result) } else if config.accept { day.cache_result(result) } else { Ok(()) }; if let Err(e) = next_output { print_error(e); } } Err(e) => print_error(e), } } else { println!("Please create a session.txt file or provide --session on the command line."); } } else if config.input.is_none() { let output = day.run_with_cached_input(); match output { Ok(result) => { let next_output = if config.validate { day.validate_result(result) } else if config.accept { day.cache_result(result) } else { Ok(()) }; if let Err(e) = next_output { print_error(e); } } Err(e) => print_error(e), } } else { let input_filename = config.input.as_ref().expect("unreachable"); let output = day.run_with_test_input(&input_filename); if let Err(e) = output { print_error(e); } } } fn print_error(err: RunError) { use RunError::*; use SessionError::*; match err { SessionFailed(TokenFormat) => println!("Session token was unreadable."), SessionFailed(IoError(desc)) => println!("{}", desc), SessionFailed(NetworkError) => println!("Network request failed."), SessionFailed(BufferError) => println!("An error occured while writing memory."), SessionFailed(DomError) => println!("Unable to parse DOM."), CacheInError => println!("No cached input available."), CacheOutError => println!("No cached result available."), InputError => println!("Couldn't open test input file."), DayError(reason) => println!("Errors with 
this Day: {}", reason), } }
s { run_day(day, &config); } } else { let index = (config.day - 1) as usize; if index < days.len() { let day = &days[index]; run_day(day, &config); } else { println!("Invalid day selection: {}", config.day); } } }
function_block-function_prefixed
[ { "content": "pub fn cache_input_for_day(day: i32, session: &Session) -> Result<Vec<String>, SessionError> {\n\n let file_path = input_cache_path(day);\n\n let file = fs::OpenOptions::new()\n\n .read(true)\n\n .write(false)\n\n .create(false)\n\n .open(&file_path);\n\n let len = (&file)\n\n .as_ref()\n\n .map(|content| content.metadata().map(|m| m.len()).ok())\n\n .ok()\n\n .flatten();\n\n let lines = match file {\n\n Ok(content) if Some(210) != len => {\n\n // necessary to convert Result types\n\n pre_parse_input(content)\n\n }\n\n Ok(_) | Err(_) => {\n\n let url = input_url(day);\n\n println!(\"Downloading inputs for day {}.\", day);\n\n let new_file = session.download_file(&url, &file_path)?;\n\n pre_parse_input(new_file)\n\n }\n\n };\n\n Ok(lines)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 0, "score": 240881.18016238254 }, { "content": "pub fn cache_instructions_for_day(day: i32, session: &Session) -> Result<(), SessionError> {\n\n let file_path = instruction_cache_path(day);\n\n let file = fs::OpenOptions::new()\n\n .read(true)\n\n .write(false)\n\n .create(false)\n\n .open(&file_path);\n\n if let Err(_e) = file {\n\n let file = fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create(true)\n\n .open(&file_path);\n\n if let Ok(mut file) = file {\n\n let mut buf = Cursor::new(Vec::with_capacity(20480)); // 20kb buffer\n\n let url = instruction_cache_url(day);\n\n session.download(&url, &mut buf)?;\n\n let doc = Document::from_read(buf).map_err(|_| SessionError::DomError)?;\n\n for main in doc.find(Name(\"body\").descendant(Name(\"main\"))) {\n\n node_to_markdown(main, &mut file).map_err(|_| SessionError::DomError)?;\n\n }\n\n file.flush().map_err(|_| {\n\n SessionError::IoError(format!(\"Unable to close file: {}\", file_path))\n\n })?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 239069.01484956927 }, { "content": "pub fn cache_files(day: i32, session: &Session) -> Result<Vec<String>, SessionError> {\n\n 
cache_instructions_for_day(day, &session)?;\n\n cache_input_for_day(day, &session)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 216174.78670102026 }, { "content": "pub fn input_url(day: i32) -> String {\n\n format!(\"{}{}/day/{}/input\", AOC_URL, YEAR, day)\n\n}\n", "file_path": "src/lib.rs", "rank": 4, "score": 201648.54554275027 }, { "content": "pub fn input_cache_path(day: i32) -> String {\n\n format!(\"input/day{:02}.txt\", day)\n\n}\n", "file_path": "src/lib.rs", "rank": 5, "score": 197149.35648832485 }, { "content": "pub fn instruction_cache_url(day: i32) -> String {\n\n format!(\"{}{}/day/{}\", AOC_URL, YEAR, day)\n\n}\n", "file_path": "src/lib.rs", "rank": 6, "score": 173984.4570356063 }, { "content": "pub fn instruction_cache_path(day: i32) -> String {\n\n format!(\"instructions/day{:02}.md\", day)\n\n}\n", "file_path": "src/lib.rs", "rank": 7, "score": 173984.45703560632 }, { "content": "pub fn result_cache_path(day: i32) -> String {\n\n format!(\"results/day{:02}.txt\", day)\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Results(String, String);\n", "file_path": "src/lib.rs", "rank": 8, "score": 173984.45703560632 }, { "content": "struct Coord(i32, i32, i32);\n\n\n", "file_path": "src/day24.rs", "rank": 9, "score": 126248.9157471483 }, { "content": "#[derive(Debug, Hash, Clone, Copy, Eq, PartialEq)]\n\nstruct Coord(i32, i32);\n\n\n", "file_path": "src/day11.rs", "rank": 10, "score": 123506.62303313818 }, { "content": "fn valid_pid(value: &str) -> bool {\n\n if value.len() != 9 {\n\n false\n\n } else if value.parse::<u32>().is_err() {\n\n false\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 12, "score": 116815.66636692856 }, { "content": "fn valid_height(value: &str) -> bool {\n\n let len = value.len();\n\n if value.ends_with(\"cm\") {\n\n is_between(&value[0..len - 2], 150, 193)\n\n } else if value.ends_with(\"in\") {\n\n is_between(&value[0..len - 2], 59, 76)\n\n } else {\n\n false\n\n 
}\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 13, "score": 116815.66636692856 }, { "content": "fn valid_birth_year(value: &str) -> bool {\n\n if value.chars().count() == 4 {\n\n is_between(value, 1920, 2002)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 14, "score": 113899.31326193825 }, { "content": "fn valid_eye_color(value: &str) -> bool {\n\n if !VALID_EYE_COLORS.contains(&value) {\n\n false\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 15, "score": 113899.31326193825 }, { "content": "fn valid_hair_color(value: &str) -> bool {\n\n let length = value.len();\n\n let mut color = value.chars();\n\n if color.next() == Some('#') {\n\n if color.any(|c| !VALID_HEX_DIGITS.contains(c)) {\n\n false\n\n } else if length != 7 {\n\n // 6 required digits plus #\n\n false\n\n } else {\n\n true\n\n }\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 16, "score": 113899.31326193825 }, { "content": "fn valid_issue_year(value: &str) -> bool {\n\n if value.chars().count() == 4 {\n\n is_between(value, 2010, 2020)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 17, "score": 113899.31326193825 }, { "content": "fn valid_expiration_year(value: &str) -> bool {\n\n if value.chars().count() == 4 {\n\n is_between(value, 2020, 2030)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 18, "score": 113899.31326193825 }, { "content": "fn validate(preamble: &[u64], num: u64) -> bool {\n\n for (i, num_a) in preamble.iter().enumerate() {\n\n for (j, num_b) in preamble.iter().enumerate() {\n\n if i != j && num_a != num_b {\n\n if num_a + num_b == num {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/day9.rs", "rank": 19, "score": 108563.88051347993 }, { "content": "fn range<T>(area: &HashMap<Coord, T>) -> (i32, i32, i32, i32) {\n\n let max_x = *area.iter().map(|(Coord(x, y), _)| x).max().unwrap();\n\n let max_y = 
*area.iter().map(|(Coord(x, y), _)| y).max().unwrap();\n\n let min_x = *area.iter().map(|(Coord(x, y), _)| x).min().unwrap();\n\n let min_y = *area.iter().map(|(Coord(x, y), _)| y).min().unwrap();\n\n (min_x, max_x, min_y, max_y)\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 20, "score": 102613.82000249944 }, { "content": "pub fn pre_parse_input(file: File) -> Vec<String> {\n\n let lines: Vec<String> = BufReader::new(&file)\n\n .lines()\n\n .filter_map(|l| l.ok())\n\n .collect();\n\n lines\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 21, "score": 100193.9025695248 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct Example {\n\n result: (String, String),\n\n input: ExampleInput,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 23, "score": 91220.41009805514 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct ExampleFile {\n\n examples: Vec<Example>,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 24, "score": 88323.61228075708 }, { "content": "fn test_run(mut program: Vec<(Operation, i32)>, instruction: usize) -> Option<i32> {\n\n let (op, _) = &mut program[instruction];\n\n use Operation::*;\n\n *op = match op {\n\n Jmp => Nop,\n\n Nop => Jmp,\n\n Acc => panic!(\"changed wrong instruction\"),\n\n };\n\n let mut visited: HashSet<usize> = HashSet::new();\n\n let mut accumulator: i32 = 0;\n\n let mut current: usize = 0;\n\n loop {\n\n visited.insert(current);\n\n use Operation::*;\n\n match program[current] {\n\n (Acc, arg) => {\n\n accumulator += arg;\n\n current += 1;\n\n }\n\n (Jmp, arg) => current = ((current as i32) + arg) as usize,\n", "file_path": "src/day8.rs", "rank": 25, "score": 86441.15064035029 }, { "content": "fn validate_stream(start: usize, length: usize, stream: &Vec<u64>, test: u64) -> bool {\n\n let numbers = &stream[start..start + length];\n\n let sum = numbers.iter().sum();\n\n test == sum\n\n}\n", "file_path": "src/day9.rs", "rank": 26, "score": 86012.62247694944 }, { "content": "pub fn part1(lines: &Vec<String>) -> 
PartResult {\n\n let expenses: Vec<i32> = lines\n\n .iter()\n\n .map(|line| line.parse::<i32>().unwrap())\n\n .collect();\n\n let result = expenses\n\n .iter()\n\n .flat_map(|a| std::iter::repeat(a).zip(expenses.iter()))\n\n .map(|(a, b)| (a + b, a, b))\n\n .filter(|(n, _, _)| *n == 2020)\n\n // .inspect(|t| println!(\"results: {:?}\", t))\n\n .map(|(_, a, b)| a * b)\n\n .next()\n\n .unwrap();\n\n Ok(result.to_string())\n\n}\n\n\n", "file_path": "src/day1.rs", "rank": 27, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let trees = parse_input(&lines);\n\n let trees = count_trees(&trees, Coord(1, 1))\n\n * count_trees(&trees, Coord(3, 1))\n\n * count_trees(&trees, Coord(5, 1))\n\n * count_trees(&trees, Coord(7, 1))\n\n * count_trees(&trees, Coord(1, 2));\n\n Ok(trees.to_string())\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 28, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 29, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day21.rs", "rank": 30, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let passports: Passports<Strict> = lines.iter().collect();\n\n Ok(passports.count_valid().to_string())\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 31, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n", "file_path": "src/day17.rs", "rank": 32, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let mut direction = 0; // heading in degrees. 
0 will be east\n\n let destination = lines\n\n .iter()\n\n .map(|line| {\n\n // parse the input into a tuple of command and value\n\n let mut chars = line.chars();\n\n let command = chars.next().unwrap();\n\n let value: String = chars.collect();\n\n (command, value.parse::<i32>().unwrap())\n\n })\n\n // .inspect(|(c, v)|println!(\"command {} value {}\", c, v))\n\n .map(|(c, v)| {\n\n // add in the current direction to every element\n\n if c == 'R' {\n\n direction -= v;\n\n } else if c == 'L' {\n\n direction += v;\n\n }\n\n direction += 360;\n", "file_path": "src/day12.rs", "rank": 33, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let passports: Passports<Careless> = lines.iter().collect();\n\n Ok(passports.count_valid().to_string())\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 34, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day20.rs", "rank": 35, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let mut waypoint = (10, 1);\n\n let destination = lines\n\n .iter()\n\n .map(|line| {\n\n // parse the input into a tuple of command and value\n\n let mut chars = line.chars();\n\n let command = chars.next().unwrap();\n\n let value: String = chars.collect();\n\n (command, value.parse::<i32>().unwrap())\n\n })\n\n // .inspect(|(c, v)|println!(\"command {} value {}\", c, v))\n\n .map(|(c, v)| {\n\n // add in the current waypoint to every element\n\n match c {\n\n 'N' => waypoint.1 += v,\n\n 'S' => waypoint.1 -= v,\n\n 'E' => waypoint.0 += v,\n\n 'W' => waypoint.0 -= v,\n\n 'R' => match v {\n", "file_path": "src/day12.rs", "rank": 36, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 37, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: 
&Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n", "file_path": "src/day15.rs", "rank": 38, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let program: Vec<(Operation, i32)> = lines\n\n .iter()\n\n .map(|line| {\n\n let mut instruction = line.split(\" \");\n\n (\n\n get_operation(&mut instruction).unwrap(),\n\n get_argument(&mut instruction).unwrap(),\n\n )\n\n })\n\n .collect();\n\n\n\n let result: i32 = program\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, (op, _))| {\n\n if *op == Operation::Jmp || *op == Operation::Nop {\n\n Some(i)\n\n } else {\n\n None\n\n }\n\n })\n\n .filter_map(|i| test_run(program.clone(), i))\n\n .next()\n\n .unwrap();\n\n\n\n Ok(result.to_string())\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 39, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day25.rs", "rank": 40, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let expenses: Vec<i32> = lines\n\n .iter()\n\n .map(|line| {\n\n let num = line.parse::<i32>().unwrap();\n\n num\n\n })\n\n .collect();\n\n let result = expenses\n\n .iter()\n\n .flat_map(|&a| std::iter::repeat(a).zip(expenses.iter()))\n\n .flat_map(|(a, &b)| {\n\n std::iter::repeat(a)\n\n .zip(std::iter::repeat(b))\n\n .zip(expenses.iter())\n\n })\n\n .map(|((a, b), &c)| (a + b + c, a, b, c))\n\n .filter(|(n, _, _, _)| *n == 2020)\n\n // .inspect(|t| println!(\"results: {:?}\", t))\n\n .map(|(_, a, b, c)| a * b * c)\n\n .next()\n\n .unwrap();\n\n Ok(result.to_string())\n\n}\n", "file_path": "src/day1.rs", "rank": 41, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 42, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n 
Ok(\"\".to_string())\n\n}\n", "file_path": "src/day22.rs", "rank": 43, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n", "file_path": "src/day23.rs", "rank": 44, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let mut signal = lines.iter().map(|line| line.parse::<u64>().unwrap());\n\n let mut preamble = vec![];\n\n // load preamble\n\n for _ in 0..25 {\n\n preamble.push(signal.next().unwrap());\n\n }\n\n // println!(\"finished loading preamble: {:?}\", preamble);\n\n let result = loop {\n\n let next_num = signal.next().unwrap();\n\n\n\n if !validate(&preamble, next_num) {\n\n // println!(\"{} doesnt validate\", next_num);\n\n break next_num;\n\n } else {\n\n // println!(\"{} validates\", next_num);\n\n preamble.remove(0);\n\n preamble.push(next_num);\n\n }\n\n };\n\n Ok(result.to_string())\n\n}\n\n\n", "file_path": "src/day9.rs", "rank": 45, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let reverse_rules: HashMap<&str, Vec<&str>> = lines\n\n .iter()\n\n .flat_map(|line| {\n\n let mut tokens = line.split(\"bags contain\");\n\n let bag_type = tokens.next().unwrap().trim();\n\n tokens\n\n .next()\n\n .unwrap()\n\n .split(',')\n\n .map(|s| s.trim())\n\n .filter_map(|span| {\n\n let mut spans = span.splitn(2, ' ');\n\n spans\n\n .next()\n\n .unwrap()\n\n .trim()\n\n .parse::<usize>()\n\n .ok()\n\n .map(|num| {\n", "file_path": "src/day7.rs", "rank": 46, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let trees = parse_input(&lines);\n\n let points = count_trees(&trees, Coord(3, 1));\n\n Ok(points.to_string())\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 47, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n", "file_path": "src/day14.rs", "rank": 48, "score": 
79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let best_seat = lines\n\n .iter()\n\n .map(|seat| {\n\n seat.chars()\n\n .rev()\n\n .enumerate()\n\n .filter(|(_, c)| *c == 'B' || *c == 'R')\n\n .fold(0, |a, (i, _)| a | (1 << i))\n\n })\n\n .max()\n\n .unwrap();\n\n Ok(best_seat.to_string())\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 49, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let mut plane = vec![];\n\n {\n\n let mut group: Vec<HashSet<char>> = vec![];\n\n for line in lines.iter() {\n\n if line.is_empty() {\n\n plane.push(group.clone());\n\n group.clear();\n\n } else {\n\n let pairs: HashSet<char> = line.chars().collect();\n\n group.push(pairs);\n\n }\n\n }\n\n if group.len() > 0 {\n\n plane.push(group.clone());\n\n group.clear();\n\n }\n\n }\n\n\n\n let result: usize = plane\n", "file_path": "src/day6.rs", "rank": 50, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day24.rs", "rank": 51, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let mut waiting_area: HashMap<Coord, Square> = lines\n\n .iter()\n\n .enumerate()\n\n .flat_map(|(y, line)| -> Vec<_> {\n\n line.chars()\n\n .enumerate()\n\n .filter_map(|(x, c)| {\n\n if c == 'L' {\n\n Some((Coord(x as i32, y as i32), Square::Seat(Seat::Empty)))\n\n } else {\n\n Some((Coord(x as i32, y as i32), Square::Floor))\n\n }\n\n })\n\n .collect()\n\n })\n\n .collect();\n\n while tick_distant(&mut waiting_area) {}\n\n let result = count_occupied(&waiting_area);\n\n Ok(result.to_string())\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 52, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let rules: HashMap<&str, Vec<(usize, &str)>> = lines\n\n .iter()\n\n .map(|line| {\n\n let mut tokens = line.split(\"bags contain\");\n\n 
let bag_type = tokens.next().unwrap().trim();\n\n let children = tokens\n\n .next()\n\n .unwrap()\n\n .split(',')\n\n .map(|s| s.trim())\n\n .filter_map(|span| {\n\n let mut spans = span.splitn(2, ' ');\n\n spans\n\n .next()\n\n .unwrap()\n\n .trim()\n\n .parse::<usize>()\n\n .ok()\n\n .map(|num| {\n", "file_path": "src/day7.rs", "rank": 53, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let mut waiting_area: HashMap<Coord, Square> = lines\n\n .iter()\n\n .enumerate()\n\n .flat_map(|(y, line)| -> Vec<_> {\n\n line.chars()\n\n .enumerate()\n\n .filter_map(|(x, c)| {\n\n if c == 'L' {\n\n Some((Coord(x as i32, y as i32), Square::Seat(Seat::Empty)))\n\n } else {\n\n Some((Coord(x as i32, y as i32), Square::Floor))\n\n }\n\n })\n\n .collect()\n\n })\n\n .collect();\n\n while tick(&mut waiting_area) {}\n\n let result = count_occupied(&waiting_area);\n\n Ok(result.to_string())\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 54, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let test_stream = lines.iter().map(|line| line.parse::<u64>().unwrap());\n\n let mut signal = test_stream.clone();\n\n let mut preamble = vec![];\n\n for _ in 0..25 {\n\n preamble.push(signal.next().unwrap());\n\n }\n\n // println!(\"finished loading preamble: {:?}\", preamble);\n\n let test = loop {\n\n let next_num = signal.next().unwrap();\n\n\n\n if !validate(&preamble, next_num) {\n\n // println!(\"{} doesnt validate\", next_num);\n\n break next_num;\n\n } else {\n\n // println!(\"{} validates\", next_num);\n\n preamble.remove(0);\n\n preamble.push(next_num);\n\n }\n\n };\n", "file_path": "src/day9.rs", "rank": 55, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 56, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let rules = 
lines[1]\n\n .split(',')\n\n .enumerate()\n\n .filter_map(|(i, s)| {\n\n let n = s.parse::<i64>();\n\n if let Ok(num) = n {\n\n Some((i as i64, num))\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<Vec<(i64, i64)>>();\n\n\n\n let (max_i, max) = rules.iter().max_by(|(i, b), (ii, bb)| b.cmp(bb)).unwrap();\n\n println!(\"rules: {:?}\", rules);\n\n let slope: i64 = rules.iter().map(|(_, b)| b).product();\n\n\n\n println!(\"determined slope: {}\", slope);\n\n let zeroth = rules\n", "file_path": "src/day13.rs", "rank": 57, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 58, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n", "file_path": "src/day21.rs", "rank": 59, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let timestamp = lines[0].parse::<i32>().unwrap();\n\n let (best_bus, time) = lines[1]\n\n .split(',')\n\n .filter_map(|n| n.parse::<i32>().ok())\n\n .map(|bus| {\n\n let mut time = bus;\n\n while time < timestamp {\n\n time += bus;\n\n }\n\n (bus, time)\n\n })\n\n .map(|(bus, time)| (bus, time - timestamp))\n\n .min_by(|(_, time_a), (_, time_b)| time_a.cmp(time_b))\n\n .unwrap();\n\n Ok((best_bus * time).to_string())\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 60, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let mut seats: Vec<u32> = lines\n\n .iter()\n\n .map(|seat| {\n\n // let count = seat.len() - 1;\n\n seat.chars()\n\n .rev()\n\n .enumerate()\n\n .filter(|(_, c)| *c == 'B' || *c == 'R')\n\n .fold(0, |a, (i, _)| a | (1 << i))\n\n })\n\n .collect();\n\n seats.sort_unstable();\n\n let my_seat: u32 = seats\n\n .iter()\n\n .zip(seats.iter().skip(1))\n\n .filter_map(|(&a, &b)| if b > a + 1 { Some(a + 1) } else { None })\n\n .next()\n\n .unwrap();\n\n 
Ok(my_seat.to_string())\n\n}\n", "file_path": "src/day5.rs", "rank": 61, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day15.rs", "rank": 62, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let passwords: Vec<Password> = lines\n\n .iter()\n\n .map(|line| line.parse::<Password>().unwrap())\n\n .collect();\n\n let number = passwords\n\n .iter()\n\n .filter(|password| password.is_valid_tobaggan())\n\n .count();\n\n Ok(number.to_string())\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 63, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let total: i64 = lines\n\n .iter()\n\n // .inspect(|expr| println!(\"{}\", expr))\n\n // .filter_map(|line| {\n\n // println!(\"------------------------------------\");\n\n // consume_expression(&mut Walk::wrap(line)).ok()\n\n // })\n\n // .inspect(|expr| println!(\"{}\", expr))\n\n // .map(|e|e.evaluate())\n\n // .inspect(|result| println!(\"{:?}\", result))\n\n // .sum();\n\n .map(|line| {\n\n line.chars()\n\n .filter(|c| *c != ' ')\n\n .fold(ParseState::Free, |s, c| s.push(c))\n\n })\n\n // .inspect(|expr| println!(\"{:?}\", expr))\n\n .map(|s| s.express().evaluate())\n\n .sum();\n\n // .fold(ParseState::Free, |s, c|)\n\n\n\n Ok(total.to_string())\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 64, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n", "file_path": "src/day25.rs", "rank": 65, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n", "file_path": "src/day16.rs", "rank": 66, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day23.rs", "rank": 67, "score": 79473.70221590495 }, { 
"content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let mut plane = vec![];\n\n {\n\n let mut group: HashSet<char> = HashSet::new();\n\n for line in lines.iter() {\n\n if line.is_empty() {\n\n plane.push(group.clone());\n\n group.clear();\n\n } else {\n\n let pairs = line.chars();\n\n for pair in pairs {\n\n group.insert(pair);\n\n }\n\n }\n\n }\n\n if group.len() > 0 {\n\n plane.push(group.clone());\n\n group.clear();\n\n }\n\n }\n\n let result: usize = plane.iter().map(|group| group.len()).sum();\n\n Ok(result.to_string())\n\n}\n\n\n", "file_path": "src/day6.rs", "rank": 68, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n", "file_path": "src/day19.rs", "rank": 69, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 70, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let program: Vec<(Operation, i32)> = lines\n\n .iter()\n\n .map(|line| {\n\n let mut instruction = line.split(\" \");\n\n (\n\n get_operation(&mut instruction).unwrap(),\n\n get_argument(&mut instruction).unwrap(),\n\n )\n\n })\n\n .collect();\n\n let mut visited: HashSet<usize> = HashSet::new();\n\n let mut accumulator: i32 = 0;\n\n let mut current: usize = 0;\n\n let mut stop = false;\n\n while !stop {\n\n visited.insert(current);\n\n use Operation::*;\n\n match program[current] {\n\n (Acc, arg) => {\n", "file_path": "src/day8.rs", "rank": 71, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let passwords: Vec<Password> = lines\n\n .iter()\n\n .map(|line| line.parse::<Password>().unwrap())\n\n .collect();\n\n let number = passwords\n\n .iter()\n\n .filter(|password| password.is_valid_sled())\n\n .count();\n\n Ok(number.to_string())\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 72, "score": 
79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n let mut adapters = lines\n\n .iter()\n\n .map(|line| line.parse::<i32>().unwrap())\n\n .collect::<Vec<_>>();\n\n adapters.sort();\n\n // find inflection points in the input data\n\n // where we know all paths must cross\n\n let mut inflection_points = adapters\n\n .iter()\n\n .skip(1)\n\n .zip(adapters.iter())\n\n .filter(|(b, a)| **b == **a + 3)\n\n .map(|(b, _)| *b)\n\n .collect::<Vec<i32>>();\n\n let mut visited_inflection_points: HashMap<i32, usize> = HashMap::new();\n\n let mut wavefront: Vec<usize> = Vec::new();\n\n let max = adapters[adapters.len() - 1];\n\n inflection_points.push(max);\n\n // load starting points into wavefront\n", "file_path": "src/day10.rs", "rank": 73, "score": 79473.70221590495 }, { "content": "pub fn part1(lines: &Vec<String>) -> PartResult {\n\n let flip_tiles = lines\n\n .iter()\n\n .map(|line| -> (i32, i32, i32) {\n\n let chars = line.chars().collect::<Vec<char>>();\n\n let mut directions = vec![];\n\n let mut position = 0;\n\n while position < chars.len() {\n\n let (next, pos) = consume_direction(&chars, position);\n\n directions.push(next);\n\n position = pos;\n\n }\n\n directions\n\n .iter()\n\n .map(|dir| dir.to_coords())\n\n .fold((0, 0, 0), |a, d| (a.0 + d.0, a.1 + d.1, a.2 + d.2))\n\n })\n\n .collect::<Vec<(i32, i32, i32)>>();\n\n let mut visited: HashMap<(i32, i32, i32), u32> = HashMap::new();\n\n for tile in flip_tiles.into_iter() {\n\n *visited.entry(tile).or_insert(0) += 1;\n\n }\n\n let black = visited\n\n .drain()\n\n .filter(|(tile, turns)| turns % 2 == 1)\n\n .count();\n\n Ok(black.to_string())\n\n}\n\n\n", "file_path": "src/day24.rs", "rank": 74, "score": 79473.70221590495 }, { "content": "// we make some pretty extensive assumptions here.\n\npub fn part1(lines: &Vec<String>) -> PartResult {\n\n let mut adapters = lines\n\n .iter()\n\n .map(|line| line.parse::<i32>().unwrap())\n\n .collect::<Vec<_>>();\n\n adapters.sort();\n\n 
let groups = adapters\n\n .iter()\n\n .zip(adapters.iter().skip(1))\n\n .map(|(c, n)| n - c);\n\n // counting is off by one so instead of fixing just add one.\n\n let ones = groups.clone().filter(|n| *n == 1).count() + 1;\n\n let threes = groups.clone().filter(|n| *n == 3).count() + 1;\n\n let result = ones * threes;\n\n // println!(\"found {} ones and {} threes\", ones, threes);\n\n Ok(result.to_string())\n\n}\n\n\n", "file_path": "src/day10.rs", "rank": 75, "score": 79473.70221590495 }, { "content": "pub fn part2(lines: &Vec<String>) -> PartResult {\n\n Ok(\"\".to_string())\n\n}\n", "file_path": "src/day20.rs", "rank": 76, "score": 79473.70221590495 }, { "content": "fn tick(area: &mut HashMap<Coord, Square>) -> bool {\n\n let o = area.clone();\n\n let mut changes: Vec<(Coord, Square)> = vec![];\n\n for (coord, square) in area.iter() {\n\n if let Square::Seat(seat) = square {\n\n let directions = [-1, 0, 1];\n\n let neighbors = directions\n\n .iter()\n\n .flat_map(|dx: &i32| -> Vec<(i32, i32)> {\n\n directions.iter().map(|dy| (*dx, *dy)).collect()\n\n })\n\n .filter(|(x, y)| !(*x == 0 && *y == 0))\n\n .map(|(x, y)| Coord(coord.0 + x, coord.1 + y))\n\n .filter(|coord| occupied(area, coord))\n\n .count();\n\n if neighbors >= 4 {\n\n changes.push((*coord, Square::Seat(Seat::Empty)));\n\n } else if neighbors == 0 {\n\n changes.push((*coord, Square::Seat(Seat::Full)));\n\n }\n\n }\n\n }\n\n for (coord, seat) in changes.iter() {\n\n area.entry(*coord).and_modify(|e| *e = *seat);\n\n }\n\n o != *area\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 77, "score": 78936.31327987087 }, { "content": "fn is_between(value: &str, min: u32, max: u32) -> bool {\n\n let number: Result<u32, _> = value.parse();\n\n match number {\n\n Ok(num) => num >= min && num <= max,\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 78, "score": 78587.59542132812 }, { "content": "fn tick_distant(area: &mut HashMap<Coord, Square>) -> bool {\n\n let o = area.clone();\n\n 
let mut changes: Vec<(Coord, Square)> = vec![];\n\n for (coord, square) in area.iter() {\n\n if let Square::Seat(_) = square {\n\n let directions = [-1, 0, 1];\n\n let neighbors = directions\n\n .iter()\n\n .flat_map(|dx: &i32| -> Vec<(i32, i32)> {\n\n directions.iter().map(|dy| (*dx, *dy)).collect()\n\n })\n\n .filter(|(x, y)| !(*x == 0 && *y == 0))\n\n .map(|(dx, dy)| {\n\n let (mut x, mut y) = (coord.0, coord.1);\n\n x += dx;\n\n y += dy;\n\n while area.get(&Coord(x, y)) == Some(&Square::Floor) {\n\n x += dx;\n\n y += dy;\n\n }\n", "file_path": "src/day11.rs", "rank": 79, "score": 77424.9098948985 }, { "content": "fn get_argument<'a, T>(mut iter: T) -> Option<i32>\n\nwhere\n\n T: Iterator<Item = &'a str>,\n\n{\n\n match iter.next().unwrap().parse::<i32>() {\n\n Ok(arg) => Some(arg),\n\n Err(_) => None,\n\n }\n\n}\n", "file_path": "src/day8.rs", "rank": 80, "score": 76959.09622173809 }, { "content": "fn occupied(area: &HashMap<Coord, Square>, coord: &Coord) -> bool {\n\n area.contains_key(coord) && area.get(coord) == Some(&Square::Seat(Seat::Full))\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 81, "score": 75447.6928367657 }, { "content": "struct Careless;\n\nimpl Mode for Careless {\n\n fn create_passport(map: HashMap<String, String>) -> Passport {\n\n let valid = REQUIRED_KEYS\n\n .iter()\n\n .all(|key| map.contains_key(key.to_owned()));\n\n use Passport::*;\n\n match valid {\n\n true => Valid(map),\n\n false => Invalid(map),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 82, "score": 60949.06512294315 }, { "content": "#[derive(Debug)]\n\nstruct Password {\n\n a: i32,\n\n b: i32,\n\n letter: char,\n\n password: String,\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 83, "score": 60949.06512294315 }, { "content": "struct Strict;\n\nimpl Mode for Strict {\n\n fn create_passport(map: HashMap<String, String>) -> Passport {\n\n let valid = REQUIRED_KEYS\n\n .iter()\n\n .all(|key| map.contains_key(key.to_owned()));\n\n if valid {\n\n let mut items: 
Vec<(&String, &String)> = map.iter().collect();\n\n items.sort();\n\n for (key, value) in items.iter() {\n\n let strict_valid = match key.as_str() {\n\n \"byr\" => valid_birth_year(value),\n\n \"iyr\" => valid_issue_year(value),\n\n \"eyr\" => valid_expiration_year(value),\n\n \"hgt\" => valid_height(value),\n\n \"ecl\" => valid_eye_color(value),\n\n \"hcl\" => valid_hair_color(value),\n\n \"pid\" => valid_pid(value),\n\n \"cid\" => true,\n\n _ => false,\n", "file_path": "src/day4.rs", "rank": 84, "score": 60949.06512294315 }, { "content": "fn parse_input(lines: &Vec<String>) -> HashSet<Coord> {\n\n lines\n\n .iter()\n\n .enumerate()\n\n .flat_map(|(y, line)| -> Vec<Coord> {\n\n line.chars()\n\n .enumerate()\n\n .filter_map(|(x, c)| if c == '#' { Some(Coord(x, y)) } else { None })\n\n .collect()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 85, "score": 59737.98593422999 }, { "content": "struct CoordIter {\n\n current: Coord,\n\n delta: Coord,\n\n}\n\n\n\nimpl Iterator for CoordIter {\n\n type Item = Coord;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let x = (self.current.0 + self.delta.0) % 31;\n\n let y = self.current.1 + self.delta.1;\n\n if y > 322 {\n\n None\n\n } else {\n\n let c = Coord(x, y);\n\n self.current = c;\n\n Some(c)\n\n }\n\n }\n\n}\n", "file_path": "src/day3.rs", "rank": 86, "score": 59506.58459092627 }, { "content": "struct Walk<'a> {\n\n inner: &'a str,\n\n index: usize,\n\n}\n\n\n\nimpl<'a> Walk<'a> {\n\n fn next(&mut self) -> Option<char> {\n\n loop {\n\n let c = self.inner.chars().nth(self.index);\n\n if let Some(' ') = c {\n\n self.index += 1;\n\n } else {\n\n self.index += 1;\n\n break c;\n\n }\n\n }\n\n }\n\n fn backward(&mut self) {\n\n self.index -= 1;\n\n }\n\n fn wrap(inner: &'a str) -> Self {\n\n Walk { inner, index: 0 }\n\n }\n\n}\n", "file_path": "src/day18.rs", "rank": 87, "score": 58506.60202793553 }, { "content": "#[derive(Serialize, Deserialize)]\n\nenum ExampleInput {\n\n File(String),\n\n 
Text(String),\n\n}\n", "file_path": "src/lib.rs", "rank": 88, "score": 57546.449253077866 }, { "content": "#[derive(Debug)]\n\nstruct Passports<M> {\n\n inner: Vec<Passport>,\n\n _mode: PhantomData<M>,\n\n}\n\n\n\nimpl<'a, M: Mode> FromIterator<&'a String> for Passports<M> {\n\n fn from_iter<I: IntoIterator<Item = &'a String>>(iter: I) -> Self {\n\n let mut inner = vec![];\n\n let mut current: HashMap<String, String> = HashMap::new();\n\n for line in iter {\n\n if line.is_empty() {\n\n let passport = M::create_passport(current.clone());\n\n inner.push(passport);\n\n current.clear();\n\n } else {\n\n let pairs = line.split(\" \");\n\n for pair in pairs {\n\n let mut p = pair.split(\":\");\n\n let key = p.next().unwrap().to_string();\n\n let value = p.next().unwrap().to_string();\n", "file_path": "src/day4.rs", "rank": 89, "score": 57064.12149591865 }, { "content": "#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]\n\nstruct Coord(usize, usize);\n\n\n\nimpl Coord {\n\n fn into_iter(self) -> CoordIter {\n\n CoordIter {\n\n current: Coord(0, 0),\n\n delta: self,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 90, "score": 53671.96485212383 }, { "content": "fn print_area(area: &HashMap<Coord, Square>) {\n\n let (min_x, max_x, min_y, max_y) = range(area);\n\n for y in min_y..=max_y {\n\n for x in min_x..=max_x {\n\n if let Some(square) = area.get(&Coord(x, y)) {\n\n if let Square::Seat(seat) = square {\n\n if *seat == Seat::Full {\n\n print!(\"#\");\n\n } else {\n\n print!(\"L\");\n\n }\n\n } else {\n\n print!(\".\");\n\n }\n\n } else {\n\n panic!(\"map not rectangular?\");\n\n }\n\n }\n\n println!(\"\");\n\n }\n\n println!(\"\\nOccupied: {}\", count_occupied(&area));\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 91, "score": 39327.66056861054 }, { "content": "fn consume_factor(stream: &mut Walk) -> Result<Factor, ()> {\n\n let maybe_number = stream.next().ok_or(())?;\n\n if let Ok(num) = maybe_number.to_string().parse::<i64>() {\n\n 
Ok(Factor::Number(num))\n\n } else {\n\n if maybe_number == '(' {\n\n println!(\"found open paren\");\n\n let expr = consume_expression(stream)?;\n\n let expect_paren = stream.next().ok_or(())?;\n\n if expect_paren == ')' {\n\n println!(\"found close paren\");\n\n Ok(Factor::Expression(Box::new(expr)))\n\n } else {\n\n stream.backward();\n\n stream.backward();\n\n println!(\"f4\");\n\n Err(())\n\n }\n\n } else {\n\n stream.backward();\n\n println!(\"f5\");\n\n Err(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 92, "score": 38207.79887245178 }, { "content": "fn consume_operation(stream: &mut Walk) -> Result<char, ()> {\n\n let maybe_operation = stream.next().ok_or(())?;\n\n if maybe_operation == '+' {\n\n Ok('+')\n\n } else if maybe_operation == '*' {\n\n Ok('*')\n\n } else {\n\n stream.backward();\n\n Err(())\n\n }\n\n}\n\n\n\n// fn consume_left(first: char, stream: &mut Walk) -> Result<Expression, ()> {\n\n// // let left = consume_factor(stream)?;\n\n// let left = first;\n\n// if let Ok(op) = consume_operation(stream) {\n\n// let right = consume_factor(stream)?;\n\n// let expr = match op {\n\n// '+' => Expression::Addition(left, right),\n\n// '*' => Expression::Multiplication(left, right),\n\n// _ => panic!(\"???\"),\n\n// };\n\n// Ok(expr)\n\n// } else {\n\n// Ok(Expression::Factor(left))\n\n// }\n\n// }\n\n\n", "file_path": "src/day18.rs", "rank": 93, "score": 38207.79887245178 }, { "content": "fn consume_expression(stream: &mut Walk) -> Result<Expression, ()> {\n\n // let left = consume_left(stream.next().ok_or(())?, stream);\n\n // if let Ok(op) = consume_operation(stream) {\n\n // let right = consume_factor(stream)?;\n\n // let expr = match op {\n\n // '+' => Expression::Addition(left, right),\n\n // '*' => Expression::Multiplication(left, right),\n\n // _ => panic!(\"???\"),\n\n // };\n\n // Ok(expr)\n\n // } else {\n\n // Ok(Expression::Factor(left))\n\n // }\n\n let factor_a = consume_factor(stream)?;\n\n println!(\"found factor {}\", 
factor_a);\n\n if let Some(maybe_operation) = stream.next() {\n\n if maybe_operation == '+' {\n\n println!(\"+\");\n\n let factor_b = consume_factor(stream)?;\n\n // if let Ok(factor_b) = maybe_factor {\n", "file_path": "src/day18.rs", "rank": 94, "score": 38207.79887245178 }, { "content": "fn count_occupied(area: &HashMap<Coord, Square>) -> usize {\n\n area.iter()\n\n .filter(|(_, s)| **s == Square::Seat(Seat::Full))\n\n .count()\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 95, "score": 37436.97085336695 }, { "content": "fn count_trees(map: &HashSet<Coord>, slope: Coord) -> usize {\n\n slope.into_iter().filter(|c| map.contains(c)).count()\n\n}\n\n\n", "file_path": "src/day3.rs", "rank": 96, "score": 35728.524107939025 }, { "content": "fn get_operation<'a, T>(mut iter: T) -> Option<Operation>\n\nwhere\n\n T: Iterator<Item = &'a str>,\n\n{\n\n match iter.next().unwrap() {\n\n \"acc\" => Some(Operation::Acc),\n\n \"jmp\" => Some(Operation::Jmp),\n\n \"nop\" => Some(Operation::Nop),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 97, "score": 35557.311721297745 }, { "content": "fn consume_direction(stream: &Vec<char>, position: usize) -> (Direction, usize) {\n\n let c = stream[position];\n\n match c {\n\n 's' => {\n\n let cc = stream[position + 1];\n\n match cc {\n\n 'e' => (Direction::SouthEast, position + 2),\n\n 'w' => (Direction::SouthWest, position + 2),\n\n _ => panic!(\"unexpected input\"),\n\n }\n\n }\n\n 'n' => {\n\n let cc = stream[position + 1];\n\n match cc {\n\n 'e' => (Direction::NorthEast, position + 2),\n\n 'w' => (Direction::NorthWest, position + 2),\n\n _ => panic!(\"unexpected input\"),\n\n }\n\n }\n\n 'e' => (Direction::East, position + 1),\n\n 'w' => (Direction::West, position + 1),\n\n _ => panic!(\"unexpected input\"),\n\n }\n\n}\n", "file_path": "src/day24.rs", "rank": 98, "score": 34841.909418631796 } ]
Rust
src/sdk/metrics/mod.rs
zoidbergwill/opentelemetry-rust
30e65af8942c5c7a635c86bd7a02001b833030d5
use crate::api; use crate::exporter::metrics::prometheus; use std::borrow::Cow; use std::collections::HashMap; pub type LabelSet = HashMap<Cow<'static, str>, Cow<'static, str>>; impl api::LabelSet for LabelSet {} #[allow(missing_debug_implementations)] pub struct Meter { registry: &'static prometheus::Registry, component: &'static str, } impl Meter { pub fn new(component: &'static str) -> Self { Meter { registry: prometheus::default_registry(), component, } } fn build_opts(&self, name: String, description: String) -> prometheus::Opts { let help = if !description.is_empty() { description } else { format!("{} metric", name) }; prometheus::Opts::new(name, help).namespace(format!("{}_", self.component)) } } impl api::Meter for Meter { type LabelSet = LabelSet; type I64Counter = prometheus::IntCounterVec; type F64Counter = prometheus::CounterVec; type I64Gauge = prometheus::IntGaugeVec; type F64Gauge = prometheus::GaugeVec; type I64Measure = prometheus::IntMeasure; type F64Measure = prometheus::HistogramVec; fn labels(&self, key_values: Vec<api::KeyValue>) -> Self::LabelSet { let mut label_set: Self::LabelSet = Default::default(); for api::KeyValue { key, value } in key_values.into_iter() { label_set.insert(key.into(), value.into()); } label_set } fn new_i64_counter<S: Into<String>>( &self, name: S, opts: api::MetricOptions, ) -> Self::I64Counter { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let counter_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let counter = prometheus::IntCounterVec::new(counter_opts, &labels).unwrap(); self.registry.register(Box::new(counter.clone())).unwrap(); counter } fn new_f64_counter<S: Into<String>>( &self, name: S, opts: api::MetricOptions, ) -> Self::F64Counter { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let counter_opts = self.build_opts(name.into(), description); let labels = 
prometheus::convert_labels(&keys); let counter = prometheus::CounterVec::new(counter_opts, &labels).unwrap(); self.registry.register(Box::new(counter.clone())).unwrap(); counter } fn new_i64_gauge<S: Into<String>>(&self, name: S, opts: api::MetricOptions) -> Self::I64Gauge { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let gauge_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let gauge = prometheus::IntGaugeVec::new(gauge_opts, &labels).unwrap(); self.registry.register(Box::new(gauge.clone())).unwrap(); gauge } fn new_f64_gauge<S: Into<String>>(&self, name: S, opts: api::MetricOptions) -> Self::F64Gauge { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let gauge_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let gauge = prometheus::GaugeVec::new(gauge_opts, &labels).unwrap(); self.registry.register(Box::new(gauge.clone())).unwrap(); gauge } fn new_i64_measure<S: Into<String>>( &self, name: S, opts: api::MetricOptions, ) -> Self::I64Measure { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let common_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let histogram_opts = prometheus::HistogramOpts::from(common_opts); let histogram = prometheus::HistogramVec::new(histogram_opts, &labels).unwrap(); self.registry.register(Box::new(histogram.clone())).unwrap(); prometheus::IntMeasure::new(histogram) } fn new_f64_measure<S: Into<String>>( &self, name: S, opts: api::MetricOptions, ) -> Self::F64Measure { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let common_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let histogram_opts = prometheus::HistogramOpts::from(common_opts); let histogram = 
prometheus::HistogramVec::new(histogram_opts, &labels).unwrap(); self.registry.register(Box::new(histogram.clone())).unwrap(); histogram } fn record_batch<M: IntoIterator<Item = api::Measurement<Self::LabelSet>>>( &self, label_set: &Self::LabelSet, measurements: M, ) { for measure in measurements.into_iter() { let instrument = measure.instrument(); instrument.record_one(measure.into_value(), &label_set); } } }
use crate::api; use crate::exporter::metrics::prometheus; use std::borrow::Cow; use std::collections::HashMap; pub type LabelSet = HashMap<Cow<'static, str>, Cow<'static, str>>; impl api::LabelSet for LabelSet {} #[allow(missing_debug_implementations)] pub struct Meter { registry: &'static prometheus::Registry, component: &'static str, } impl Meter { pub fn new(component: &'static str) -> Self { Meter { registry: prometheus::default_registry(), component, } } fn build_opts(&self, name: String, description: String) -> prometheus::Opts { let help = if !description.is_empty() { description } else { format!("{} metric", name) }; prometheus::Opts::new(name, help).namespace(format!("{}_", self.component)) } } impl api::Meter for Meter { type LabelSet = LabelSet; type I64Counter = prometheus::IntCounterVec; type F64Counter = prometheus::CounterVec; type I64Gauge = prometheus::IntGaugeVec; type F64Gauge = prometheus::GaugeVec; type I64Measure = prometheus::IntMeasure; type F64Measure = prometheus::HistogramVec; fn labels(&self, key_values: Vec<api::KeyValue>) -> Self::LabelSet { let mut label_set: Self::LabelSet = Default::default(); for api::KeyValue { key, value } in key_values.into_iter() { label_set.insert(key.into(), value.into()); } label_set } fn new_i64_counter<S: Into<String>>( &self, name: S, opts: api::MetricOptions, ) -> Self::I64Counter { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let counter_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let counter = prometheus::IntCounterVec::new(counter_opts, &labels).unwrap(); self.registry.register(Box::new(counter.clone())).unwrap(); counter }
fn new_i64_gauge<S: Into<String>>(&self, name: S, opts: api::MetricOptions) -> Self::I64Gauge { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let gauge_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let gauge = prometheus::IntGaugeVec::new(gauge_opts, &labels).unwrap(); self.registry.register(Box::new(gauge.clone())).unwrap(); gauge } fn new_f64_gauge<S: Into<String>>(&self, name: S, opts: api::MetricOptions) -> Self::F64Gauge { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let gauge_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let gauge = prometheus::GaugeVec::new(gauge_opts, &labels).unwrap(); self.registry.register(Box::new(gauge.clone())).unwrap(); gauge } fn new_i64_measure<S: Into<String>>( &self, name: S, opts: api::MetricOptions, ) -> Self::I64Measure { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let common_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let histogram_opts = prometheus::HistogramOpts::from(common_opts); let histogram = prometheus::HistogramVec::new(histogram_opts, &labels).unwrap(); self.registry.register(Box::new(histogram.clone())).unwrap(); prometheus::IntMeasure::new(histogram) } fn new_f64_measure<S: Into<String>>( &self, name: S, opts: api::MetricOptions, ) -> Self::F64Measure { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let common_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let histogram_opts = prometheus::HistogramOpts::from(common_opts); let histogram = prometheus::HistogramVec::new(histogram_opts, &labels).unwrap(); self.registry.register(Box::new(histogram.clone())).unwrap(); histogram } fn record_batch<M: IntoIterator<Item = 
api::Measurement<Self::LabelSet>>>( &self, label_set: &Self::LabelSet, measurements: M, ) { for measure in measurements.into_iter() { let instrument = measure.instrument(); instrument.record_one(measure.into_value(), &label_set); } } }
fn new_f64_counter<S: Into<String>>( &self, name: S, opts: api::MetricOptions, ) -> Self::F64Counter { let api::MetricOptions { description, unit: _unit, keys, alternate: _alternative, } = opts; let counter_opts = self.build_opts(name.into(), description); let labels = prometheus::convert_labels(&keys); let counter = prometheus::CounterVec::new(counter_opts, &labels).unwrap(); self.registry.register(Box::new(counter.clone())).unwrap(); counter }
function_block-full_function
[ { "content": "/// Convert from `sdk::LabelSet` to `prometheus`' label format.\n\nfn convert_label_set(label_set: &sdk::LabelSet) -> HashMap<&str, &str> {\n\n label_set\n\n .iter()\n\n .map(|(key, value)| (key.as_ref(), value.as_ref()))\n\n .collect()\n\n}\n\n\n\n/// Convert from list of `Key`s to prometheus' label format.\n\npub(crate) fn convert_labels(labels: &[Key]) -> Vec<&str> {\n\n labels\n\n .iter()\n\n .map(|k| k.inner())\n\n .map(|k| k.as_ref())\n\n .collect()\n\n}\n\n\n\n/// Prometheus IntCounterHandle\n\n#[derive(Clone)]\n\n#[allow(missing_debug_implementations)]\n\npub struct IntCounterHandle(prometheus::IntCounter);\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 0, "score": 155130.92165890988 }, { "content": "/// Meter is an interface to the metrics portion of the OpenTelemetry SDK.\n\n///\n\n/// The Meter interface allows creating of a registered metric instrument using methods specific to\n\n/// each kind of metric. There are six constructors representing the three kinds of instrument\n\n/// taking either floating point or integer inputs, see the detailed design below.\n\n///\n\n/// Binding instruments to a single Meter instance has two benefits:\n\n///\n\n/// 1. Instruments can be exported from the zero state, prior to first use, with no explicit\n\n/// Register call\n\n/// 2. The component name provided by the named Meter satisfies a namespace requirement\n\n///\n\n/// The recommended practice is to define structures to contain the instruments in use and keep\n\n/// references only to the instruments that are specifically needed.\n\n///\n\n/// We recognize that many existing metric systems support allocating metric instruments statically\n\n/// and providing the Meter interface at the time of use. In this example, typical of statsd\n\n/// clients, existing code may not be structured with a convenient place to store new metric\n\n/// instruments. 
Where this becomes a burden, it is recommended to use the global meter factory to\n\n/// construct a static named Meter, to construct metric instruments.\n\n///\n\n/// The situation is similar for users of Prometheus clients, where instruments are allocated\n\n/// statically and there is an implicit global. Such code may not have access to the appropriate\n\n/// Meter where instruments are defined. Where this becomes a burden, it is recommended to use the\n\n/// global meter factory to construct a static named Meter, to construct metric instruments.\n\n///\n\n/// Applications are expected to construct long-lived instruments. Instruments are considered\n\n/// permanent for the lifetime of a SDK, there is no method to delete them.\n\npub trait Meter {\n\n /// The `LabelSet` data type for this meter.\n\n type LabelSet: LabelSet;\n\n /// The `I64Counter` data type for this meter.\n\n type I64Counter: Counter<i64, Self::LabelSet>;\n\n /// The `F64Counter` data type for this meter.\n\n type F64Counter: Counter<f64, Self::LabelSet>;\n\n /// The `I64Gauge` data type for this meter.\n\n type I64Gauge: Gauge<i64, Self::LabelSet>;\n\n /// The `F64Gauge` data type for this meter.\n\n type F64Gauge: Gauge<f64, Self::LabelSet>;\n\n /// The `I64Measure` data type for this meter.\n\n type I64Measure: Measure<i64, Self::LabelSet>;\n\n /// The `F64Measure` data type for this meter.\n\n type F64Measure: Measure<f64, Self::LabelSet>;\n\n\n\n /// Returns a reference to a set of labels that cannot be read by the application.\n\n fn labels(&self, key_values: Vec<api::KeyValue>) -> Self::LabelSet;\n\n\n\n /// Creates a new `i64` counter with a given name and customized with passed options.\n", "file_path": "src/api/metrics/mod.rs", "rank": 1, "score": 117084.59219938946 }, { "content": "/// `LabelSet` is an implementation-level interface that represents a\n\n/// set of `KeyValue` for use as pre-defined labels in the metrics API.\n\npub trait LabelSet {}\n\n\n\n/// `MetricOptions` contains 
some options for metrics of any kind.\n\n#[derive(Default, Debug)]\n\npub struct MetricOptions {\n\n /// Description is an optional field describing the metric instrument.\n\n pub description: String,\n\n\n\n /// Unit is an optional field describing the metric instrument.\n\n /// Valid values are specified according to the\n\n /// [UCUM](http://unitsofmeasure.org/ucum.html).\n\n pub unit: api::Unit,\n\n\n\n /// Keys are dimension names for the given metric.\n\n pub keys: Vec<api::Key>,\n\n\n\n /// Alternate defines the property of metric value dependent on\n\n /// a metric type.\n\n ///\n\n /// - for `Counter`, `true` implies that the metric is an up-down\n", "file_path": "src/api/metrics/mod.rs", "rank": 2, "score": 113645.97507091952 }, { "content": "/// Returns `NoopMeter` for now\n\npub fn global_meter() -> crate::api::NoopMeter {\n\n crate::api::NoopMeter {}\n\n}\n", "file_path": "src/global.rs", "rank": 3, "score": 108584.51110410287 }, { "content": "/// An interface for recording values where the sum is of primary interest.\n\npub trait Counter<T, LS>: metrics::Instrument<LS>\n\nwhere\n\n T: Into<metrics::value::MeasurementValue>,\n\n LS: metrics::LabelSet,\n\n{\n\n /// The handle type for the implementing `Counter`.\n\n type Handle: CounterHandle<T>;\n\n\n\n /// Creates a `Measurement` object to be used by a `Meter` when batch recording.\n\n fn measurement(&self, value: T) -> metrics::Measurement<LS>;\n\n\n\n /// Creates a handle for this counter. 
The labels should contain the\n\n /// keys and values for each key specified in the `LabelSet`.\n\n ///\n\n /// If the labels do not contain a value for the key specified in the\n\n /// `LabelSet`, then the missing value will be treated as unspecified.\n\n fn acquire_handle(&self, labels: &LS) -> Self::Handle;\n\n\n\n /// Adds the value to the `Counter`'s sum.\n\n fn add(&self, value: T, label_set: &LS) {\n\n self.record_one(value.into(), label_set)\n\n }\n\n}\n\n\n", "file_path": "src/api/metrics/counter.rs", "rank": 4, "score": 108223.70495682847 }, { "content": "/// `CounterHandle` is a handle for `Counter` instances.\n\n///\n\n/// It allows for repeated `add` calls for a pre-determined `LabelSet`.\n\npub trait CounterHandle<T>: metrics::InstrumentHandle\n\nwhere\n\n T: Into<metrics::value::MeasurementValue>,\n\n{\n\n /// Add works by calling the underlying `record_one` method\n\n /// available because this trait also implements `InstrumentHandle`.\n\n fn add(&self, value: T) {\n\n self.record_one(value.into())\n\n }\n\n}\n", "file_path": "src/api/metrics/counter.rs", "rank": 5, "score": 96859.98361275371 }, { "content": "/// Returns a reference to the global `Provider`\n\npub fn trace_provider() -> Arc<GlobalProvider> {\n\n GLOBAL_TRACER_PROVIDER\n\n .read()\n\n .expect(\"GLOBAL_TRACER_PROVIDER RwLock poisoned\")\n\n .clone()\n\n}\n\n\n", "file_path": "src/global.rs", "rank": 6, "score": 79255.48490956485 }, { "content": "/// Used to serialize and deserialize `SpanContext`s to and from a binary\n\n/// representation.\n\npub trait BinaryFormat {\n\n /// Serializes span context into a byte array and returns the array.\n\n fn to_bytes(&self, context: &api::SpanContext) -> [u8; 29];\n\n\n\n /// Deserializes a span context from a byte array.\n\n fn from_bytes(&self, bytes: Vec<u8>) -> api::SpanContext;\n\n}\n\n\n", "file_path": "src/api/trace/propagator.rs", "rank": 7, "score": 68804.8781111713 }, { "content": "/// Assigns the global `Tracer`\n\npub fn 
set_provider<P, T, S>(new_provider: P)\n\nwhere\n\n S: api::Span + 'static,\n\n T: api::Tracer<Span = S> + 'static,\n\n P: api::Provider<Tracer = T> + 'static,\n\n{\n\n let mut global_provider = GLOBAL_TRACER_PROVIDER\n\n .write()\n\n .expect(\"GLOBAL_TRACER_PROVIDER RwLock poisoned\");\n\n *global_provider = Arc::new(GlobalProvider::new(new_provider));\n\n}\n\n\n", "file_path": "src/global.rs", "rank": 8, "score": 67850.34369380536 }, { "content": "/// The implementation-level interface to Set/Add/Record individual\n\n/// metrics with precomputed labels.\n\npub trait InstrumentHandle {\n\n /// Allows the SDK to observe a single metric event.\n\n fn record_one(&self, value: MeasurementValue);\n\n}\n\n\n", "file_path": "src/api/metrics/mod.rs", "rank": 9, "score": 67362.14113444944 }, { "content": "///is used to inject and extract a value as text into carriers that travel\n\n/// in-band across process boundaries.\n\npub trait HttpTextFormat {\n\n /// Properly encodes the values of the `SpanContext` and injects them\n\n /// into the `Carrier`.\n\n fn inject(&self, context: api::SpanContext, carrier: &mut dyn Carrier);\n\n\n\n /// Retrieves encoded `SpanContext`s using the `Carrier`. It decodes\n\n /// the `SpanContext` and returns it. 
If no `SpanContext` was retrieved\n\n /// OR if the retrieved SpanContext is invalid then an empty `SpanContext`\n\n /// is returned.\n\n fn extract(&self, carrier: &dyn Carrier) -> api::SpanContext;\n\n}\n\n\n", "file_path": "src/api/trace/propagator.rs", "rank": 10, "score": 66745.12286580661 }, { "content": "/// The implementation-level interface to Set/Add/Record individual\n\n/// metrics without precomputed labels.\n\npub trait Instrument<LS> {\n\n /// Allows the SDK to observe a single metric event for a given set of labels.\n\n fn record_one(&self, value: MeasurementValue, label_set: &LS);\n\n}\n\n\n", "file_path": "src/api/metrics/mod.rs", "rank": 11, "score": 65761.60472401403 }, { "content": "/// `GaugeHandle` is a handle for `Gauge` instances.\n\n///\n\n/// It allows for repeated `set` calls for a pre-determined `LabelSet`.\n\npub trait GaugeHandle<T>: metrics::InstrumentHandle\n\nwhere\n\n T: Into<metrics::value::MeasurementValue>,\n\n{\n\n /// Set works by calling the underlying `record_one` method\n\n /// available because this trait also implements `InstrumentHandle`.\n\n fn set(&self, value: T) {\n\n self.record_one(value.into())\n\n }\n\n}\n", "file_path": "src/api/metrics/gauge.rs", "rank": 12, "score": 64971.504979335616 }, { "content": "/// `MeasureHandle` is a handle for `Measure` instances.\n\n///\n\n/// It allows for repeated `record` calls for a pre-determined `LabelSet`.\n\npub trait MeasureHandle<T>: metrics::InstrumentHandle\n\nwhere\n\n T: Into<metrics::value::MeasurementValue>,\n\n{\n\n /// Record works by calling the underlying `record_one` method\n\n /// available because this trait also implements `InstrumentHandle`.\n\n fn record(&self, value: T) {\n\n self.record_one(value.into())\n\n }\n\n}\n", "file_path": "src/api/metrics/measure.rs", "rank": 13, "score": 64971.504979335616 }, { "content": "/// An interface for recording values where the metric cannot be expressed\n\n/// as a sum or because the measurement interval is 
arbitrary.\n\npub trait Gauge<T, LS>: metrics::Instrument<LS>\n\nwhere\n\n T: Into<metrics::value::MeasurementValue>,\n\n LS: metrics::LabelSet,\n\n{\n\n /// The handle type for the implementing `Gauge`.\n\n type Handle: GaugeHandle<T>;\n\n\n\n /// Creates a `Measurement` object to be used by a `Meter` when batch recording.\n\n fn measurement(&self, value: T) -> metrics::Measurement<LS>;\n\n\n\n /// Creates a handle for this gauge. The labels should contain the\n\n /// keys and values for each key specified in the `LabelSet`.\n\n ///\n\n /// If the labels do not contain a value for the key specified in the\n\n /// `LabelSet`, then the missing value will be treated as unspecified.\n\n fn acquire_handle(&self, labels: &LS) -> Self::Handle;\n\n\n\n /// Assigns the passed value to the value of the gauge. The labels\n\n /// should contain the keys and values for each key specified in\n\n /// the `LabelSet`.\n\n ///\n\n /// If the labels do not contain a value for the key specified in the\n\n /// `LabelSet`, then the missing value will be treated as unspecified.\n\n fn set(&self, value: T, label_set: &LS) {\n\n self.record_one(value.into(), label_set)\n\n }\n\n}\n\n\n", "file_path": "src/api/metrics/gauge.rs", "rank": 14, "score": 63222.05603024627 }, { "content": "/// An interface for recording values where the count or rate of\n\n/// events is meaningful.\n\npub trait Measure<T, LS>: metrics::Instrument<LS>\n\nwhere\n\n T: Into<metrics::value::MeasurementValue>,\n\n LS: metrics::LabelSet,\n\n{\n\n /// The handle type for the implementing `Measure`.\n\n type Handle: MeasureHandle<T>;\n\n\n\n /// Creates a `Measurement` object to be used by a `Meter`\n\n /// when batch recording.\n\n fn measurement(&self, value: T) -> metrics::Measurement<LS>;\n\n\n\n /// Creates a handle for this measure. 
The labels should contain the\n\n /// keys and values for each key specified in the `LabelSet`.\n\n ///\n\n /// If the labels do not contain a value for the key specified in the\n\n /// `LabelSet`, then the missing value will be treated as unspecified.\n\n fn acquire_handle(&self, labels: &LS) -> Self::Handle;\n\n\n\n /// Records the passed value to the value of the measure. The labels\n\n /// should contain the keys and values for each key specified in\n\n /// the `LabelSet`.\n\n ///\n\n /// If the labels do not contain a value for the key specified in the\n\n /// `LabelSet`, then the missing value will be treated as unspecified.\n\n fn record(&self, value: T, label_set: &LS) {\n\n self.record_one(value.into(), label_set)\n\n }\n\n}\n\n\n", "file_path": "src/api/metrics/measure.rs", "rank": 15, "score": 63218.651419923655 }, { "content": " pub fn into_f64(self) -> f64 {\n\n f64::from_bits(self.0.into_inner())\n\n }\n\n}\n\n\n\nimpl From<i64> for MeasurementValue {\n\n /// Convert `i64` instances to `MeasurementValue` instances for use by\n\n /// `Instrument`s.\n\n fn from(value: i64) -> Self {\n\n MeasurementValue(atomic::AtomicU64::new(value as u64))\n\n }\n\n}\n\n\n\nimpl From<f64> for MeasurementValue {\n\n /// Convert `f64` instances to `MeasurementValue` instances for use by\n\n /// `Instrument`s.\n\n fn from(value: f64) -> Self {\n\n MeasurementValue(atomic::AtomicU64::new(value.to_bits()))\n\n }\n\n}\n", "file_path": "src/api/metrics/value.rs", "rank": 16, "score": 58400.22978973461 }, { "content": "//! # OpenTelemetry Metrics Measurement Values\n\n//!\n\n//! All values recorded by `Instrument`s must be formatted as `MeasurementValue`.\n\n//! `Meter`s can record values that impl `Into<MeasurementValue`. The two default\n\n//! types of values recorded are `i64` and `f64`.\n\nuse std::sync::atomic;\n\n\n\n/// `MeasurementValue` represents either an integer or a floating point value of a measurement. 
It\n\n/// needs to be accompanied with a value kind or some source that provides a value kind describing\n\n/// this measurement value.\n\n#[derive(Debug)]\n\npub struct MeasurementValue(atomic::AtomicU64);\n\n\n\nimpl MeasurementValue {\n\n /// Convert the underlying `AtomicU64` into a standard `i64`.\n\n pub fn into_i64(self) -> i64 {\n\n self.0.into_inner() as i64\n\n }\n\n\n\n /// Convert the underlying `AtomicU64` into a standard `f64`.\n", "file_path": "src/api/metrics/value.rs", "rank": 17, "score": 58399.73086088085 }, { "content": "//! # Metrics Counter Interface\n\n//!\n\n//! Counters support `add(value, label_set)`. Choose this kind of metric when\n\n//! the value is a quantity, the sum is of primary interest, and\n\n//! the event count and value distribution are not of primary interest.\n\n//!\n\n//! `Counter`s are defined as `monotonic = true` by default, meaning\n\n//! that positive values are expected. `monotonic = true` counters are\n\n//! typically used because they can automatically be interpreted as a rate.\n\n//!\n\n//! When passing `MetricOptions`, counters can be declared as `with_monotonic(false)`,\n\n//! in which case they support positive and negative increments.\n\n//! `monotonic = false` counters are useful to report changes in an\n\n//! accounting scheme, such as the number of bytes allocated and\n\n//! 
deallocated.\n\nuse crate::api::metrics;\n\n\n\n/// An interface for recording values where the sum is of primary interest.\n", "file_path": "src/api/metrics/counter.rs", "rank": 18, "score": 58375.3316532433 }, { "content": "fn main() {\n\n init_tracer();\n\n let meter = sdk::Meter::new(\"ex_com_basic\");\n\n\n\n let lemons_key = Key::new(\"ex_com_lemons\");\n\n let another_key = Key::new(\"ex_com_another\");\n\n\n\n let one_metric = meter.new_f64_gauge(\n\n \"ex_com_one\",\n\n MetricOptions::default()\n\n .with_keys(vec![lemons_key.clone()])\n\n .with_description(\"A gauge set to 1.0\"),\n\n );\n\n\n\n let measure_two = meter.new_f64_measure(\n\n \"ex_com_two\",\n\n MetricOptions::default().with_keys(vec![lemons_key.clone()]),\n\n );\n\n\n\n let common_labels = meter.labels(vec![lemons_key.i64(10)]);\n", "file_path": "examples/basic.rs", "rank": 19, "score": 52462.66673187335 }, { "content": "fn main() {\n\n let addr = ([127, 0, 0, 1], 9898).into();\n\n println!(\"Listening address: {:?}\", addr);\n\n let meter = sdk::Meter::new(\"hyper\");\n\n\n\n let common_key = Key::new(\"handler\");\n\n let common_labels = meter.labels(vec![common_key.string(\"all\")]);\n\n\n\n let http_counter = meter\n\n .new_i64_counter(\n\n \"example_http_requests_total\",\n\n MetricOptions::default()\n\n .with_description(\"Total number of HTTP requests made.\")\n\n .with_keys(vec![common_key.clone()]),\n\n )\n\n .acquire_handle(&common_labels);\n\n\n\n let http_req_histogram = meter\n\n .new_f64_measure(\n\n \"example_http_request_duration_seconds\",\n", "file_path": "examples/hyper.rs", "rank": 20, "score": 52462.66673187335 }, { "content": "fn main() {\n\n let tracer = sdk::Provider::default().get_tracer(\"report_example\");\n\n {\n\n let span0 = tracer.start(\"main\", None);\n\n thread::sleep(Duration::from_millis(10));\n\n {\n\n let mut span1 = tracer.start(\"sub\", Some(span0.get_context()));\n\n span1.set_attribute(api::Key::new(\"foo\").string(\"bar\"));\n\n 
span1.add_event(\"something wrong\".to_string());\n\n thread::sleep(Duration::from_millis(10));\n\n }\n\n }\n\n\n\n // Allow flush\n\n thread::sleep(Duration::from_millis(250));\n\n}\n", "file_path": "examples/report.rs", "rank": 21, "score": 52462.66673187335 }, { "content": "fn init_tracer() {\n\n let exporter = jaeger::Exporter::builder()\n\n .with_collector_endpoint(\"127.0.0.1:6831\".parse().unwrap())\n\n .with_process(jaeger::Process {\n\n service_name: \"trace-demo\",\n\n tags: vec![\n\n Key::new(\"exporter\").string(\"jaeger\"),\n\n Key::new(\"float\").f64(312.23),\n\n ],\n\n })\n\n .init();\n\n let provider = sdk::Provider::builder()\n\n .with_exporter(exporter)\n\n .with_config(sdk::Config {\n\n default_sampler: Sampler::Always,\n\n ..Default::default()\n\n })\n\n .build();\n\n global::set_provider(provider);\n\n}\n\n\n", "file_path": "examples/basic.rs", "rank": 22, "score": 50992.361119322086 }, { "content": "fn main() {\n\n init_tracer();\n\n let meter = sdk::Meter::new(\"ex_com_basic\");\n\n\n\n let lemons_key = Key::new(\"ex_com_lemons\");\n\n let another_key = Key::new(\"ex_com_another\");\n\n\n\n let one_metric = meter.new_f64_gauge(\n\n \"ex_com_one\",\n\n MetricOptions::default()\n\n .with_keys(vec![lemons_key.clone()])\n\n .with_description(\"A gauge set to 1.0\"),\n\n );\n\n\n\n let measure_two = meter.new_f64_measure(\n\n \"ex_com_two\",\n\n MetricOptions::default().with_keys(vec![lemons_key.clone()]),\n\n );\n\n\n\n let common_labels = meter.labels(vec![lemons_key.i64(10)]);\n", "file_path": "examples/basic_print.rs", "rank": 23, "score": 50992.361119322086 }, { "content": "/// Used to track `Span` and its status in the stack\n\nstruct ContextId {\n\n span: sdk::Span,\n\n duplicate: bool,\n\n}\n\n\n\n/// A stack of `Span`s that can be used to track active `Span`s per thread.\n\npub(crate) struct SpanStack {\n\n stack: Vec<ContextId>,\n\n ids: HashSet<u64>,\n\n}\n\n\n\nimpl SpanStack {\n\n /// Create a new `SpanStack`\n\n fn new() -> Self 
{\n\n SpanStack {\n\n stack: vec![],\n\n ids: HashSet::new(),\n\n }\n\n }\n\n\n", "file_path": "src/sdk/trace/tracer.rs", "rank": 24, "score": 49953.03186811978 }, { "content": "fn init_tracer() {\n\n let exporter = print::Exporter::builder()\n\n .init();\n\n let provider = sdk::Provider::builder()\n\n .with_exporter(exporter)\n\n .with_config(sdk::Config {\n\n default_sampler: Sampler::Always,\n\n ..Default::default()\n\n })\n\n .build();\n\n global::set_provider(provider);\n\n}\n\n\n", "file_path": "examples/basic_print.rs", "rank": 25, "score": 49654.04156969416 }, { "content": "/// Carriers provide an interface for adding and removing fields from an\n\n/// underlying struct like `HashMap`.\n\npub trait Carrier {\n\n /// Get a value for a key from the underlying data.\n\n fn get(&self, key: &'static str) -> Option<&str>;\n\n /// Add a key and value to the underlying.\n\n fn set(&mut self, key: &'static str, value: String);\n\n}\n\n\n\nimpl<S: std::hash::BuildHasher> api::Carrier for HashMap<&'static str, String, S> {\n\n /// Get a value for a key from the HashMap.\n\n fn get(&self, key: &'static str) -> Option<&str> {\n\n self.get(key).map(|v| v.as_str())\n\n }\n\n\n\n /// Set a key and value in the HashMap.\n\n fn set(&mut self, key: &'static str, value: String) {\n\n self.insert(key, value);\n\n }\n\n}\n", "file_path": "src/api/trace/propagator.rs", "rank": 26, "score": 43606.53701584788 }, { "content": "/// TracerGenerics are functions that have generic type parameters. They are a separate\n\n/// trait so that `Tracer` can be used as a trait object in `GlobalTracer`.\n\npub trait TracerGenerics: Tracer {\n\n /// Wraps the execution of the function body with a span.\n\n /// It starts a new span and sets it as the active span for the given function.\n\n /// It then executes the body. 
It closes the span before returning the execution result.\n\n fn with_span<T, F>(&self, name: &'static str, f: F) -> T\n\n where\n\n F: FnOnce(&mut Self::Span) -> T;\n\n}\n\n\n\n// These functions can be implemented for all tracers to allow for convenient `with_span` syntax.\n\nimpl<S: Tracer> TracerGenerics for S {\n\n /// Wraps the execution of the function body with a span.\n\n /// It starts a new span and sets it as the active span for the given function.\n\n /// It then executes the body. It closes the span before returning the execution result.\n\n fn with_span<T, F>(&self, name: &'static str, f: F) -> T\n\n where\n\n F: FnOnce(&mut Self::Span) -> T,\n\n {\n\n let active_context = self.get_active_span().get_context();\n\n let parent = if active_context.is_valid() {\n", "file_path": "src/api/trace/tracer.rs", "rank": 27, "score": 39876.88683848765 }, { "content": "/// `GenericTracer` allows `BoxedTracer`'s to contain and use a `Tracer` trait object.\n\npub trait GenericTracer: Send + Sync {\n\n /// Create a new invalid span for use in cases where there are no active spans.\n\n fn invalid_boxed(&self) -> Box<dyn api::Span>;\n\n\n\n /// Returns a trait object so the underlying implementation can be swapped\n\n /// out at runtime.\n\n fn start_boxed(&self, name: &str, parent: Option<api::SpanContext>) -> Box<dyn api::Span>;\n\n\n\n /// Returns the currently active span as a BoxedSpan\n\n fn get_active_span_boxed(&self) -> Box<dyn api::Span>;\n\n\n\n /// Returns the currently active span as a BoxedSpan\n\n fn mark_span_as_active_boxed(&self, span: &dyn api::Span);\n\n\n\n /// Marks the current span as inactive\n\n fn mark_span_as_inactive_boxed(&self, span_id: u64);\n\n}\n\n\n\nimpl<S: api::Span + 'static> GenericTracer for Box<dyn api::Tracer<Span = S>> {\n\n /// Create a new invalid span for use in cases where there are no active spans.\n", "file_path": "src/global.rs", "rank": 28, "score": 39576.026429663034 }, { "content": "/// `GenericProvider` allows 
`GlobalProvider`'s to contain and use a `Provider` trait object.\n\npub trait GenericProvider: Send + Sync {\n\n /// Creates a named tracer instance that is a trait object through the underlying `Provider`.\n\n fn get_tracer_boxed(&self, name: &'static str) -> Box<dyn GenericTracer>;\n\n}\n\n\n\nimpl api::Provider for dyn GenericProvider {\n\n /// Tracer is a boxed tracer so it can wrap any implementation of `Tracer`.\n\n type Tracer = BoxedTracer;\n\n\n\n /// Find or create a named instance of `BoxedTracer`.\n\n fn get_tracer(&self, name: &'static str) -> Self::Tracer {\n\n BoxedTracer(self.get_tracer_boxed(name))\n\n }\n\n}\n\n\n\nimpl<T, S> GenericProvider for Box<dyn api::Provider<Tracer = T>>\n\nwhere\n\n S: api::Span + 'static,\n\n T: api::Tracer<Span = S> + 'static,\n\n{\n", "file_path": "src/global.rs", "rank": 29, "score": 39576.026429663034 }, { "content": "/// Interface for constructing `Span`s.\n\npub trait Tracer: Send + Sync {\n\n /// The `Span` type used by this `Tracer`.\n\n type Span: api::Span;\n\n\n\n /// Returns a span with an invalid `SpanContext`. Used by functions that\n\n /// need to return a default span like `get_active_span` if no span is present.\n\n fn invalid(&self) -> Self::Span;\n\n\n\n /// Starts a new `Span`.\n\n ///\n\n /// By default the currently active `Span` is set as the new `Span`'s\n\n /// parent. The `Tracer` MAY provide other default options for newly\n\n /// created `Span`s.\n\n ///\n\n /// `Span` creation MUST NOT set the newly created `Span` as the currently\n\n /// active `Span` by default, but this functionality MAY be offered additionally\n\n /// as a separate operation.\n\n ///\n\n /// Each span has zero or one parent spans and zero or more child spans, which\n\n /// represent causally related operations. 
A tree of related spans comprises a\n", "file_path": "src/api/trace/tracer.rs", "rank": 30, "score": 38550.649881913196 }, { "content": "/// An interface to create `Tracer` instances.\n\npub trait Provider: Send + Sync {\n\n /// The `Tracer` type that this `Provider` will return.\n\n type Tracer: api::Tracer;\n\n\n\n /// Creates a named tracer instance of `Self::Tracer`.\n\n /// If the name is an empty string then provider uses default name.\n\n fn get_tracer(&self, name: &'static str) -> Self::Tracer;\n\n}\n", "file_path": "src/api/trace/provider.rs", "rank": 31, "score": 38550.649881913196 }, { "content": "/// Interface for a single operation within a trace.\n\npub trait Span: Send + Sync + std::fmt::Debug {\n\n /// An API to record events in the context of a given `Span`.\n\n ///\n\n /// Events have a time associated with the moment when they are\n\n /// added to the `Span`.\n\n ///\n\n /// Events SHOULD preserve the order in which they're set. This will typically match\n\n /// the ordering of the events' timestamps.\n\n ///\n\n /// Note that the OpenTelemetry project documents certain [\"standard event names and\n\n /// keys\"](https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/data-semantic-conventions.md)\n\n /// which have prescribed semantic meanings.\n\n fn add_event(&mut self, message: String) {\n\n self.add_event_with_timestamp(message, SystemTime::now())\n\n }\n\n\n\n /// An API to record events at a specific time in the context of a given `Span`.\n\n ///\n\n /// Events SHOULD preserve the order in which they're set. 
This will typically match\n\n /// the ordering of the events' timestamps.\n", "file_path": "src/api/trace/span.rs", "rank": 32, "score": 33057.95757067458 }, { "content": "/// `SpanExporter` defines the interface that protocol-specific exporters must\n\n/// implement so that they can be plugged into OpenTelemetry SDK and support\n\n/// sending of telemetry data.\n\n///\n\n/// The goals of the interface are:\n\n///\n\n/// - Minimize burden of implementation for protocol-dependent telemetry\n\n/// exporters. The protocol exporter is expected to be primarily a simple\n\n/// telemetry data encoder and transmitter.\n\n/// - Allow implementing helpers as composable components that use the same\n\n/// chainable Exporter interface. SDK authors are encouraged to implement common\n\n/// functionality such as queuing, batching, tagging, etc. as helpers. This\n\n/// functionality will be applicable regardless of what protocol exporter is used.\n\npub trait SpanExporter: Send + Sync + std::fmt::Debug {\n\n /// The type of `Span` that is exported\n\n type Span: api::Span;\n\n /// Exports a batch of telemetry data. Protocol exporters that will implement\n\n /// this function are typically expected to serialize and transmit the data\n\n /// to the destination.\n\n ///\n\n /// This function will never be called concurrently for the same exporter\n\n /// instance. It can be called again only after the current call returns.\n\n ///\n\n /// This function must not block indefinitely, there must be a reasonable\n\n /// upper limit after which the call must time out with an error result.\n\n fn export(&self, batch: Vec<Self::Span>) -> Result<(), ()>;\n\n\n\n /// Shuts down the exporter. Called when SDK is shut down. This is an\n\n /// opportunity for exporter to do any cleanup required.\n\n ///\n\n /// `shutdown` should be called only once for each Exporter instance. 
After\n\n /// the call to `shutdown`, subsequent calls to `SpanExport` are not allowed\n\n /// and should return an error.\n\n ///\n\n /// Shutdown should not block indefinitely (e.g. if it attempts to flush the\n\n /// data and the destination is unavailable). SDK authors can\n\n /// decide if they want to make the shutdown timeout to be configurable.\n\n fn shutdown(&self);\n\n\n\n /// Allows exporter to be downcast\n\n fn as_any(&self) -> &dyn std::any::Any;\n\n}\n", "file_path": "src/exporter/trace/mod.rs", "rank": 33, "score": 32305.128106693897 }, { "content": " /// `Counter`\n\n ///\n\n /// - for `Gauge`, `true` implies that the metric is a\n\n /// non-descending `Gauge`\n\n ///\n\n /// - for `Measure`, `true` implies that the metric supports\n\n /// positive and negative values\n\n pub alternate: bool,\n\n}\n\n\n\nimpl MetricOptions {\n\n /// Set a description for the current set of options.\n\n pub fn with_description<S: Into<String>>(self, description: S) -> Self {\n\n MetricOptions {\n\n description: description.into(),\n\n ..self\n\n }\n\n }\n\n\n\n /// Set a `Unit` for the current set of metric options.\n", "file_path": "src/api/metrics/mod.rs", "rank": 40, "score": 28395.71320016423 }, { "content": " type F64Measure = NoopMeasure<f64>;\n\n\n\n /// Returns a no-op `NoopLabelSet`.\n\n fn labels(&self, _key_values: Vec<api::KeyValue>) -> Self::LabelSet {\n\n NoopLabelSet {}\n\n }\n\n\n\n /// Returns a no-op `I64Counter` instance.\n\n fn new_i64_counter<S: Into<String>>(\n\n &self,\n\n _name: S,\n\n _opts: api::MetricOptions,\n\n ) -> Self::I64Counter {\n\n NoopCounter {\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n\n\n /// Returns a no-op `F64Counter` instance.\n\n fn new_f64_counter<S: Into<String>>(\n", "file_path": "src/api/metrics/noop.rs", "rank": 43, "score": 28393.50745654088 }, { "content": " pub fn with_unit(self, unit: api::Unit) -> Self {\n\n MetricOptions { unit, ..self }\n\n }\n\n\n\n /// Set a list of `Key`s for the current set metric 
of options.\n\n pub fn with_keys(self, keys: Vec<api::Key>) -> Self {\n\n MetricOptions { keys, ..self }\n\n }\n\n\n\n /// Set monotonic for the given set of metric options.\n\n pub fn with_monotonic(self, _monotonic: bool) -> Self {\n\n // TODO figure out counter vs gauge issue here.\n\n unimplemented!()\n\n }\n\n\n\n /// Set absolute for the given set of metric options.\n\n pub fn with_absolute(self, absolute: bool) -> Self {\n\n MetricOptions {\n\n alternate: !absolute,\n\n ..self\n", "file_path": "src/api/metrics/mod.rs", "rank": 45, "score": 28391.096045840324 }, { "content": " fn new_i64_counter<S: Into<String>>(&self, name: S, opts: MetricOptions) -> Self::I64Counter;\n\n\n\n /// Creates a new `f64` counter with a given name and customized with passed options.\n\n fn new_f64_counter<S: Into<String>>(&self, name: S, opts: MetricOptions) -> Self::F64Counter;\n\n\n\n /// Creates a new `i64` gauge with a given name and customized with passed options.\n\n fn new_i64_gauge<S: Into<String>>(&self, name: S, opts: MetricOptions) -> Self::I64Gauge;\n\n\n\n /// Creates a new `f64` gauge with a given name and customized with passed options.\n\n fn new_f64_gauge<S: Into<String>>(&self, name: S, opts: MetricOptions) -> Self::F64Gauge;\n\n\n\n /// Creates a new `i64` measure with a given name and customized with passed options.\n\n fn new_i64_measure<S: Into<String>>(&self, name: S, opts: MetricOptions) -> Self::I64Measure;\n\n\n\n /// Creates a new `f64` measure with a given name and customized with passed options.\n\n fn new_f64_measure<S: Into<String>>(&self, name: S, opts: MetricOptions) -> Self::F64Measure;\n\n\n\n /// Atomically records a batch of measurements.\n\n fn record_batch<M: IntoIterator<Item = Measurement<Self::LabelSet>>>(\n\n &self,\n\n label_set: &Self::LabelSet,\n\n measurements: M,\n\n );\n\n}\n", "file_path": "src/api/metrics/mod.rs", "rank": 46, "score": 28391.004003183534 }, { "content": "pub mod gauge;\n\npub mod measure;\n\npub mod noop;\n\npub 
mod value;\n\n\n\nuse counter::Counter;\n\nuse gauge::Gauge;\n\nuse measure::Measure;\n\nuse value::MeasurementValue;\n\n\n\n/// The implementation-level interface to Set/Add/Record individual\n\n/// metrics without precomputed labels.\n", "file_path": "src/api/metrics/mod.rs", "rank": 48, "score": 28387.627708693864 }, { "content": "//! # No-op OpenTelemetry Metrics Implementation\n\n//!\n\n//! This implementation is returned as the global Meter if no `Meter`\n\n//! has been set. It is also useful for testing purposes as it is intended\n\n//! to have minimal resource utilization and runtime impact.\n\nuse crate::api;\n\nuse std::marker;\n\nuse std::sync::Arc;\n\n\n\n/// A no-op instance of a `Meter`.\n\n#[derive(Debug)]\n\npub struct NoopMeter {}\n\n\n\nimpl api::Meter for NoopMeter {\n\n type LabelSet = NoopLabelSet;\n\n type I64Counter = NoopCounter<i64>;\n\n type F64Counter = NoopCounter<f64>;\n\n type I64Gauge = NoopGauge<i64>;\n\n type F64Gauge = NoopGauge<f64>;\n\n type I64Measure = NoopMeasure<i64>;\n", "file_path": "src/api/metrics/noop.rs", "rank": 49, "score": 28386.64784744102 }, { "content": " &self,\n\n _name: S,\n\n _opts: api::MetricOptions,\n\n ) -> Self::F64Counter {\n\n NoopCounter {\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n\n\n /// Returns a no-op `I64Gauge` instance.\n\n fn new_i64_gauge<S: Into<String>>(\n\n &self,\n\n _name: S,\n\n _opts: api::MetricOptions,\n\n ) -> Self::I64Gauge {\n\n NoopGauge {\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n\n", "file_path": "src/api/metrics/noop.rs", "rank": 50, "score": 28386.593802297215 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Used to record `MeasurementValue`s for a given `Instrument` for use in\n\n/// batch recording by a `Meter`.\n\n#[allow(missing_debug_implementations)]\n\npub struct Measurement<LS> {\n\n instrument: Arc<dyn Instrument<LS>>,\n\n value: MeasurementValue,\n\n}\n\n\n\nimpl<LS: LabelSet> Measurement<LS> {\n\n /// Create a new measurement\n\n pub fn new(instrument: Arc<dyn 
Instrument<LS>>, value: MeasurementValue) -> Self {\n\n Measurement { instrument, value }\n\n }\n\n\n\n /// Returns an instrument that created this measurement.\n\n pub fn instrument(&self) -> Arc<dyn Instrument<LS>> {\n\n self.instrument.clone()\n\n }\n\n\n\n /// Returns a value recorded in this measurement.\n\n pub fn into_value(self) -> MeasurementValue {\n\n self.value\n\n }\n\n}\n\n\n", "file_path": "src/api/metrics/mod.rs", "rank": 51, "score": 28385.42857242814 }, { "content": "\n\nimpl<T> api::CounterHandle<T> for NoopHandle<T> where T: Into<api::MeasurementValue> {}\n\n\n\nimpl<T> api::GaugeHandle<T> for NoopHandle<T> where T: Into<api::MeasurementValue> {}\n\n\n\nimpl<T> api::MeasureHandle<T> for NoopHandle<T> where T: Into<api::MeasurementValue> {}\n\n\n\n/// A no-op instance of a `Counter`.\n\n#[derive(Debug)]\n\npub struct NoopCounter<T> {\n\n _marker: marker::PhantomData<T>,\n\n}\n\n\n\nimpl<T: Into<api::MeasurementValue> + 'static> api::Counter<T, NoopLabelSet> for NoopCounter<T> {\n\n type Handle = NoopHandle<T>;\n\n\n\n /// Returns a no-op `Measurement`.\n\n fn measurement(&self, value: T) -> api::Measurement<NoopLabelSet> {\n\n let handle = self.acquire_handle(&NoopLabelSet {});\n\n api::Measurement {\n", "file_path": "src/api/metrics/noop.rs", "rank": 52, "score": 28384.131729356453 }, { "content": " /// Returns a no-op `F64Gauge` instance.\n\n fn new_f64_gauge<S: Into<String>>(\n\n &self,\n\n _name: S,\n\n _opts: api::MetricOptions,\n\n ) -> Self::F64Gauge {\n\n NoopGauge {\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n\n\n /// Returns a no-op `I64Measure` instance.\n\n fn new_i64_measure<S: Into<String>>(\n\n &self,\n\n _name: S,\n\n _opts: api::MetricOptions,\n\n ) -> Self::I64Measure {\n\n NoopMeasure {\n\n _marker: marker::PhantomData,\n\n }\n", "file_path": "src/api/metrics/noop.rs", "rank": 53, "score": 28383.051415728034 }, { "content": " }\n\n\n\n /// Returns a no-op `F64Measure` instance.\n\n fn new_f64_measure<S: Into<String>>(\n\n 
&self,\n\n _name: S,\n\n _opts: api::MetricOptions,\n\n ) -> Self::F64Measure {\n\n NoopMeasure {\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n\n\n /// Ignores batch recordings\n\n fn record_batch<M: IntoIterator<Item = api::Measurement<NoopLabelSet>>>(\n\n &self,\n\n _label_set: &NoopLabelSet,\n\n _measurements: M,\n\n ) {\n\n // Ignored\n", "file_path": "src/api/metrics/noop.rs", "rank": 54, "score": 28382.66140517782 }, { "content": " }\n\n}\n\n\n\n/// A no-op instance of `LabelSet`.\n\n#[derive(Debug)]\n\npub struct NoopLabelSet {}\n\n\n\nimpl api::LabelSet for NoopLabelSet {}\n\n\n\n/// A no-op instance of all metric `InstrumentHandler`\n\n#[derive(Debug)]\n\npub struct NoopHandle<T> {\n\n _marker: marker::PhantomData<T>,\n\n}\n\n\n\nimpl<T> api::Instrument<NoopLabelSet> for NoopHandle<T> {\n\n fn record_one(&self, _value: api::MeasurementValue, _label_set: &NoopLabelSet) {\n\n // Ignored\n\n }\n\n}\n", "file_path": "src/api/metrics/noop.rs", "rank": 55, "score": 28382.601777719938 }, { "content": " instrument: Arc::new(handle),\n\n value: value.into(),\n\n }\n\n }\n\n\n\n /// Returns a `NoopHandle`\n\n fn acquire_handle(&self, _labels: &NoopLabelSet) -> Self::Handle {\n\n NoopHandle {\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<T> api::Instrument<NoopLabelSet> for NoopCounter<T> {\n\n /// Ignores all recorded measurement values.\n\n fn record_one(&self, _value: api::MeasurementValue, _labels: &NoopLabelSet) {\n\n // Ignored\n\n }\n\n}\n\n\n", "file_path": "src/api/metrics/noop.rs", "rank": 56, "score": 28382.175341583992 }, { "content": "//! form a sufficient basis for expression of a wide variety of metric data.\n\n//! Programmers write and read these as `add()`, `set()`, and `record()`\n\n//! method calls, signifying the semantics and standard interpretation,\n\n//! and we believe these three methods are all that are needed.\n\n//!\n\n//! Nevertheless, it is common to apply restrictions on metric values, the\n\n//! 
inputs to `add()`, `set()`, and `record()`, in order to refine their\n\n//! standard interpretation. Generally, there is a question of whether\n\n//! the instrument can be used to compute a rate, because that is usually\n\n//! a desirable analysis. Each metric instrument offers an optional\n\n//! declaration, specifying restrictions on values input to the metric.\n\n//! For example, Measures are declared as non-negative by default,\n\n//! appropriate for reporting sizes and durations; a Measure option is\n\n//! provided to record positive or negative values, but it does not change\n\n//! the kind of instrument or the method name used, as the semantics are\n\n//! unchanged.\n\nuse crate::api;\n\nuse std::sync::Arc;\n\n\n\npub mod counter;\n", "file_path": "src/api/metrics/mod.rs", "rank": 57, "score": 28381.55947501478 }, { "content": "/// A no-op instance of a `Gauge`.\n\n#[derive(Debug)]\n\npub struct NoopGauge<T> {\n\n _marker: marker::PhantomData<T>,\n\n}\n\n\n\nimpl api::Gauge<i64, NoopLabelSet> for NoopGauge<i64> {\n\n type Handle = NoopHandle<i64>;\n\n\n\n /// Returns a no-op `Measurement`.\n\n fn measurement(&self, value: i64) -> api::Measurement<NoopLabelSet> {\n\n let handle = self.acquire_handle(&NoopLabelSet {});\n\n api::Measurement {\n\n instrument: Arc::new(handle),\n\n value: api::MeasurementValue::from(value),\n\n }\n\n }\n\n\n\n /// Returns a `NoopHandle`\n\n fn acquire_handle(&self, _labels: &NoopLabelSet) -> Self::Handle {\n", "file_path": "src/api/metrics/noop.rs", "rank": 58, "score": 28380.435284230145 }, { "content": "//! To capture measurements using an `Instrument`, you need an SDK that\n\n//! implements the `Meter` API.\n\n//!\n\n//! ## Metric kinds and inputs\n\n//!\n\n//! The API distinguishes metric instruments by semantic meaning, not by\n\n//! the type of value produced in an exporter. This is a departure from\n\n//! convention, compared with a number of common metric libraries, and\n\n//! 
stems from the separation of the API and the SDK. The SDK ultimately\n\n//! determines how to handle metric events and could potentially implement\n\n//! non-standard behavior.\n\n//!\n\n//! This explains why the metric API does not have metric instrument kinds\n\n//! for exporting \"Histogram\" and \"Summary\" distribution explicitly, for\n\n//! example. These are both semantically `Measure` instruments and an SDK\n\n//! can be configured to produce histograms or distribution summaries from\n\n//! Measure events. It is out of scope for the Metrics API to specify how\n\n//! these alternatives are configured in a particular SDK.\n\n//!\n\n//! We believe the three metric kinds `Counter`, `Gauge`, and `Measure`\n", "file_path": "src/api/metrics/mod.rs", "rank": 59, "score": 28380.186502163484 }, { "content": "/// A no-op instance of a `Measure`.\n\n#[derive(Debug)]\n\npub struct NoopMeasure<T> {\n\n _marker: marker::PhantomData<T>,\n\n}\n\n\n\nimpl api::Measure<i64, NoopLabelSet> for NoopMeasure<i64> {\n\n type Handle = NoopHandle<i64>;\n\n\n\n /// Returns a no-op `Measurement`.\n\n fn measurement(&self, value: i64) -> api::Measurement<NoopLabelSet> {\n\n let handle = self.acquire_handle(&NoopLabelSet {});\n\n\n\n api::Measurement::new(Arc::new(handle), api::MeasurementValue::from(value))\n\n }\n\n\n\n /// Returns a `NoopHandle`\n\n fn acquire_handle(&self, _labels: &NoopLabelSet) -> Self::Handle {\n\n NoopHandle {\n\n _marker: marker::PhantomData,\n", "file_path": "src/api/metrics/noop.rs", "rank": 60, "score": 28379.79324710551 }, { "content": "//! # Metrics Measure Interface\n\n//!\n\n//! `Measure`s support `record(value, label_set)`, signifying that\n\n//! events report individual measurements. This kind of metric\n\n//! should be used when the count or rate of events is meaningful\n\n//! and either:\n\n//!\n\n//! - The sum is of interest in addition to the count (rate)\n\n//! - Quantile information is of interest.\n\n//!\n\n//! 
`Measure`s are defined as `with_absolute(true)` by default,\n\n//! meaning that negative values are invalid. `absolute = true`\n\n//! measures are typically used to record absolute values such as\n\n//! durations and sizes.\n\n//!\n\n//! When passing `MetricOptions`, measures can be declared as\n\n//! `with_abslute(false)` to indicate support for positive and negative values.\n\nuse crate::api::metrics;\n\n\n\n/// An interface for recording values where the count or rate of\n\n/// events is meaningful.\n", "file_path": "src/api/metrics/measure.rs", "rank": 61, "score": 28379.53680683349 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl api::Measure<f64, NoopLabelSet> for NoopMeasure<f64> {\n\n type Handle = NoopHandle<f64>;\n\n\n\n /// Returns a no-op `Measurement`.\n\n fn measurement(&self, value: f64) -> api::Measurement<NoopLabelSet> {\n\n let handle = self.acquire_handle(&NoopLabelSet {});\n\n\n\n api::Measurement::new(Arc::new(handle), api::MeasurementValue::from(value))\n\n }\n\n\n\n /// Returns a `NoopHandle`\n\n fn acquire_handle(&self, _labels: &NoopLabelSet) -> Self::Handle {\n\n NoopHandle {\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<T> api::Instrument<NoopLabelSet> for NoopMeasure<T> {\n\n /// Ignores all measurement values and labels.\n\n fn record_one(&self, _value: api::MeasurementValue, _labels: &NoopLabelSet) {\n\n // Ignored\n\n }\n\n}\n", "file_path": "src/api/metrics/noop.rs", "rank": 62, "score": 28379.514813972597 }, { "content": "//! # Metrics Gauge Interface\n\n//!\n\n//! `Gauge`s support `set(value, label_set)`. `Gauge` metrics express\n\n//! a pre-calculated value that is either `set` by explicit instrumentation\n\n//! or observed through a callback. Generally, this kind of metric should\n\n//! be used when the metric cannot be expressed as a sum or because the\n\n//! measurement interval is arbitrary. Use this kind of metric when the\n\n//! measurement is not a quantity, and the sum and event count are not of\n\n//! 
interest.\n\n//!\n\n//! `Gauge`s are defined as `monotonic = false` by default, meaning that new\n\n//! values are permitted to make positive or negative changes to the\n\n//! gauge. There is no restriction on the sign of the input for gauges.\n\n//!\n\n//! As an option, gauges can be declared as `with_monotonic(true)`, in which case\n\n//! successive values are expected to rise monotonically. `monotonic = true`\n\n//! gauges are useful in reporting computed cumulative sums, allowing an\n\n//! application to compute a current value and report it, without\n\n//! remembering the last-reported value in order to report an increment.\n\nuse crate::api::metrics;\n\n\n\n/// An interface for recording values where the metric cannot be expressed\n\n/// as a sum or because the measurement interval is arbitrary.\n", "file_path": "src/api/metrics/gauge.rs", "rank": 63, "score": 28379.466030317017 }, { "content": " NoopHandle {\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl api::Gauge<f64, NoopLabelSet> for NoopGauge<f64> {\n\n type Handle = NoopHandle<f64>;\n\n\n\n /// Returns a no-op `Measurement`.\n\n fn measurement(&self, value: f64) -> api::Measurement<NoopLabelSet> {\n\n let handle = self.acquire_handle(&NoopLabelSet {});\n\n api::Measurement {\n\n instrument: Arc::new(handle),\n\n value: api::MeasurementValue::from(value),\n\n }\n\n }\n\n\n\n /// Returns a `NoopHandle`\n\n fn acquire_handle(&self, _labels: &NoopLabelSet) -> Self::Handle {\n", "file_path": "src/api/metrics/noop.rs", "rank": 64, "score": 28378.959576845016 }, { "content": " NoopHandle {\n\n _marker: marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<T> api::InstrumentHandle for NoopHandle<T> {\n\n /// Ignores all measurement values.\n\n fn record_one(&self, _value: api::MeasurementValue) {\n\n // Ignored\n\n }\n\n}\n\n\n\nimpl<T> api::Instrument<NoopLabelSet> for NoopGauge<T> {\n\n /// Ignores all measurement values and labels.\n\n fn record_one(&self, _value: api::MeasurementValue, _labels: 
&NoopLabelSet) {\n\n // Ignored\n\n }\n\n}\n\n\n", "file_path": "src/api/metrics/noop.rs", "rank": 65, "score": 28378.345052926845 }, { "content": "//! # OpenTelemetry Metrics API\n\n//!\n\n//! The user-facing metrics API supports producing diagnostic measurements\n\n//! using three basic kinds of instrument. \"Metrics\" are the thing being\n\n//! produced--mathematical, statistical summaries of certain observable\n\n//! behavior in the program. `Instrument`s are the devices used by the\n\n//! program to record observations about their behavior. Therefore, we use\n\n//! \"metric instrument\" to refer to a program object, allocated through the\n\n//! API, used for recording metrics. There are three distinct instruments\n\n//! in the Metrics API, commonly known as `Counter`s, `Gauge`s, and\n\n//! `Measure`s.\n\n//!\n\n//! Monitoring and alerting are the common use-case for the data provided\n\n//! through metric instruments, after various collection and aggregation\n\n//! strategies are applied to the data. We find there are many other uses\n\n//! for the metric events that stream into these instruments. We imagine\n\n//! metric data being aggregated and recorded as events in tracing and\n\n//! logging systems too, and for this reason OpenTelemetry requires a\n\n//! separation of the API from the SDK.\n\n//!\n", "file_path": "src/api/metrics/mod.rs", "rank": 66, "score": 28376.741174123275 }, { "content": "//! 
Metric exporters\n\npub mod prometheus;\n", "file_path": "src/exporter/metrics/mod.rs", "rank": 67, "score": 28374.60844579191 }, { "content": "\n\nimpl api::Counter<i64, sdk::LabelSet> for prometheus::IntCounterVec {\n\n /// Prometheus' `CounterHandle`\n\n type Handle = IntCounterHandle;\n\n\n\n /// Creates a `Measurement` object to be used by a `Meter` when batch recording.\n\n fn measurement(&self, value: i64) -> api::Measurement<sdk::LabelSet> {\n\n api::Measurement::new(Arc::new(self.clone()), api::MeasurementValue::from(value))\n\n }\n\n\n\n /// Creates a handle for this instrument.\n\n fn acquire_handle(&self, labels: &sdk::LabelSet) -> Self::Handle {\n\n IntCounterHandle(self.with(&convert_label_set(labels)))\n\n }\n\n}\n\n\n\nimpl api::Instrument<sdk::LabelSet> for prometheus::IntCounterVec {\n\n /// Record a single counter measurement value\n\n fn record_one(&self, value: api::MeasurementValue, label_set: &sdk::LabelSet) {\n\n self.with(&convert_label_set(label_set))\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 68, "score": 27113.42998490238 }, { "content": "impl api::InstrumentHandle for CounterHandle {\n\n /// record a single counter measurement value for precomputed labels\n\n fn record_one(&self, value: api::MeasurementValue) {\n\n self.0.inc_by(value.into_f64())\n\n }\n\n}\n\n\n\nimpl api::CounterHandle<f64> for CounterHandle {}\n\n\n\n// GAUGE COMPAT\n\n\n\n/// Prometheus IntGaugeHandle\n\n#[derive(Clone)]\n\n#[allow(missing_debug_implementations)]\n\npub struct IntGaugeHandle(prometheus::IntGauge);\n\n\n\nimpl api::Gauge<i64, sdk::LabelSet> for prometheus::IntGaugeVec {\n\n type Handle = IntGaugeHandle;\n\n\n\n /// Creates a `Measurement` object to be used by a `Meter` when batch recording.\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 69, "score": 27113.00016016006 }, { "content": "\n\n /// Creates a `Measurement` object to be used by a `Meter` when batch recording.\n\n fn measurement(&self, value: f64) 
-> api::Measurement<sdk::LabelSet> {\n\n api::Measurement::new(Arc::new(self.clone()), api::MeasurementValue::from(value))\n\n }\n\n\n\n /// Creates a handle for this instrument.\n\n fn acquire_handle(&self, labels: &sdk::LabelSet) -> Self::Handle {\n\n CounterHandle(self.with(&convert_label_set(labels)))\n\n }\n\n}\n\n\n\nimpl api::Instrument<sdk::LabelSet> for prometheus::CounterVec {\n\n /// record a single counter measurement value\n\n fn record_one(&self, value: api::MeasurementValue, label_set: &sdk::LabelSet) {\n\n self.with(&convert_label_set(label_set))\n\n .inc_by(value.into_f64())\n\n }\n\n}\n\n\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 70, "score": 27112.99224508055 }, { "content": "//! # OpenTelemetry Prometheus Exporter\n\n//!\n\n//! This exporter currently delegates to the [Prometheus library]\n\n//! library which implements the [Prometheus API].\n\n//!\n\n//! [Prometheus library]: https://github.com/tikv/rust-prometheus\n\n//! [Prometheus API]: https://prometheus.io\n\nuse crate::api;\n\nuse crate::sdk;\n\nuse api::Key;\n\npub use prometheus::{\n\n default_registry, Counter, CounterVec, Encoder, Gauge, GaugeVec, Histogram, HistogramOpts,\n\n HistogramVec, IntCounter, IntCounterVec, IntGauge, IntGaugeVec, Opts, Registry, TextEncoder,\n\n};\n\nuse std::collections::HashMap;\n\nuse std::sync::Arc;\n\n\n\n/// Convert from `sdk::LabelSet` to `prometheus`' label format.\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 71, "score": 27112.865047902138 }, { "content": " .inc_by(value.into_i64())\n\n }\n\n}\n\n\n\nimpl api::InstrumentHandle for IntCounterHandle {\n\n /// Record a single counter measurement value for preset values\n\n fn record_one(&self, value: api::MeasurementValue) {\n\n self.0.inc_by(value.into_i64())\n\n }\n\n}\n\n\n\nimpl api::CounterHandle<i64> for IntCounterHandle {}\n\n\n\n/// Prometheus CounterHandle\n\n#[derive(Clone)]\n\n#[allow(missing_debug_implementations)]\n\npub struct 
CounterHandle(prometheus::Counter);\n\n\n\nimpl api::Counter<f64, sdk::LabelSet> for prometheus::CounterVec {\n\n type Handle = CounterHandle;\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 72, "score": 27110.00569340173 }, { "content": "}\n\n\n\nimpl api::Measure<i64, sdk::LabelSet> for IntMeasure {\n\n type Handle = IntMeasureHandle;\n\n\n\n /// Creates a `Measurement` object to be used by a `Meter` when batch recording.\n\n fn measurement(&self, value: i64) -> api::Measurement<sdk::LabelSet> {\n\n api::Measurement::new(Arc::new(self.clone()), api::MeasurementValue::from(value))\n\n }\n\n\n\n /// Creates a handle for this instrument.\n\n fn acquire_handle(&self, labels: &sdk::LabelSet) -> Self::Handle {\n\n IntMeasureHandle(self.0.with(&convert_label_set(labels)))\n\n }\n\n}\n\n\n\nimpl api::Instrument<sdk::LabelSet> for IntMeasure {\n\n /// record a single measure measurement value\n\n fn record_one(&self, value: api::MeasurementValue, label_set: &sdk::LabelSet) {\n\n self.0\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 73, "score": 27109.98488395923 }, { "content": " type Handle = MeasureHandle;\n\n\n\n /// Creates a `Measurement` object to be used by a `Meter` when batch recording.\n\n fn measurement(&self, value: f64) -> api::Measurement<sdk::LabelSet> {\n\n api::Measurement::new(Arc::new(self.clone()), api::MeasurementValue::from(value))\n\n }\n\n\n\n /// Creates a handle for this instrument.\n\n fn acquire_handle(&self, labels: &sdk::LabelSet) -> Self::Handle {\n\n MeasureHandle(self.with(&convert_label_set(labels)))\n\n }\n\n}\n\n\n\nimpl api::Instrument<sdk::LabelSet> for prometheus::HistogramVec {\n\n /// record a single measure measurement value\n\n fn record_one(&self, value: api::MeasurementValue, label_set: &sdk::LabelSet) {\n\n self.with(&convert_label_set(label_set))\n\n .observe(value.into_f64())\n\n }\n\n}\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 74, "score": 27109.46975674256 }, 
{ "content": " fn record_one(&self, value: api::MeasurementValue) {\n\n self.0.set(value.into_i64())\n\n }\n\n}\n\n\n\nimpl api::GaugeHandle<i64> for IntGaugeHandle {}\n\n\n\n/// Prometheus GaugeHandle\n\n#[derive(Clone)]\n\n#[allow(missing_debug_implementations)]\n\npub struct GaugeHandle(prometheus::Gauge);\n\n\n\nimpl api::Gauge<f64, sdk::LabelSet> for prometheus::GaugeVec {\n\n type Handle = GaugeHandle;\n\n\n\n /// Creates a `Measurement` object to be used by a `Meter` when batch recording.\n\n fn measurement(&self, value: f64) -> api::Measurement<sdk::LabelSet> {\n\n api::Measurement::new(Arc::new(self.clone()), api::MeasurementValue::from(value))\n\n }\n\n\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 75, "score": 27109.244757043736 }, { "content": " /// Creates a handle for this instrument.\n\n fn acquire_handle(&self, labels: &sdk::LabelSet) -> Self::Handle {\n\n GaugeHandle(self.with(&convert_label_set(labels)))\n\n }\n\n}\n\n\n\nimpl api::Instrument<sdk::LabelSet> for prometheus::GaugeVec {\n\n /// record a single gauge measurement value\n\n fn record_one(&self, value: api::MeasurementValue, label_set: &sdk::LabelSet) {\n\n self.with(&convert_label_set(label_set))\n\n .set(value.into_f64())\n\n }\n\n}\n\n\n\nimpl api::InstrumentHandle for GaugeHandle {\n\n /// record a single gauge measurement value for precomputed labels\n\n fn record_one(&self, value: api::MeasurementValue) {\n\n self.0.set(value.into_f64())\n\n }\n\n}\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 76, "score": 27107.055480404884 }, { "content": " .with(&convert_label_set(label_set))\n\n .observe(value.into_i64() as f64)\n\n }\n\n}\n\n\n\nimpl api::InstrumentHandle for IntMeasureHandle {\n\n /// record a single measure measurement value for precomputed labels\n\n fn record_one(&self, value: api::MeasurementValue) {\n\n self.0.observe(value.into_i64() as f64)\n\n }\n\n}\n\n\n\nimpl api::MeasureHandle<i64> for IntMeasureHandle {}\n\n\n\n/// 
Prometheus MeasureHandle\n\n#[derive(Clone)]\n\n#[allow(missing_debug_implementations)]\n\npub struct MeasureHandle(prometheus::Histogram);\n\n\n\nimpl api::Measure<f64, sdk::LabelSet> for prometheus::HistogramVec {\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 77, "score": 27106.726571308816 }, { "content": " fn measurement(&self, value: i64) -> api::Measurement<sdk::LabelSet> {\n\n api::Measurement::new(Arc::new(self.clone()), api::MeasurementValue::from(value))\n\n }\n\n\n\n /// Creates a handle for this instrument.\n\n fn acquire_handle(&self, labels: &sdk::LabelSet) -> Self::Handle {\n\n IntGaugeHandle(self.with(&convert_label_set(labels)))\n\n }\n\n}\n\n\n\nimpl api::Instrument<sdk::LabelSet> for prometheus::IntGaugeVec {\n\n /// record a single gauge measurement value\n\n fn record_one(&self, value: api::MeasurementValue, label_set: &sdk::LabelSet) {\n\n self.with(&convert_label_set(label_set))\n\n .set(value.into_i64())\n\n }\n\n}\n\n\n\nimpl api::InstrumentHandle for IntGaugeHandle {\n\n /// record a single gauge measurement value for precomputed labels\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 78, "score": 27106.624528164386 }, { "content": "\n\nimpl api::InstrumentHandle for MeasureHandle {\n\n /// record a single measure measurement value for precomputed labels\n\n fn record_one(&self, value: api::MeasurementValue) {\n\n self.0.observe(value.into_f64())\n\n }\n\n}\n\n\n\nimpl api::MeasureHandle<f64> for MeasureHandle {}\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 79, "score": 27104.360642608786 }, { "content": "\n\nimpl api::GaugeHandle<f64> for GaugeHandle {}\n\n\n\n// MEASURE COMPAT\n\n\n\n/// Prometheus IntMeasureHandle\n\n#[derive(Clone)]\n\n#[allow(missing_debug_implementations)]\n\npub struct IntMeasureHandle(prometheus::Histogram);\n\n\n\n/// Prometheus Histograms do not have i64 variant, `IntMeasure` will convert i64 to float when it\n\n/// records 
values.\n\n#[derive(Clone)]\n\n#[allow(missing_debug_implementations)]\n\npub struct IntMeasure(prometheus::HistogramVec);\n\n\n\nimpl IntMeasure {\n\n pub(crate) fn new(histogram: prometheus::HistogramVec) -> Self {\n\n IntMeasure(histogram)\n\n }\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 80, "score": 27104.16783089015 }, { "content": "//! OpenTelemetry shared core date types\n\nuse std::borrow::Cow;\n\n\n\n/// Key used for metric `LabelSet`s and trace `Span` attributes.\n\n#[derive(Clone, Debug)]\n\npub struct Key(Cow<'static, str>);\n\n\n\nimpl Key {\n\n /// Create a new `Key`.\n\n pub fn new<S: Into<Cow<'static, str>>>(value: S) -> Self {\n\n Key(value.into())\n\n }\n\n\n\n /// Create a `KeyValue` pair for `bool` values.\n\n pub fn bool(&self, value: bool) -> KeyValue {\n\n KeyValue {\n\n key: self.clone(),\n\n value: Value::Bool(value),\n\n }\n\n }\n", "file_path": "src/api/core.rs", "rank": 81, "score": 28.96560182753313 }, { "content": "pub use self::core::{Key, KeyValue, Unit, Value};\n\npub use distributed_context::http_b3_propagator::HttpB3Propagator;\n\npub use metrics::{\n\n counter::{Counter, CounterHandle},\n\n gauge::{Gauge, GaugeHandle},\n\n measure::{Measure, MeasureHandle},\n\n noop::NoopMeter,\n\n value::MeasurementValue,\n\n Instrument, InstrumentHandle, LabelSet, Measurement, Meter, MetricOptions,\n\n};\n\npub use trace::{\n\n noop::{NoopProvider, NoopSpan, NoopTracer},\n\n propagator::{BinaryFormat, Carrier, HttpTextFormat},\n\n provider::Provider,\n\n sampler::Sampler,\n\n span::Span,\n\n span_context::{SpanContext, TRACE_FLAGS_UNUSED, TRACE_FLAG_SAMPLED},\n\n tracer::{Tracer, TracerGenerics},\n\n};\n", "file_path": "src/api/mod.rs", "rank": 82, "score": 27.12385552980011 }, { "content": "\n\n /// Returns a reference to the key's `Cow` type for use in `LabelSet`s.\n\n pub fn inner(&self) -> &Cow<'static, str> {\n\n &self.0\n\n }\n\n\n\n /// Returns the inner `Cow` type.\n\n pub fn into_inner(self) -> Cow<'static, str> 
{\n\n self.0\n\n }\n\n}\n\n\n\nimpl From<&'static str> for Key {\n\n /// Convert a `&str` to a `Key`.\n\n fn from(key_str: &'static str) -> Self {\n\n Key(Cow::from(key_str))\n\n }\n\n}\n\n\n\nimpl Into<Cow<'static, str>> for Key {\n", "file_path": "src/api/core.rs", "rank": 83, "score": 26.992576051813614 }, { "content": " /// Converts `Key` instances into `Cow`\n\n fn into(self) -> Cow<'static, str> {\n\n self.0\n\n }\n\n}\n\n\n\nimpl Into<String> for Key {\n\n /// Converts `Key` instances into `String`.\n\n fn into(self) -> String {\n\n self.0.to_string()\n\n }\n\n}\n\n\n\n/// Value types for use in `KeyValue` pairs.\n\n#[derive(Clone, Debug)]\n\npub enum Value {\n\n /// bool values\n\n Bool(bool),\n\n /// i64 values\n\n I64(i64),\n", "file_path": "src/api/core.rs", "rank": 84, "score": 25.383690004227674 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Units denote underlying data units tracked by `Meter`s.\n\n#[derive(Default, Debug)]\n\npub struct Unit(String);\n\n\n\nimpl Unit {\n\n /// Create a new `Unit` from an `Into<String>`\n\n pub fn new<S: Into<String>>(value: S) -> Self {\n\n Unit(value.into())\n\n }\n\n}\n", "file_path": "src/api/core.rs", "rank": 85, "score": 24.042162997059734 }, { "content": " return sdk::Tracer::new(component_name, tracer)\n\n } else if let Some(exporter) = exporter.downcast_ref::<print::Exporter>() {\n\n return sdk::Tracer::new(component_name, tracer)\n\n } else {\n\n fail(\"Only jaeger or print exporters allowed\");\n\n }\n\n }\n\n}\n\n\n\nimpl api::Provider for Provider {\n\n /// This implementation of `api::Provider` produces `sdk::Tracer` instances.\n\n type Tracer = sdk::Tracer;\n\n\n\n /// Find or create `Tracer` instance by name.\n\n fn get_tracer(&self, name: &'static str) -> Self::Tracer {\n\n // Use default value if name is invalid empty string\n\n let component_name = if name.is_empty() {\n\n DEFAULT_COMPONENT_NAME\n\n } else {\n\n name\n", "file_path": "src/sdk/trace/provider.rs", "rank": 86, "score": 22.077262417561823 }, 
{ "content": " fn to_string(&self) -> String {\n\n match self {\n\n Value::Bool(value) => value.to_string(),\n\n Value::I64(value) => value.to_string(),\n\n Value::U64(value) => value.to_string(),\n\n Value::F64(value) => value.to_string(),\n\n Value::String(value) => value.clone(),\n\n Value::Bytes(value) => {\n\n String::from_utf8(value.clone()).unwrap_or_else(|_| String::new())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Cow<'static, str>> for Value {\n\n /// Convert `Value` types into `Cow` for use in `LabelSet`s.\n\n fn into(self) -> Cow<'static, str> {\n\n self.to_string().into()\n\n }\n\n}\n", "file_path": "src/api/core.rs", "rank": 87, "score": 21.763807172864922 }, { "content": " /// u64 values\n\n U64(u64),\n\n /// f64 values\n\n F64(f64),\n\n /// String values\n\n String(String),\n\n /// Byte array values\n\n Bytes(Vec<u8>),\n\n}\n\n\n\nimpl From<&str> for Value {\n\n /// Convenience method for creating a `Value` form a `&str`.\n\n fn from(value_str: &str) -> Self {\n\n Value::String(value_str.to_string())\n\n }\n\n}\n\n\n\nimpl ToString for Value {\n\n /// Convert `Value` types to `String` for use by exporters that only use\n\n /// `String` values.\n", "file_path": "src/api/core.rs", "rank": 88, "score": 21.66909790607231 }, { "content": "\n\n/// `KeyValue` pairs are used by `LabelSet`s and `Span` attributes.\n\n#[derive(Clone, Debug)]\n\npub struct KeyValue {\n\n /// Dimension or event key\n\n pub key: Key,\n\n /// Dimension or event value\n\n pub value: Value,\n\n}\n\n\n\nimpl KeyValue {\n\n /// Create a new `KeyValue` pair.\n\n pub fn new<K, V>(key: K, value: V) -> Self\n\n where\n\n K: Into<Key>,\n\n V: Into<Value>,\n\n {\n\n KeyValue {\n\n key: key.into(),\n\n value: value.into(),\n", "file_path": "src/api/core.rs", "rank": 89, "score": 21.41505139891015 }, { "content": " fn as_any(&self) -> &dyn any::Any {\n\n self\n\n }\n\n}\n\n\n\n/// Jaeger process configuration\n\n#[derive(Debug)]\n\npub struct Process {\n\n /// The name of the traced service that 
all spans will be reported as belonging to.\n\n pub service_name: &'static str,\n\n /// Metadata about the service that will appear in all `Span`s.\n\n pub tags: Vec<api::KeyValue>,\n\n}\n\n\n\nimpl Default for Process {\n\n /// Default `Process` config\n\n fn default() -> Self {\n\n Process {\n\n service_name: DEFAULT_SERVICE_NAME,\n\n tags: Default::default(),\n", "file_path": "src/exporter/trace/jaeger.rs", "rank": 90, "score": 21.11922791841714 }, { "content": "pub struct Provider {\n\n named_tracers: RwLock<HashMap<&'static str, sdk::Tracer>>,\n\n exporters: Vec<Box<dyn SpanExporter<Span = sdk::Span> + 'static>>,\n\n config: sdk::Config,\n\n}\n\n\n\nimpl Default for Provider {\n\n fn default() -> Self {\n\n Provider::builder().build()\n\n }\n\n}\n\n\n\nimpl Provider {\n\n /// Create a new `Provider` builder.\n\n pub fn builder() -> Builder {\n\n Builder::default()\n\n }\n\n\n\n /// Initialize a new `Tracer` by name.\n\n fn initialize_tracer(&self, component_name: &'static str) -> sdk::Tracer {\n", "file_path": "src/sdk/trace/provider.rs", "rank": 91, "score": 20.391778025839006 }, { "content": "use hyper::{header::CONTENT_TYPE, rt::Future, service::service_fn_ok, Body, Response, Server};\n\nuse opentelemetry::api::{\n\n Counter, CounterHandle, Gauge, GaugeHandle, Key, Measure, MeasureHandle, Meter, MetricOptions,\n\n};\n\nuse opentelemetry::exporter::metrics::prometheus::{Encoder, TextEncoder};\n\nuse opentelemetry::sdk;\n\nuse std::time::SystemTime;\n\n\n", "file_path": "examples/hyper.rs", "rank": 92, "score": 20.029758215840996 }, { "content": "//! #### Measure\n\n//!\n\n//! `Measure` describes the type of the individual values recorded by a library. It\n\n//! defines a contract between the library exposing the measurements and an\n\n//! application that will aggregate those individual measurements into a `Metric`.\n\n//! `Measure` is identified by name, description and a unit of values.\n\n//!\n\n//! #### Measurement\n\n//!\n\n//! 
`Measurement` describes a single value to be collected for a `Measure`.\n\n//! `Measurement` is an empty interface in API surface. This interface is defined in\n\n//! SDK.\n\n//!\n\n//! ### Recording metrics with predefined aggregation\n\n//!\n\n//! The base trait for creating new metrics metrics is called `Meter`. It\n\n//! defines basic methods like creating metrics with a name and labels. Structs\n\n//! implementing the various metrics define their aggregation type as well as a structure of\n\n//! individual measurements or Points. API defines the following types of\n\n//! pre-aggregated metrics:\n", "file_path": "src/lib.rs", "rank": 93, "score": 19.53409807991317 }, { "content": " /// Sets a single `Attribute` where the attribute properties are passed as arguments.\n\n ///\n\n /// Note that the OpenTelemetry project documents certain [\"standard\n\n /// attributes\"](https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/data-semantic-conventions.md)\n\n /// that have prescribed semantic meanings.\n\n fn set_attribute(&mut self, attribute: api::KeyValue) {\n\n let _ = self.data.try_lock().map(|mut span_data| {\n\n let api::KeyValue { key, value } = attribute;\n\n span_data.set_tag(|| jaeger::Tag::new(key, value.to_string()));\n\n });\n\n }\n\n\n\n /// Sets the status of the `Span`. If used, this will override the default `Span`\n\n /// status, which is `OK`.\n\n fn set_status(&mut self, _status: String) {\n\n // Ignored for now\n\n }\n\n\n\n /// Updates the `Span`'s name.\n\n fn update_name(&mut self, new_name: String) {\n", "file_path": "src/sdk/trace/span.rs", "rank": 94, "score": 17.99168211739031 }, { "content": "//! # Binary Propagator\n\n//!\n\n//! `BinaryFormat` is a formatter to serialize and deserialize a\n\n//! value into a binary format.\n\n//!\n\n//! `BinaryFormat` MUST expose the APIs that serializes values into bytes,\n\n//! 
and deserializes values from bytes.\n\nuse crate::api;\n\nuse std::convert::TryInto;\n\n\n\n/// Extracts and injects `SpanContext`s from byte arrays.\n\n#[derive(Debug, Default)]\n\npub struct BinaryPropagator {}\n\n\n\nimpl BinaryPropagator {\n\n /// Create a new binary propagator.\n\n pub fn new() -> Self {\n\n BinaryPropagator {}\n\n }\n\n}\n", "file_path": "src/api/distributed_context/binary_propagator.rs", "rank": 95, "score": 16.915437092565526 }, { "content": " MetricOptions::default()\n\n .with_description(\"The HTTP request latencies in seconds.\")\n\n .with_keys(vec![common_key.clone()]),\n\n )\n\n .acquire_handle(&common_labels);\n\n\n\n let http_body_gauge = meter\n\n .new_f64_gauge(\n\n \"example_http_response_size_bytes\",\n\n MetricOptions::default()\n\n .with_description(\"The HTTP response sizes in bytes.\")\n\n .with_keys(vec![common_key]),\n\n )\n\n .acquire_handle(&common_labels);\n\n\n\n let new_service = move || {\n\n let encoder = TextEncoder::new();\n\n let http_counter = http_counter.clone();\n\n let http_body_gauge = http_body_gauge.clone();\n\n let http_req_histogram = http_req_histogram.clone();\n", "file_path": "examples/hyper.rs", "rank": 96, "score": 16.86593255080105 }, { "content": "\n\n let gauge = one_metric.acquire_handle(&common_labels);\n\n\n\n let measure = measure_two.acquire_handle(&common_labels);\n\n\n\n global::trace_provider()\n\n .get_tracer(\"component-main\")\n\n .with_span(\"operation\", move |span| {\n\n span.add_event(\"Nice operation!\".to_string());\n\n span.set_attribute(another_key.string(\"yes\"));\n\n\n\n gauge.set(1.0);\n\n\n\n meter.record_batch(\n\n &common_labels,\n\n vec![one_metric.measurement(1.0), measure_two.measurement(2.0)],\n\n );\n\n\n\n global::trace_provider()\n\n .get_tracer(\"component-bar\")\n", "file_path": "examples/basic.rs", "rank": 97, "score": 16.73063649658037 }, { "content": "\n\n let gauge = one_metric.acquire_handle(&common_labels);\n\n\n\n let measure = 
measure_two.acquire_handle(&common_labels);\n\n\n\n global::trace_provider()\n\n .get_tracer(\"component-main\")\n\n .with_span(\"operation\", move |span| {\n\n span.add_event(\"Nice operation!\".to_string());\n\n span.set_attribute(another_key.string(\"yes\"));\n\n\n\n gauge.set(1.0);\n\n\n\n meter.record_batch(\n\n &common_labels,\n\n vec![one_metric.measurement(1.0), measure_two.measurement(2.0)],\n\n );\n\n\n\n global::trace_provider()\n\n .get_tracer(\"component-bar\")\n", "file_path": "examples/basic_print.rs", "rank": 98, "score": 16.73063649658037 }, { "content": "//! OpenTelemetry global `Tracer` and `Meter` singletons.\n\nuse crate::api::{self, KeyValue, SpanContext, Tracer};\n\nuse std::any::Any;\n\nuse std::sync::{Arc, RwLock};\n\nuse std::time::SystemTime;\n\n\n\n/// Boxed span wraps a generic trait object so that `BoxedTracer`s\n\n/// can return whichever type of span they were configured to use.\n\n#[derive(Debug)]\n\npub struct BoxedSpan(Box<dyn api::Span>);\n\n\n\nimpl api::Span for BoxedSpan {\n\n /// Delegates to inner span.0\n\n fn add_event_with_timestamp(&mut self, message: String, timestamp: SystemTime) {\n\n self.0.add_event_with_timestamp(message, timestamp)\n\n }\n\n\n\n /// Delegates to inner span.\n\n fn add_link(&mut self, link: api::SpanContext) {\n\n self.0.add_link(link)\n", "file_path": "src/global.rs", "rank": 99, "score": 16.59123369573057 } ]
Rust
examples/swdump.rs
a1ien/jaylink
09de031fb4cc3f76f24e4a361977ec6efb6838bd
use jaylink::{Interface, JayLink, SpeedConfig}; use log::trace; use std::{cmp, fmt}; use structopt::StructOpt; const IDLE_CYCLES_BEFORE_ACCESS: usize = 2; #[derive(StructOpt)] struct Opts { #[structopt(long = "serial")] serial: Option<String>, #[structopt(long = "speed")] speed: Option<u16>, } fn main() { env_logger::init(); let opts = Opts::from_args(); if let Err(e) = run(opts) { eprintln!("error: {}", e); std::process::exit(1); } } enum Port { Debug, #[allow(unused)] Access, } #[derive(Debug)] enum SwdError { Probe(jaylink::Error), Fault, NoResponse, Parity, } impl From<jaylink::Error> for SwdError { fn from(e: jaylink::Error) -> Self { SwdError::Probe(e) } } impl fmt::Display for SwdError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { SwdError::Probe(e) => e.fmt(f), SwdError::Fault => f.write_str("target returned FAULT response"), SwdError::NoResponse => f.write_str("no response from target chip"), SwdError::Parity => f.write_str("SWD parity error"), } } } enum Ack { Ok, Wait, Fault, } fn parse_ack(sl: &[bool]) -> Option<Ack> { assert_eq!(sl.len(), 3); trace!("ACK: {:?}", sl); Some(match (sl[0], sl[1], sl[2]) { (true, false, false) => Ack::Ok, (false, true, false) => Ack::Wait, (false, false, true) => Ack::Fault, _ => return None, }) } trait JayLinkExt { fn swj_seq(&mut self) -> jaylink::Result<()>; fn raw_read(&mut self, port: Port, a: u32) -> Result<u32, SwdError>; fn raw_write(&mut self, port: Port, a: u32, value: u32) -> Result<(), SwdError>; } impl JayLinkExt for JayLink { fn swj_seq(&mut self) -> jaylink::Result<()> { let mut dir = Vec::new(); let mut swdio = Vec::new(); swdio.resize(64, true); dir.resize(64, true); let mut seq = 0xE79E; for _ in 0..16 { swdio.push(seq & 0b1 != 0); seq >>= 1; } dir.resize(dir.len() + 16, true); swdio.resize(swdio.len() + 64, true); dir.resize(dir.len() + 64, true); swdio.resize(swdio.len() + 10, false); dir.resize(dir.len() + 10, true); self.swd_io(dir, swdio)?; Ok(()) } fn raw_read(&mut self, 
port: Port, a: u32) -> Result<u32, SwdError> { let mut dir = Vec::new(); let mut swdio = Vec::new(); swdio.resize(swdio.len() + IDLE_CYCLES_BEFORE_ACCESS, false); dir.resize(dir.len() + IDLE_CYCLES_BEFORE_ACCESS, true); let a2 = a & 0b0100 != 0; let a3 = a & 0b1000 != 0; swdio.push(true); swdio.push(match port { Port::Debug => false, Port::Access => true, }); swdio.push(true); swdio.push(a2); swdio.push(a3); let even_parity = (swdio.iter().filter(|b| **b).count() - 1) % 2 != 0; swdio.push(even_parity); swdio.push(false); swdio.push(true); dir.resize(dir.len() + 8, true); swdio.push(false); swdio.push(false); swdio.push(false); swdio.resize(swdio.len() + 32, false); swdio.push(false); dir.resize(dir.len() + 3 + 33, false); loop { let mut response = self.swd_io(dir.iter().copied(), swdio.iter().copied())?; response.split_off(IDLE_CYCLES_BEFORE_ACCESS + 8); trace!("response: {:?}", response); let ack = response.split_off(3).collect::<Vec<_>>(); let value = response.split_off(32).collect::<Vec<_>>(); let parity = response.next().unwrap(); if let Some(ack) = parse_ack(&ack) { match ack { Ack::Ok => { let value = value .iter() .fold(0u32, |accum, bit| (accum >> 1) | (u32::from(*bit) << 31)); trace!("value=0x{:08X}, parity={:?}", value, parity); let expected_parity = value.count_ones() % 2 != 0; if expected_parity == parity { return Ok(value); } else { return Err(SwdError::Parity); } } Ack::Wait => { trace!("WAIT - retrying read access"); continue; } Ack::Fault => { return Err(SwdError::Fault); } } } return Err(SwdError::NoResponse); } } fn raw_write(&mut self, port: Port, a: u32, value: u32) -> Result<(), SwdError> { let mut dir = Vec::new(); let mut swdio = Vec::new(); swdio.resize(swdio.len() + IDLE_CYCLES_BEFORE_ACCESS, false); dir.resize(dir.len() + IDLE_CYCLES_BEFORE_ACCESS, true); let a2 = a & 0b0100 != 0; let a3 = a & 0b1000 != 0; swdio.push(true); swdio.push(match port { Port::Debug => false, Port::Access => true, }); swdio.push(false); swdio.push(a2); 
swdio.push(a3); let even_parity = (swdio.iter().filter(|b| **b).count() - 1) % 2 != 0; swdio.push(even_parity); swdio.push(false); swdio.push(true); dir.resize(dir.len() + 8, true); swdio.push(false); swdio.push(false); swdio.push(false); dir.resize(dir.len() + 3, false); swdio.push(false); swdio.push(false); dir.resize(dir.len() + 2, false); { let mut value = value; for _ in 0..32 { swdio.push(value & 1 != 0); value >>= 1; } swdio.push(value.count_ones() % 2 != 0); dir.resize(dir.len() + 33, true); } loop { let mut response = self.swd_io(dir.iter().copied(), swdio.iter().copied())?; response.split_off(IDLE_CYCLES_BEFORE_ACCESS + 8); trace!("response: {:?}", response); let ack = response.split_off(3).collect::<Vec<_>>(); if let Some(ack) = parse_ack(&ack) { match ack { Ack::Ok => { return Ok(()); } Ack::Wait => { trace!("WAIT - retrying write access"); continue; } Ack::Fault => { return Err(SwdError::Fault); } } } return Err(SwdError::NoResponse); } } } fn run(opts: Opts) -> Result<(), SwdError> { let mut probe = JayLink::open_by_serial(opts.serial.as_deref())?; probe.select_interface(Interface::Swd)?; let speed = opts .speed .map(|khz| SpeedConfig::khz(cmp::min(khz, 0xfffe)).unwrap()) .unwrap_or(probe.read_speeds()?.max_speed_config()); println!("speed configuration: {}", speed); probe.set_speed(speed)?; probe.swj_seq()?; let dpidr = probe.raw_read(Port::Debug, 0b0000)?; println!("DPIDR=0x{:08X}", dpidr); probe.raw_write(Port::Debug, 0b0000, 0x1e)?; let errmask = 0b10100010; let ctrl_stat = probe.raw_read(Port::Debug, 0b0100)?; println!("CTRL/STAT=0x{:08X}", ctrl_stat); if ctrl_stat & errmask != 0 { eprintln!("errors bits set in CTRL/STAT"); } Ok(()) }
use jaylink::{Interface, JayLink, SpeedConfig}; use log::trace; use std::{cmp, fmt}; use structopt::StructOpt; const IDLE_CYCLES_BEFORE_ACCESS: usize = 2; #[derive(StructOpt)] struct Opts { #[structopt(long = "serial")] serial: Option<String>, #[structopt(long = "speed")] speed: Option<u16>, } fn main() { env_logger::init(); let opts = Opts::from_args(); if let Err(e) = run(opts) { eprintln!("error: {}", e); std::process::exit(1); } } enum Port { Debug, #[allow(unused)] Access, } #[derive(Debug)] enum SwdError { Probe(jaylink::Error), Fault, NoResponse, Parity, } impl From<jaylink::Error> for SwdError { fn from(e: jaylink::Error) -> Self { SwdError::Probe(e) } } impl fmt::Display for SwdError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { SwdError::Probe(e) => e.fmt(f), SwdError::Fault => f.write_str("target returned FAULT response"), SwdError::NoResponse => f.write_str("no response from target chip"), SwdError::Parity => f.write_str("SWD parity error"), } } } enum Ack { Ok, Wait, Fault, } fn parse_ack(sl: &[bool]) -> Option<Ack> { assert_eq!(sl.len(), 3); trace!("ACK: {:?}", sl);
} trait JayLinkExt { fn swj_seq(&mut self) -> jaylink::Result<()>; fn raw_read(&mut self, port: Port, a: u32) -> Result<u32, SwdError>; fn raw_write(&mut self, port: Port, a: u32, value: u32) -> Result<(), SwdError>; } impl JayLinkExt for JayLink { fn swj_seq(&mut self) -> jaylink::Result<()> { let mut dir = Vec::new(); let mut swdio = Vec::new(); swdio.resize(64, true); dir.resize(64, true); let mut seq = 0xE79E; for _ in 0..16 { swdio.push(seq & 0b1 != 0); seq >>= 1; } dir.resize(dir.len() + 16, true); swdio.resize(swdio.len() + 64, true); dir.resize(dir.len() + 64, true); swdio.resize(swdio.len() + 10, false); dir.resize(dir.len() + 10, true); self.swd_io(dir, swdio)?; Ok(()) } fn raw_read(&mut self, port: Port, a: u32) -> Result<u32, SwdError> { let mut dir = Vec::new(); let mut swdio = Vec::new(); swdio.resize(swdio.len() + IDLE_CYCLES_BEFORE_ACCESS, false); dir.resize(dir.len() + IDLE_CYCLES_BEFORE_ACCESS, true); let a2 = a & 0b0100 != 0; let a3 = a & 0b1000 != 0; swdio.push(true); swdio.push(match port { Port::Debug => false, Port::Access => true, }); swdio.push(true); swdio.push(a2); swdio.push(a3); let even_parity = (swdio.iter().filter(|b| **b).count() - 1) % 2 != 0; swdio.push(even_parity); swdio.push(false); swdio.push(true); dir.resize(dir.len() + 8, true); swdio.push(false); swdio.push(false); swdio.push(false); swdio.resize(swdio.len() + 32, false); swdio.push(false); dir.resize(dir.len() + 3 + 33, false); loop { let mut response = self.swd_io(dir.iter().copied(), swdio.iter().copied())?; response.split_off(IDLE_CYCLES_BEFORE_ACCESS + 8); trace!("response: {:?}", response); let ack = response.split_off(3).collect::<Vec<_>>(); let value = response.split_off(32).collect::<Vec<_>>(); let parity = response.next().unwrap(); if let Some(ack) = parse_ack(&ack) { match ack { Ack::Ok => { let value = value .iter() .fold(0u32, |accum, bit| (accum >> 1) | (u32::from(*bit) << 31)); trace!("value=0x{:08X}, parity={:?}", value, parity); let expected_parity = 
value.count_ones() % 2 != 0; if expected_parity == parity { return Ok(value); } else { return Err(SwdError::Parity); } } Ack::Wait => { trace!("WAIT - retrying read access"); continue; } Ack::Fault => { return Err(SwdError::Fault); } } } return Err(SwdError::NoResponse); } } fn raw_write(&mut self, port: Port, a: u32, value: u32) -> Result<(), SwdError> { let mut dir = Vec::new(); let mut swdio = Vec::new(); swdio.resize(swdio.len() + IDLE_CYCLES_BEFORE_ACCESS, false); dir.resize(dir.len() + IDLE_CYCLES_BEFORE_ACCESS, true); let a2 = a & 0b0100 != 0; let a3 = a & 0b1000 != 0; swdio.push(true); swdio.push(match port { Port::Debug => false, Port::Access => true, }); swdio.push(false); swdio.push(a2); swdio.push(a3); let even_parity = (swdio.iter().filter(|b| **b).count() - 1) % 2 != 0; swdio.push(even_parity); swdio.push(false); swdio.push(true); dir.resize(dir.len() + 8, true); swdio.push(false); swdio.push(false); swdio.push(false); dir.resize(dir.len() + 3, false); swdio.push(false); swdio.push(false); dir.resize(dir.len() + 2, false); { let mut value = value; for _ in 0..32 { swdio.push(value & 1 != 0); value >>= 1; } swdio.push(value.count_ones() % 2 != 0); dir.resize(dir.len() + 33, true); } loop { let mut response = self.swd_io(dir.iter().copied(), swdio.iter().copied())?; response.split_off(IDLE_CYCLES_BEFORE_ACCESS + 8); trace!("response: {:?}", response); let ack = response.split_off(3).collect::<Vec<_>>(); if let Some(ack) = parse_ack(&ack) { match ack { Ack::Ok => { return Ok(()); } Ack::Wait => { trace!("WAIT - retrying write access"); continue; } Ack::Fault => { return Err(SwdError::Fault); } } } return Err(SwdError::NoResponse); } } } fn run(opts: Opts) -> Result<(), SwdError> { let mut probe = JayLink::open_by_serial(opts.serial.as_deref())?; probe.select_interface(Interface::Swd)?; let speed = opts .speed .map(|khz| SpeedConfig::khz(cmp::min(khz, 0xfffe)).unwrap()) .unwrap_or(probe.read_speeds()?.max_speed_config()); println!("speed configuration: 
{}", speed); probe.set_speed(speed)?; probe.swj_seq()?; let dpidr = probe.raw_read(Port::Debug, 0b0000)?; println!("DPIDR=0x{:08X}", dpidr); probe.raw_write(Port::Debug, 0b0000, 0x1e)?; let errmask = 0b10100010; let ctrl_stat = probe.raw_read(Port::Debug, 0b0100)?; println!("CTRL/STAT=0x{:08X}", ctrl_stat); if ctrl_stat & errmask != 0 { eprintln!("errors bits set in CTRL/STAT"); } Ok(()) }
Some(match (sl[0], sl[1], sl[2]) { (true, false, false) => Ack::Ok, (false, true, false) => Ack::Wait, (false, false, true) => Ack::Fault, _ => return None, })
call_expression
[ { "content": "fn to_io_error(error: Error) -> io::Error {\n\n io::Error::new(io::ErrorKind::Other, error)\n\n}\n\n\n\nimpl<'a> Read for SwoStream<'a> {\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n if self.buf.position() == self.buf.get_ref().len() as u64 {\n\n // At end of buffer. (Blocking) Refill.\n\n self.buf.get_mut().resize(self.buf_size as usize, 0);\n\n loop {\n\n // If we have recently polled, wait until the next poll is useful to avoid 100% CPU\n\n // usage.\n\n let now = Instant::now();\n\n if now < self.next_poll {\n\n thread::sleep(self.next_poll - now);\n\n }\n\n\n\n let buf = self.buf.get_mut();\n\n let data = self.jaylink.swo_read(buf).map_err(to_io_error)?;\n\n self.status.set(self.status.get() | data.status);\n", "file_path": "src/lib.rs", "rank": 4, "score": 91368.08197572059 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let opts = Opts::from_args();\n\n if let Err(e) = run(opts) {\n\n eprintln!(\"error: {}\", e);\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "examples/swodump.rs", "rank": 7, "score": 90081.64979772392 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n if let Err(e) = run() {\n\n eprintln!(\"error: {}\", e);\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "examples/list.rs", "rank": 8, "score": 90081.64979772392 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let opts = Opts::from_args();\n\n if let Err(e) = run(opts) {\n\n eprintln!(\"error: {}\", e);\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "examples/blink.rs", "rank": 9, "score": 90081.64979772392 }, { "content": "fn run(opts: Opts) -> Result<(), Box<dyn Error>> {\n\n let mut probe = JayLink::open_by_serial(opts.serial.as_deref())?;\n\n\n\n let speeds = probe.read_swo_speeds(SwoMode::Uart)?;\n\n eprintln!(\"Max. 
SWO Speed: {} Hz\", speeds.max_speed_hz());\n\n\n\n let frequency = opts.frequency.unwrap_or(speeds.max_speed_hz());\n\n eprintln!(\n\n \"Configuring at {} Hz ({} Bytes/sec) with a {} Byte buffer\",\n\n frequency,\n\n frequency / 8,\n\n opts.probe_buf,\n\n );\n\n eprintln!(\"-----------------------------------------\");\n\n\n\n probe.select_interface(Interface::Swd)?;\n\n probe.swo_stop()?;\n\n let stream = probe.swo_start(SwoMode::Uart, frequency, opts.probe_buf)?;\n\n let mut stream = itm::Decoder::new(stream, false);\n\n let out = std::io::stdout();\n", "file_path": "examples/swodump.rs", "rank": 10, "score": 86949.88321763815 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let opts = Opts::from_args();\n\n if let Err(e) = run(opts) {\n\n eprintln!(\"error: {}\", e);\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "examples/jtag_scan.rs", "rank": 11, "score": 86721.61543101072 }, { "content": "fn detailed_info(dev: &mut JayLink) -> Result<String> {\n\n let caps = dev.capabilities();\n\n let firmware = dev.read_firmware_version()?;\n\n let hw_vers = dev.read_hardware_version()?;\n\n let swo_speeds = dev.read_swo_speeds(SwoMode::Uart)?;\n\n let max_mem_block = dev.read_max_mem_block()?;\n\n let avail_intfs = dev.available_interfaces();\n\n let tgt_voltage = dev.read_target_voltage()?;\n\n\n\n let mut info = String::new();\n\n writeln!(info, \"Capabilities: {:?}\", caps).unwrap();\n\n writeln!(info, \"Firmware: {}\", firmware).unwrap();\n\n writeln!(info, \"HW Version: {}\", hw_vers).unwrap();\n\n writeln!(info, \"Max. SWO Speed: {:?} Hz\", swo_speeds.max_speed_hz()).unwrap();\n\n writeln!(info, \"Max. 
Memblock: {} bytes\", max_mem_block).unwrap();\n\n writeln!(info, \"VTref: {} V\", tgt_voltage as f32 / 1000.0).unwrap();\n\n writeln!(info, \"Interfaces:\").unwrap();\n\n for interface in avail_intfs {\n\n if interface == Interface::Fine {\n\n // FIXME: Selecting FINE hangs the probe.\n", "file_path": "examples/list.rs", "rank": 12, "score": 74236.38902032399 }, { "content": "#[derive(StructOpt)]\n\nstruct Opts {\n\n /// Serial number of the probe to connect to.\n\n #[structopt(long = \"serial\")]\n\n serial: Option<String>,\n\n}\n\n\n", "file_path": "examples/blink.rs", "rank": 14, "score": 72731.91620794588 }, { "content": "#[derive(StructOpt)]\n\nstruct Opts {\n\n /// Serial number of the probe to connect to.\n\n #[structopt(long = \"serial\")]\n\n serial: Option<String>,\n\n\n\n /// Frequency/Baudrate to sample SWO at.\n\n #[structopt(long = \"freq\", short = \"f\")]\n\n frequency: Option<u32>,\n\n\n\n /// Size of on-probe buffer to allocate.\n\n #[structopt(long = \"probe-buf\", default_value = \"1024\")]\n\n probe_buf: u32,\n\n}\n\n\n", "file_path": "examples/swodump.rs", "rank": 15, "score": 72731.91620794588 }, { "content": "#[derive(StructOpt)]\n\nstruct Opts {\n\n /// Serial number of the probe to connect to.\n\n #[structopt(long = \"serial\")]\n\n serial: Option<String>,\n\n\n\n /// Communication speed in kHz.\n\n #[structopt(long = \"speed\", default_value = \"200\")]\n\n speed: u16,\n\n\n\n /// Maximum length of JTAG chain to expect (in bits).\n\n #[structopt(long = \"max-length\", default_value = \"512\")]\n\n max_length: usize,\n\n}\n\n\n", "file_path": "examples/jtag_scan.rs", "rank": 16, "score": 69354.83480504819 }, { "content": "fn run(opts: Opts) -> Result<()> {\n\n let mut probe = JayLink::open_by_serial(opts.serial.as_deref())?;\n\n\n\n // Enable power to enable testing all blinky pins without an ext. 
supply.\n\n // Ignore errors since probes may not support this.\n\n probe.set_kickstart_power(true).ok();\n\n\n\n loop {\n\n probe.set_tms(true)?;\n\n probe.set_tdi(true)?;\n\n probe.set_reset(true)?;\n\n probe.set_trst(true)?;\n\n println!(\"on {} V\", probe.read_target_voltage()? as f32 / 1000.0);\n\n sleep(Duration::from_millis(500));\n\n probe.set_tms(false)?;\n\n probe.set_tdi(false)?;\n\n probe.set_reset(false)?;\n\n probe.set_trst(false)?;\n\n println!(\"off {} V\", probe.read_target_voltage()? as f32 / 1000.0);\n\n sleep(Duration::from_millis(500));\n\n }\n\n}\n", "file_path": "examples/blink.rs", "rank": 17, "score": 64996.10784789705 }, { "content": "fn run(opts: Opts) -> Result<()> {\n\n let mut probe = JayLink::open_by_serial(opts.serial.as_deref())?;\n\n\n\n probe.select_interface(Interface::Jtag)?;\n\n\n\n // Limit speed so invalid 0xffff doesn't appear\n\n let khz = cmp::min(opts.speed, 0xfffe);\n\n probe.set_speed(SpeedConfig::khz(khz).unwrap())?;\n\n\n\n // Reset TAPs\n\n probe.reset_trst()?;\n\n\n\n let mut probe = JtagProbe {\n\n probe,\n\n state: State::TestLogicReset,\n\n };\n\n probe.enter_test_logic_reset()?;\n\n\n\n println!(\"Checking for JTAG connection...\");\n\n probe.check_connection(opts.max_length)?;\n", "file_path": "examples/jtag_scan.rs", "rank": 18, "score": 62654.735764426965 }, { "content": "/// Scans for J-Link USB devices.\n\n///\n\n/// The returned iterator will yield all devices made by Segger, without filtering the product ID.\n\npub fn scan_usb() -> Result<impl Iterator<Item = UsbDeviceInfo>> {\n\n log_libusb_info();\n\n\n\n Ok(rusb::devices()\n\n .jaylink_err()?\n\n .iter()\n\n .filter_map(|dev| {\n\n // This calls `libusb_get_device_descriptor`, which should be unable to fail in any\n\n // libusb version (it only accesses cached descriptor data).\n\n let descr = dev\n\n .device_descriptor()\n\n .expect(\"libusb_get_device_descriptor returned unexpected error\");\n\n\n\n if descr.vendor_id() == VID_SEGGER {\n\n 
Some(UsbDeviceInfo {\n\n vid: descr.vendor_id(),\n\n pid: descr.product_id(),\n\n inner: dev,\n\n })\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<Vec<_>>()\n\n .into_iter())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 55666.22134426789 }, { "content": "/// Parses data register contents containing IDCODE or BYPASS registers in reset (0) state.\n\nfn separate_idcodes(idcodes: &[bool]) -> Result<Vec<Option<IdCode>>> {\n\n // The LSb (transmitted first) of IDCODE is 1, while BYPASS registers are 0 upon reset, which\n\n // allows us to separate them here.\n\n let mut devices = Vec::new();\n\n let mut index = 0;\n\n while index < idcodes.len() {\n\n match idcodes[index] {\n\n false => {\n\n // BYPASS\n\n devices.push(None);\n\n }\n\n true => {\n\n // IDCODE\n\n let mut idcode: u32 = 0x8000_0000;\n\n for _ in 0..31 {\n\n index += 1;\n\n idcode >>= 1;\n\n if idcodes[index] {\n\n idcode |= 0x8000_0000;\n\n }\n", "file_path": "examples/jtag_scan.rs", "rank": 20, "score": 55525.31877698985 }, { "content": "type BoxedError = Box<dyn std::error::Error + Send + Sync>;\n\n\n\n#[allow(unused_imports)] // for intra-doc links\n\nuse crate::{scan_usb, Capabilities, JayLink};\n\n\n\n/// List of specific errors that may occur when using this library.\n\n#[non_exhaustive]\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub enum ErrorKind {\n\n /// A USB transport error occurred.\n\n ///\n\n /// This variant is used for all errors reported by the operating system when performing a USB\n\n /// operation. 
It may indicate that the USB device was unplugged, that another application or an\n\n /// operating system driver is currently using it, or that the current user does not have\n\n /// permission to access it.\n\n Usb,\n\n\n\n /// No (matching) J-Link device was found.\n\n ///\n\n /// This error occurs when calling [`JayLink::open_by_serial`] while no J-Link device is connected\n", "file_path": "src/error.rs", "rank": 21, "score": 51533.6424429366 }, { "content": "#[repr(u8)]\n\n#[allow(dead_code)]\n\nenum Command {\n\n Version = 0x01,\n\n GetSpeeds = 0xC0,\n\n GetMaxMemBlock = 0xD4,\n\n GetCaps = 0xE8,\n\n GetCapsEx = 0xED,\n\n GetHwVersion = 0xF0,\n\n\n\n GetState = 0x07,\n\n GetHwInfo = 0xC1,\n\n GetCounters = 0xC2,\n\n MeasureRtckReact = 0xF6,\n\n\n\n ResetTrst = 0x02,\n\n SetSpeed = 0x05,\n\n SelectIf = 0xC7,\n\n SetKsPower = 0x08,\n\n HwClock = 0xC8,\n\n HwTms0 = 0xC9,\n\n HwTms1 = 0xCA,\n", "file_path": "src/lib.rs", "rank": 22, "score": 46408.80458761472 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum State {\n\n /// Initial state. Reached by shifting 5 or more 1-bits through TMS.\n\n ///\n\n /// Entering this state resets the TAP to the IDCODE instruction if implemented, or BYPASS if\n\n /// not. 
Since the specific bit pattern for IDCODE is implementation-defined, this is the only\n\n /// portable way to execute that instruction.\n\n TestLogicReset,\n\n\n\n /// Run-Test/Idle state.\n\n RunTestIdle,\n\n\n\n /// Shift bits into the Instruction Registers.\n\n ShiftIR,\n\n Exit1IR,\n\n\n\n /// Shift bits into the Data Registers.\n\n ShiftDR,\n\n Exit1DR,\n\n}\n\n\n", "file_path": "examples/jtag_scan.rs", "rank": 23, "score": 44723.72818873095 }, { "content": "#[repr(u8)]\n\nenum SwoParam {\n\n Mode = 0x01,\n\n Baudrate = 0x02,\n\n ReadSize = 0x03,\n\n BufferSize = 0x04,\n\n // FIXME: Do these have hardware/firmware version requirements to be recognized?\n\n}\n\n\n\n/// The supported SWO data encoding modes.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\n#[repr(u32)]\n\n#[non_exhaustive]\n\npub enum SwoMode {\n\n Uart = 0x00000000,\n\n // FIXME: Manchester encoding?\n\n}\n\n\n\nbitflags! {\n\n /// SWO status returned by probe on SWO buffer read.\n\n struct SwoStatus: u32 {\n", "file_path": "src/lib.rs", "rank": 24, "score": 44720.26091171563 }, { "content": "#[repr(u8)]\n\nenum SwoCommand {\n\n Start = 0x64,\n\n Stop = 0x65,\n\n Read = 0x66,\n\n GetSpeeds = 0x6E,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 25, "score": 44720.26091171563 }, { "content": "/// Keeps track of the JTAG state machine, and provides methods to traverse it.\n\nstruct JtagProbe {\n\n probe: JayLink,\n\n state: State,\n\n}\n\n\n\n/// States of the JTAG state machine.\n\n///\n\n/// The JTAG state machine is controlled through the TMS signal (simultanously in all devices in\n\n/// the scan chain).\n\n///\n\n/// This does not model *all* states, since this is just a demo.\n", "file_path": "examples/jtag_scan.rs", "rank": 26, "score": 43467.54417821964 }, { "content": "struct IdCode(u32);\n\n\n\nimpl IdCode {\n\n fn version(&self) -> u8 {\n\n (self.0 >> 28) as u8\n\n }\n\n\n\n fn part_number(&self) -> u16 {\n\n (self.0 >> 12) as u16\n\n }\n\n\n\n fn manufacturer(&self) -> u16 {\n\n 
((self.0 >> 1) & 0b111_1111_1111) as u16\n\n }\n\n\n\n /// Returns the JEDEC Continuation Code page, modulo 16.\n\n fn manufacturer_jedec_cc(&self) -> u8 {\n\n ((self.manufacturer() >> 7) & 0b1111) as u8\n\n }\n\n\n", "file_path": "examples/jtag_scan.rs", "rank": 27, "score": 40146.71704766331 }, { "content": "fn log_libusb_info() {\n\n static DID_LOG: AtomicBool = AtomicBool::new(false);\n\n\n\n if DID_LOG.swap(true, Ordering::Acquire) {\n\n return;\n\n }\n\n\n\n let vers = rusb::version();\n\n debug!(\n\n \"libusb {}.{}.{}.{}{}\",\n\n vers.major(),\n\n vers.minor(),\n\n vers.micro(),\n\n vers.nano(),\n\n vers.rc().map(|rc| format!(\"-{}\", rc)).unwrap_or_default(),\n\n );\n\n\n\n debug!(\"libusb has capability API: {:?}\", rusb::has_capability());\n\n debug!(\"libusb has HID access: {:?}\", rusb::has_hid_access());\n\n debug!(\"libusb has hotplug support: {:?}\", rusb::has_hotplug());\n\n debug!(\n\n \"libusb can detach kernel driver: {:?}\",\n\n rusb::supports_detach_kernel_driver()\n\n );\n\n}\n", "file_path": "src/lib.rs", "rank": 28, "score": 40077.6575842005 }, { "content": "fn run() -> Result<()> {\n\n let list = jaylink::scan_usb()?.collect::<Vec<_>>();\n\n println!(\n\n \"Found {} J-Link device{}.\",\n\n list.len(),\n\n if list.len() == 1 { \"\" } else { \"s\" },\n\n );\n\n\n\n for devinfo in list {\n\n println!();\n\n print!(\n\n \"Bus {:03} Address {:03} Port {:03}: VID={:04x} PID={:04X} – \",\n\n devinfo.bus_number(),\n\n devinfo.address(),\n\n devinfo.port_number(),\n\n devinfo.vid(),\n\n devinfo.pid(),\n\n );\n\n\n\n let dev_data;\n", "file_path": "examples/list.rs", "rank": 29, "score": 39838.654381794804 }, { "content": "#[test]\n\nfn test_readme_deps() {\n\n assert_markdown_deps_updated!(\"README.md\");\n\n}\n\n\n", "file_path": "tests/version-numbers.rs", "rank": 30, "score": 38780.2482444477 }, { "content": "type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;\n\n\n", "file_path": "examples/jtag_scan.rs", "rank": 31, "score": 
38445.676348678244 }, { "content": "#[test]\n\nfn test_html_root_url() {\n\n assert_html_root_url_updated!(\"src/lib.rs\");\n\n}\n", "file_path": "tests/version-numbers.rs", "rank": 32, "score": 37623.37997692342 }, { "content": "pub(crate) trait ResultExt<T, E> {\n\n fn jaylink_err(self) -> Result<T, Error>\n\n where\n\n E: Cause + Into<BoxedError>;\n\n\n\n fn jaylink_err_while(self, while_: &'static str) -> Result<T, Error>\n\n where\n\n E: Cause + Into<BoxedError>;\n\n}\n\n\n\nimpl<T, E> ResultExt<T, E> for Result<T, E> {\n\n fn jaylink_err(self) -> Result<T, Error>\n\n where\n\n E: Cause + Into<BoxedError>,\n\n {\n\n self.map_err(|e| Error::new(E::KIND, e))\n\n }\n\n\n\n fn jaylink_err_while(self, while_: &'static str) -> Result<T, Error>\n\n where\n", "file_path": "src/error.rs", "rank": 33, "score": 27849.6410973678 }, { "content": " while_: Some(while_),\n\n }\n\n }\n\n\n\n fn fmt_while(&self) -> String {\n\n if let Some(while_) = self.while_ {\n\n format!(\" while {}\", while_)\n\n } else {\n\n String::new()\n\n }\n\n }\n\n\n\n /// Returns the [`ErrorKind`] describing this error.\n\n pub fn kind(&self) -> ErrorKind {\n\n self.kind\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/error.rs", "rank": 34, "score": 27848.32262113884 }, { "content": " E: Cause + Into<BoxedError>,\n\n {\n\n self.map_err(|e| Error::with_while(E::KIND, e, while_))\n\n }\n\n}\n\n\n\nmacro_rules! error_mapping {\n\n (\n\n $(\n\n $errty:ty => $kind:ident,\n\n )+\n\n ) => {\n\n $(\n\n impl Cause for $errty {\n\n const KIND: ErrorKind = ErrorKind::$kind;\n\n }\n\n )+\n\n };\n\n}\n\n\n\nerror_mapping! 
{\n\n rusb::Error => Usb,\n\n String => Other,\n\n}\n", "file_path": "src/error.rs", "rank": 35, "score": 27844.860352097374 }, { "content": " // Prefix foreign errors with further explanation where they're coming from\n\n match self.kind {\n\n ErrorKind::Usb => write!(f, \"USB error{}: {}\", self.fmt_while(), self.inner),\n\n _ => {\n\n if let Some(while_) = self.while_ {\n\n write!(f, \"error{}: {}\", while_, self.inner)\n\n } else {\n\n self.inner.fmt(f)\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl std::error::Error for Error {\n\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n\n self.inner.source()\n\n }\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 36, "score": 27844.679333559612 }, { "content": " while_: Option<&'static str>,\n\n}\n\n\n\nimpl Error {\n\n pub(crate) fn new(kind: ErrorKind, inner: impl Into<BoxedError>) -> Self {\n\n Self {\n\n kind,\n\n inner: inner.into(),\n\n while_: None,\n\n }\n\n }\n\n\n\n pub(crate) fn with_while(\n\n kind: ErrorKind,\n\n inner: impl Into<BoxedError>,\n\n while_: &'static str,\n\n ) -> Self {\n\n Self {\n\n kind,\n\n inner: inner.into(),\n", "file_path": "src/error.rs", "rank": 37, "score": 27843.93389663394 }, { "content": "\n\n /// The device does not support the selected target interface.\n\n InterfaceNotSupported,\n\n\n\n /// An unspecified error occurred.\n\n Other,\n\n}\n\n\n\npub(crate) trait Cause {\n\n const KIND: ErrorKind;\n\n}\n\n\n\n/// The error type used by this library.\n\n///\n\n/// Errors can be introspected by the user by calling [`Error::kind`] and inspecting the returned\n\n/// [`ErrorKind`].\n\n#[derive(Debug)]\n\npub struct Error {\n\n kind: ErrorKind,\n\n inner: BoxedError,\n", "file_path": "src/error.rs", "rank": 38, "score": 27843.25385761701 }, { "content": " /// (or no device matching the serial number is connected).\n\n DeviceNotFound,\n\n\n\n /// Automatic device connection failed because multiple devices were found.\n\n ///\n\n /// This error occurs when calling 
[`JayLink::open_by_serial`] without a serial number while\n\n /// multiple J-Link devices are connected. This library will refuse to \"guess\" a device and\n\n /// requires specifying a serial number in this case. The [`scan_usb`] function can also be used\n\n /// to find a specific device to connect to.\n\n MultipleDevicesFound,\n\n\n\n /// A operation was attempted that is not supported by the probe.\n\n ///\n\n /// Some operations are not supported by all firmware/hardware versions, and are instead\n\n /// advertised as optional *capability* bits. This error occurs when the capability bit for an\n\n /// operation isn't set when that operation is attempted.\n\n ///\n\n /// Capabilities can be read by calling [`JayLink::capabilities`], which returns a\n\n /// [`Capabilities`] bitflags struct.\n\n MissingCapability,\n", "file_path": "src/error.rs", "rank": 39, "score": 27840.783533580838 }, { "content": "use std::fmt;\n\n\n", "file_path": "src/error.rs", "rank": 40, "score": 27839.959836599268 }, { "content": "# Changelog\n\n\n\n## Unreleased\n\n\n\nNo changes.\n\n\n\n## [0.2.0 - 2021-05-27](https://github.com/jonas-schievink/jaylink/releases/tag/v0.2.0)\n\n\n\n### New Features\n\n\n\n- `HardwareType` now implements `Display`.\n\n- Add untested and experimental support for additional target interfaces (BDM3,\n\n FINE, PIC32 ICSP, SPI, C2, cJTAG, and Microchip 2-wire JTAG).\n\n- Redesigned the target interface API (breaking change):\n\n - `JayLink::available_interfaces` now returns the set of interfaces instead of an opaque iterator.\n\n- Improved the capabilities API (breaking change):\n\n - A new `Capability` enum represents every capability the library knows about.\n\n - An opaque `Capabilities` struct represents a set of capabilities advertised by a probe.\n\n- `swo_start_uart` is now called `swo_start` and handles future support for\n\n other encodings (breaking change).\n\n- Add a `jtag_scan` example that enumerates a scan chain.\n\n\n\n### Other 
Improvements\n\n\n\n- Improved documentation.\n\n- Improved the `list` example to list all interface speeds.\n\n- Improved the error messages in the `swdump` example.\n\n- Improved speed defaults in `swdump` and `swodump` examples.\n\n- Eagerly fetch supported interfaces and capabilities.\n\n- Update `rusb` to 0.8.\n\n- Stop automatically selecting SWD when `swo_start` is called, to behave consistently.\n\n- Redesign the speed info and configuration API to be easier to use:\n\n - `Speeds` is now `SpeedInfo`\n\n - `SwoSpeeds` is now `SwoSpeedInfo`\n\n - `CommunicationSpeed` is now `SpeedConfig`\n\n - `max_speed` getters were renamed `max_speed_hz`\n\n - A maximum speed `SpeedConfig` can be created via `SpeedInfo::max_speed_config`\n\n\n\n### Bug Fixes\n\n\n\n- Fix JTAG bitcounting logic.\n\n- Fix `BitIter::split_off` logic.\n\n\n\n## [0.1.5 - 2020-08-27](https://github.com/jonas-schievink/jaylink/releases/tag/v0.1.5)\n\n\n\n- Improve error message on Windows, hinting at installing WinUSB.\n\n\n\n## [0.1.4 - 2020-07-26](https://github.com/jonas-schievink/jaylink/releases/tag/v0.1.4)\n\n\n\n### New Features\n\n\n\n- Implement SWO capture support.\n\n- Add functions to calculate the maximum supported transport speed.\n\n\n", "file_path": "CHANGELOG.md", "rank": 41, "score": 16157.459938178936 }, { "content": "# A crate for controlling J-Link debug probes\n\n\n\n[![crates.io](https://img.shields.io/crates/v/jaylink.svg)](https://crates.io/crates/jaylink)\n\n[![docs.rs](https://docs.rs/jaylink/badge.svg)](https://docs.rs/jaylink/)\n\n![CI](https://github.com/jonas-schievink/jaylink/workflows/CI/badge.svg)\n\n\n\nThis crate allows talking to J-Link debug probes attached via USB. 
The probe's\n\npins can be controlled and I/O operations using JTAG or SWD can be performed,\n\nenabling control of target MCUs.\n\n\n\nPlease refer to the [changelog](CHANGELOG.md) to see what changed in the last\n\nreleases.\n\n\n\n## Usage\n\n\n\nAdd an entry to your `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\njaylink = \"0.2.0\"\n\n```\n\n\n\nCheck the [API Documentation](https://docs.rs/jaylink/) for how to use the\n\ncrate's functionality.\n\n\n\n## Rust version support\n\n\n\nThis crate supports the 3 latest stable Rust releases. Bumping the minimum\n\nsupported Rust version (MSRV) is not considered a breaking change as long as\n\nthese 3 versions are still supported.\n\n\n\nThe MSRV is also explicitly tested against in [.travis.yml](.travis.yml).\n", "file_path": "README.md", "rank": 42, "score": 16154.815390931484 }, { "content": "## [0.1.3 - 2020-07-09](https://github.com/jonas-schievink/jaylink/releases/tag/v0.1.3)\n\n\n\nUpdate rusb dependency to version 0.6.2.\n\n\n\n## [0.1.2 - 2020-06-27](https://github.com/jonas-schievink/jaylink/releases/tag/v0.1.2)\n\n\n\nTrim returned firmware version like the official tools.\n\n\n\n## [0.1.1 - 2020-03-01](https://github.com/jonas-schievink/jaylink/releases/tag/v0.1.1)\n\n\n\nFix JTAG I/O command for old J-Links.\n\n\n\n## [0.1.0 - 2019-12-08](https://github.com/jonas-schievink/jaylink/releases/tag/v0.1.0)\n\n\n\nInitial release.\n", "file_path": "CHANGELOG.md", "rank": 43, "score": 16152.25290755598 }, { "content": "# What to do to publish a new release\n\n\n\n1. Ensure all notable changes are in the changelog under \"Unreleased\".\n\n\n\n2. Execute `cargo release <level>` to bump version(s), tag and publish\n\n everything. External subcommand, must be installed with `cargo install\n\n cargo-release`.\n\n \n\n `<level>` can be one of `major|minor|patch`. If this is the first release\n\n (`0.1.0`), use `minor`, since the version starts out as `0.0.0`.\n\n\n\n3. 
Go to the GitHub releases, edit the just-pushed tag. Copy the release notes\n\n from the changelog.\n", "file_path": "RELEASE_PROCESS.md", "rank": 44, "score": 15488.809873511924 }, { "content": "//! Simple SWO capture demo that prints decoded ITM packets to stdout.\n\n//!\n\n//! Note that this does not reconfigure the target MCU to enable the ITM ports, set the speed, or do\n\n//! anything else that might be required before the MCU starts outputting SWO data. It is expected\n\n//! that the firmware will perform these steps.\n\n//!\n\n//! More advanced tooling would do this automatically, but the mechanism involved is\n\n//! vendor-specific, so `jaylink` does not know how to do this.\n\n\n\nuse jaylink::Interface;\n\nuse jaylink::{JayLink, SwoMode};\n\nuse std::error::Error;\n\nuse std::io::Write;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(StructOpt)]\n", "file_path": "examples/swodump.rs", "rank": 48, "score": 19.242534138033232 }, { "content": " pub(crate) fn single(interface: Interface) -> Self {\n\n Self(InterfaceFlags::from_interface(interface))\n\n }\n\n\n\n /// Returns whether `interface` is contained in `self`.\n\n pub fn contains(&self, interface: Interface) -> bool {\n\n self.0.contains(InterfaceFlags::from_interface(interface))\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Interfaces {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl IntoIterator for Interfaces {\n\n type Item = Interface;\n\n type IntoIter = InterfaceIter;\n\n\n", "file_path": "src/interface.rs", "rank": 49, "score": 18.145696284534043 }, { "content": "}\n\n\n\n/// Target communication speed setting.\n\n///\n\n/// This determines the clock frequency of the target communication. 
Supported speeds for the\n\n/// currently selected target interface can be fetched via [`JayLink::read_speeds`].\n\n#[derive(Debug, Copy, Clone)]\n\npub struct SpeedConfig {\n\n raw: u16,\n\n}\n\n\n\nimpl SpeedConfig {\n\n /// Let the J-Link probe decide the speed.\n\n ///\n\n /// Requires the probe to support [`Capability::AdaptiveClocking`].\n\n pub const ADAPTIVE: Self = Self { raw: 0xFFFF };\n\n\n\n /// Manually specify speed in kHz.\n\n ///\n\n /// Returns `None` if the value is the invalid value `0xFFFF`. Note that this doesn't mean that\n", "file_path": "src/lib.rs", "rank": 50, "score": 15.967280674899001 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Debug for JayLink {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"JayLink\")\n\n .field(\"manufacturer\", &self.manufacturer)\n\n .field(\"product\", &self.product)\n\n .field(\"serial\", &self.serial)\n\n .finish()\n\n }\n\n}\n\n\n\n/// A SWO data stream that implements [`std::io::Read`].\n\n///\n\n/// This is one way to consume SWO data. 
The other is to call [`JayLink::swo_read`] after SWO\n\n/// capturing has been started.\n\n///\n\n/// Reading from this stream will block until some data is captured by the probe.\n\n#[derive(Debug)]\n", "file_path": "src/lib.rs", "rank": 52, "score": 14.575234107276117 }, { "content": " if conf != 1 {\n\n return Err(\"another application is accessing the device\".to_string()).jaylink_err();\n\n }\n\n\n\n let mut this = Self {\n\n manufacturer: handle\n\n .read_manufacturer_string_ascii(&descr)\n\n .jaylink_err()?,\n\n product: handle.read_product_string_ascii(&descr).jaylink_err()?,\n\n serial: handle\n\n .read_serial_number_string_ascii(&descr)\n\n .jaylink_err()?,\n\n read_ep,\n\n write_ep,\n\n cmd_buf: RefCell::new(Vec::new()),\n\n caps: Capabilities::from_raw_legacy(0), // dummy value\n\n interface: Interface::Spi, // dummy value, must not be JTAG\n\n interfaces: Interfaces::from_bits_warn(0), // dummy value\n\n handle,\n\n };\n", "file_path": "src/lib.rs", "rank": 53, "score": 13.87515205837685 }, { "content": "//! 
Uses the functions for controlling individual pin states to toggle them periodically.\n\n\n\nuse jaylink::*;\n\nuse std::thread::sleep;\n\nuse std::time::Duration;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(StructOpt)]\n", "file_path": "examples/blink.rs", "rank": 54, "score": 13.833699787559652 }, { "content": " /// 2-wire debugging interface used by Microchip's IS208x MCUs.\n\n Mc2WireJtag = 10,\n\n // (*)\n\n // NOTE: When changing this enum, also change all other places with a (*) in addition to\n\n // anything that fails to compile.\n\n // NOTE 2: Keep the docs in sync with the bitflags below!\n\n }\n\n\n\n flags InterfaceFlags: u32;\n\n);\n\n\n\nimpl Interface {\n\n pub(crate) fn as_u8(self) -> u8 {\n\n self as u8\n\n }\n\n}\n\n\n\nimpl fmt::Display for Interface {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(match self {\n", "file_path": "src/interface.rs", "rank": 55, "score": 13.790472149802145 }, { "content": " );\n\n }\n\n // Hide reserved bits from user-facing output.\n\n capabilities.remove(CapabilityFlags::Reserved0);\n\n Self(capabilities)\n\n }\n\n\n\n /// Determines whether `self` contains capability `cap`.\n\n pub fn contains(&self, cap: Capability) -> bool {\n\n self.0.contains(CapabilityFlags::from_capability(cap))\n\n }\n\n\n\n /// Determines whether `self` contains all capabilities in `caps`.\n\n pub fn contains_all(&self, caps: Capabilities) -> bool {\n\n self.0.contains(caps.0)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Capabilities {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n", "file_path": "src/capabilities.rs", "rank": 56, "score": 13.734396222061182 }, { "content": "//! A small example showcasing how to auto-detect devices on a JTAG scan chain.\n\n//!\n\n//! This will enumerate JTAG devices and print their manufacturer and identification, as well as\n\n//! JTAG tap properties.\n\n//!\n\n//! 
This is mostly a port of [Glasgow]'s `jtag_probe` module.\n\n//!\n\n//! [Glasgow]: https://github.com/GlasgowEmbedded/glasgow\n\n\n\nuse std::{cmp, iter};\n\n\n\nuse jaylink::{BitIter, Interface, JayLink, SpeedConfig};\n\nuse structopt::StructOpt;\n\n\n", "file_path": "examples/jtag_scan.rs", "rank": 57, "score": 13.683196023773421 }, { "content": " /// every other value will be accepted by the device.\n\n pub fn khz(khz: u16) -> Option<Self> {\n\n if khz == 0xFFFF {\n\n None\n\n } else {\n\n Some(Self { raw: khz })\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for SpeedConfig {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n if self.raw == Self::ADAPTIVE.raw {\n\n f.write_str(\"adaptive\")\n\n } else {\n\n write!(f, \"{} kHz\", self.raw)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 58, "score": 13.460418734169133 }, { "content": " Error::new(ErrorKind::DeviceNotFound, message)\n\n })?;\n\n\n\n if devices.next().is_some() {\n\n let msg = if let Some(serial) = serial {\n\n format!(\"found multiple devices matching serial {}\", serial)\n\n } else {\n\n \"multiple devices found (specify serial number to select one)\".to_string()\n\n };\n\n return Err(Error::new(ErrorKind::MultipleDevicesFound, msg));\n\n }\n\n\n\n Ok(first)\n\n }\n\n\n\n /// Opens a specific J-Link USB device.\n\n ///\n\n /// **Note**: Probes remember their selected interfaces between reconnections, so it is\n\n /// recommended to always call [`JayLink::select_interface`] after opening a probe.\n\n pub fn open_usb(usb_device: UsbDeviceInfo) -> Result<Self> {\n", "file_path": "src/lib.rs", "rank": 60, "score": 13.04203607950728 }, { "content": " interface: Interface,\n\n\n\n manufacturer: String,\n\n product: String,\n\n serial: String,\n\n}\n\n\n\nimpl JayLink {\n\n /// Opens an attached J-Link device by its serial number.\n\n ///\n\n /// If `serial` is `None`, this will open the only attached J-Link device, and return an error\n\n /// of type 
[`ErrorKind::MultipleDevicesFound`] when more than one is attached. This is usually\n\n /// the desired behavior of robust applications.\n\n ///\n\n /// **Note**: Probes remember their selected interfaces between reconnections, so it is\n\n /// recommended to always call [`JayLink::select_interface`] after opening a probe.\n\n pub fn open_by_serial(serial: Option<&str>) -> Result<Self> {\n\n let mut devices = scan_usb()?.filter_map(|usb_device| {\n\n let dev = match usb_device.open() {\n\n Ok(dev) => dev,\n", "file_path": "src/lib.rs", "rank": 61, "score": 13.021932088049773 }, { "content": " let mut out = out.lock();\n\n\n\n loop {\n\n match stream.read_packet() {\n\n Ok(packet) => {\n\n if let itm::packet::Kind::Instrumentation(i) = packet.kind() {\n\n out.write_all(i.payload())?;\n\n }\n\n }\n\n Err(e) => {\n\n eprintln!(\"ITM decode error: {}\", e);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples/swodump.rs", "rank": 62, "score": 12.836642488151693 }, { "content": "#![allow(non_upper_case_globals)]\n\n\n\nuse std::fmt;\n\n\n\nenum_and_set!(\n\n /// List of target interfaces.\n\n ///\n\n /// Note that this library might not support all of them, despite listing them here.\n\n #[non_exhaustive]\n\n #[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\n pub enum Interface {\n\n /// JTAG interface (IEEE 1149.1). 
Supported by most J-Link probes (some embedded J-Links might\n\n /// only support SWD).\n\n Jtag = 0,\n\n /// SWD interface (Serial Wire Debug), used by most Cortex-M chips, and supported by almost all\n\n /// J-Link probes.\n\n Swd = 1,\n\n /// Background Debug Mode 3, a single-wire debug interface used on some NXP microcontrollers.\n\n Bdm3 = 2,\n\n /// FINE, a two-wire debugging interface used by Renesas RX MCUs.\n", "file_path": "src/interface.rs", "rank": 63, "score": 12.619034360222463 }, { "content": " fn into_iter(self) -> Self::IntoIter {\n\n InterfaceIter {\n\n interfaces: self,\n\n next: 0,\n\n }\n\n }\n\n}\n\n\n\n/// Iterator over supported [`Interface`]s.\n\n#[derive(Debug)]\n\npub struct InterfaceIter {\n\n interfaces: Interfaces,\n\n next: usize,\n\n}\n\n\n\nimpl Iterator for InterfaceIter {\n\n type Item = Interface;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n", "file_path": "src/interface.rs", "rank": 64, "score": 12.570184510367135 }, { "content": " /// When the selected target interface is switched (by calling [`JayLink::select_interface`], or\n\n /// any API method that automatically selects an interface), the communication speed is reset to\n\n /// some unspecified default value.\n\n pub fn set_speed(&mut self, speed: SpeedConfig) -> Result<()> {\n\n if speed.raw == SpeedConfig::ADAPTIVE.raw {\n\n self.require_capability(Capability::AdaptiveClocking)?;\n\n }\n\n\n\n let mut buf = [Command::SetSpeed as u8, 0, 0];\n\n buf[1..3].copy_from_slice(&speed.raw.to_le_bytes());\n\n self.write_cmd(&buf)?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Reads the target voltage measured on the `VTref` pin, in millivolts.\n\n ///\n\n /// In order to use the J-Link, this voltage must be present, since it will be used as the level\n\n /// of the I/O signals to the target.\n\n pub fn read_target_voltage(&self) -> Result<u16> {\n", "file_path": "src/lib.rs", "rank": 65, "score": 12.452757216220448 }, { "content": "\n\n let mut buf = [0; 32];\n\n 
self.read(&mut buf)?;\n\n let real_caps = Capabilities::from_raw_ex(buf);\n\n if !real_caps.contains_all(caps) {\n\n return Err(format!(\n\n \"ext. caps are not a superset of legacy caps (legacy: {:?}, ex: {:?})\",\n\n caps, real_caps\n\n ))\n\n .jaylink_err();\n\n }\n\n debug!(\"extended caps: {:?}\", real_caps);\n\n caps = real_caps;\n\n } else {\n\n debug!(\"extended caps not supported\");\n\n }\n\n\n\n self.caps = caps;\n\n Ok(())\n\n }\n", "file_path": "src/lib.rs", "rank": 67, "score": 12.240957172106805 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let s = self\n\n .clone()\n\n .map(|bit| if bit { '1' } else { '0' })\n\n .collect::<String>();\n\n write!(f, \"BitIter({})\", s)\n\n }\n\n}\n\n\n\npub(crate) trait IteratorExt: Sized {\n\n fn collapse_bytes(self) -> ByteIter<Self>;\n\n}\n\n\n\nimpl<I: Iterator<Item = bool>> IteratorExt for I {\n\n fn collapse_bytes(self) -> ByteIter<Self> {\n\n ByteIter { inner: self }\n\n }\n\n}\n\n\n\npub(crate) struct ByteIter<I> {\n", "file_path": "src/bits.rs", "rank": 68, "score": 12.051610863446435 }, { "content": " }\n\n}\n\n\n\n/// J-Link communication speed info.\n\n#[derive(Debug)]\n\npub struct SpeedInfo {\n\n base_freq: u32,\n\n min_div: u16,\n\n}\n\n\n\nimpl SpeedInfo {\n\n /// Returns the maximum supported speed for target communication (in Hz).\n\n pub fn max_speed_hz(&self) -> u32 {\n\n self.base_freq / u32::from(self.min_div)\n\n }\n\n\n\n /// Returns a `SpeedConfig` that configures the fastest supported speed.\n\n pub fn max_speed_config(&self) -> SpeedConfig {\n\n let khz = cmp::min(self.max_speed_hz() / 1000, 0xFFFE);\n\n SpeedConfig::khz(khz.try_into().unwrap()).unwrap()\n", "file_path": "src/lib.rs", "rank": 69, "score": 11.757219888095428 }, { "content": " if self.capabilities().contains(cap) {\n\n Ok(())\n\n } else {\n\n Err(Error::new(\n\n ErrorKind::MissingCapability,\n\n format!(\"device is missing capabilities ({:?}) for operation\", cap),\n\n ))\n\n }\n\n }\n\n\n\n 
fn require_interface_supported(&self, intf: Interface) -> Result<()> {\n\n if self.interfaces.contains(intf) {\n\n Ok(())\n\n } else {\n\n Err(Error::new(\n\n ErrorKind::InterfaceNotSupported,\n\n format!(\"probe does not support target interface {:?}\", intf),\n\n ))\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 70, "score": 11.676570437870172 }, { "content": "use std::fmt;\n\n\n\n/// An iterator over a received bit stream.\n\n#[derive(Clone)]\n\npub struct BitIter<'a> {\n\n buf: &'a [u8],\n\n next_bit: u8,\n\n bits_left: usize,\n\n}\n\n\n\nimpl<'a> BitIter<'a> {\n\n pub(crate) fn new(buf: &'a [u8], total_bits: usize) -> Self {\n\n assert!(\n\n buf.len() * 8 >= total_bits,\n\n \"cannot pull {} bits out of {} bytes\",\n\n total_bits,\n\n buf.len()\n\n );\n\n\n\n Self {\n", "file_path": "src/bits.rs", "rank": 71, "score": 11.622863210707433 }, { "content": " let dev = match devinfo.open() {\n\n Ok(dev) => {\n\n dev_data = format!(\n\n \"{} {} (Serial {})\",\n\n dev.manufacturer_string(),\n\n dev.product_string(),\n\n dev.serial_string(),\n\n );\n\n Some(dev)\n\n }\n\n Err(e) => {\n\n dev_data = format!(\"<{}>\", e.to_string());\n\n None\n\n }\n\n };\n\n\n\n println!(\"{}\", dev_data);\n\n\n\n if let Some(mut dev) = dev {\n\n // Print detailed information read from the device\n", "file_path": "examples/list.rs", "rank": 72, "score": 11.512390584532916 }, { "content": " Err(_) => return None,\n\n };\n\n\n\n if let Some(serial) = serial {\n\n if dev.serial_string() == serial {\n\n Some(dev)\n\n } else {\n\n None\n\n }\n\n } else {\n\n Some(dev)\n\n }\n\n });\n\n\n\n let first = devices.next().ok_or_else(|| {\n\n let message = if let Some(serial ) = serial {\n\n format!(\"no J-Link device with serial {} was found (make sure your current user has permissions to access it)\", serial)\n\n } else {\n\n \"no J-Link devices found (make sure your current user has permissions to access them)\".to_string()\n\n };\n", "file_path": "src/lib.rs", "rank": 73, "score": 
11.359119427071187 }, { "content": " } else {\n\n self.next_bit = 0;\n\n self.buf = &self.buf[1..];\n\n }\n\n\n\n self.bits_left -= 1;\n\n Some(bit)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n (self.bits_left, Some(self.bits_left))\n\n }\n\n}\n\n\n\nimpl ExactSizeIterator for BitIter<'_> {}\n\n\n\nimpl fmt::Debug for BitIter<'_> {\n", "file_path": "src/bits.rs", "rank": 74, "score": 11.341110050438086 }, { "content": " /// Returns the manufacturer string stored in the device descriptor.\n\n pub fn manufacturer_string(&self) -> &str {\n\n &self.manufacturer\n\n }\n\n\n\n /// Returns the product string stored in the device descriptor.\n\n pub fn product_string(&self) -> &str {\n\n &self.product\n\n }\n\n\n\n /// Returns the serial number string stored in the device descriptor.\n\n ///\n\n /// This serial number string can be passed to [`JayLink::open_by_serial`] to open a specific\n\n /// J-Link device.\n\n pub fn serial_string(&self) -> &str {\n\n &self.serial\n\n }\n\n\n\n fn buf(&self, len: usize) -> RefMut<'_, Vec<u8>> {\n\n let mut vec = self.cmd_buf.borrow_mut();\n", "file_path": "src/lib.rs", "rank": 76, "score": 11.17019761407007 }, { "content": "\n\n fn require_interface_selected(&self, intf: Interface) -> Result<()> {\n\n if self.interface == intf {\n\n Ok(())\n\n } else {\n\n Err(Error::new(\n\n ErrorKind::Other,\n\n format!(\"interface {} must be selected for this operation (currently using interface {})\", intf, self.interface),\n\n ))\n\n }\n\n }\n\n\n\n /// Reads the firmware version string from the device.\n\n pub fn read_firmware_version(&self) -> Result<String> {\n\n self.write_cmd(&[Command::Version as u8])?;\n\n\n\n let mut buf = [0; 2];\n\n self.read(&mut buf)?;\n\n let num_bytes = u16::from_le_bytes(buf);\n\n let mut buf = self.buf(num_bytes.into());\n", "file_path": "src/lib.rs", "rank": 77, "score": 11.03106616024651 }, { "content": "\n\n let mut buf = [0; 4];\n\n self.read(&mut buf)?;\n\n\n\n 
Ok(u32::from_le_bytes(buf))\n\n }\n\n\n\n /// Returns the capabilities advertised by the probe.\n\n pub fn capabilities(&self) -> Capabilities {\n\n self.caps\n\n }\n\n\n\n /// Returns the set of target interfaces supported by the probe.\n\n pub fn available_interfaces(&self) -> Interfaces {\n\n self.interfaces\n\n }\n\n\n\n /// Reads the currently selected target interface.\n\n ///\n\n /// **Note**: There is no guarantee that the returned interface is actually supported (ie. it\n", "file_path": "src/lib.rs", "rank": 78, "score": 10.950959914215849 }, { "content": " }\n\n\n\n /// Scans the data register contents and determines their total length.\n\n ///\n\n /// Returns the old DR contents.\n\n ///\n\n /// Note that DR length can change depending on the active instruction.\n\n fn scan_dr(&mut self, max_length: usize) -> Result<Vec<bool>> {\n\n self.enter_shift_dr()?;\n\n\n\n let mut original_contents = None;\n\n let res = (|| -> Result<_> {\n\n // Fill all data registers with 1s, then flush with 0s and see how many 1-bits were\n\n // retained.\n\n let data_1 = self.shift_tdio(&vec![true; max_length], false)?;\n\n original_contents = Some(data_1.collect::<Vec<_>>());\n\n let mut data_0 = self.shift_tdio(&vec![false; max_length], false)?;\n\n match data_0.position(|bit| !bit) {\n\n Some(i) => Ok(i),\n\n None => Err(format!(\"total DR length longer than {} bits\", max_length).into()),\n", "file_path": "examples/jtag_scan.rs", "rank": 79, "score": 10.948985156034036 }, { "content": " let info = match detailed_info(&mut dev) {\n\n Ok(info) => info,\n\n Err(e) => format!(\"<{}>\", e),\n\n };\n\n\n\n // This is multi-line info\n\n for (i, line) in info.lines().enumerate() {\n\n if i == 0 {\n\n print!(\"⤷ \");\n\n } else {\n\n print!(\" \");\n\n }\n\n\n\n println!(\"{}\", line);\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/list.rs", "rank": 80, "score": 10.867858264324026 }, { "content": "use jaylink::*;\n\nuse std::fmt::Write;\n\n\n", "file_path": 
"examples/list.rs", "rank": 81, "score": 10.775581076727935 }, { "content": "#[macro_use]\n\nmod macros;\n\nmod bits;\n\nmod capabilities;\n\nmod error;\n\nmod interface;\n\n\n\npub use self::bits::BitIter;\n\npub use self::capabilities::{Capabilities, Capability};\n\npub use self::error::{Error, ErrorKind};\n\npub use self::interface::{Interface, InterfaceIter, Interfaces};\n\n\n\nuse self::bits::IteratorExt as _;\n\nuse self::error::ResultExt as _;\n\nuse bitflags::bitflags;\n\nuse byteorder::{LittleEndian, ReadBytesExt};\n\nuse io::Cursor;\n\nuse log::{debug, trace, warn};\n\nuse std::cell::{Cell, RefCell, RefMut};\n\nuse std::convert::{TryFrom, TryInto};\n", "file_path": "src/lib.rs", "rank": 83, "score": 10.488915851749995 }, { "content": "use std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::time::{Duration, Instant};\n\nuse std::{\n\n cmp, fmt,\n\n io::{self, Read},\n\n ops::Deref,\n\n thread,\n\n};\n\n\n\n/// A result type with the error hardwired to [`Error`].\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\nconst VID_SEGGER: u16 = 0x1366;\n\n\n\nconst TIMEOUT_DEFAULT: Duration = Duration::from_millis(500);\n\n\n\n#[repr(u8)]\n\n#[allow(dead_code)]\n", "file_path": "src/lib.rs", "rank": 84, "score": 10.359037983317384 }, { "content": "impl State {\n\n fn set(&mut self, to: Self) {\n\n log::trace!(\"{:?} -> {:?}\", self, to);\n\n *self = to;\n\n }\n\n}\n\n\n\nimpl JtagProbe {\n\n /// Sends the bits in `tms` to the TMS input of the first device in the chain, while sending\n\n /// 0-bits through TDI.\n\n fn shift_tms(&mut self, tms: &[bool]) -> Result<()> {\n\n self.probe\n\n .jtag_io(tms.iter().copied(), iter::repeat(false).take(tms.len()))?;\n\n Ok(())\n\n }\n\n\n\n fn enter_test_logic_reset(&mut self) -> Result<()> {\n\n // Reset the JTAG state machine to Test-Logic-Reset. 
Sending enough (5 or more) 1-bits\n\n // through TMS will eventually end up in that state, regardless of what the current state\n\n // is.\n", "file_path": "examples/jtag_scan.rs", "rank": 85, "score": 10.160754374256523 }, { "content": " Ok(())\n\n }\n\n\n\n fn read(&self, buf: &mut [u8]) -> Result<()> {\n\n let mut total = 0;\n\n\n\n while total < buf.len() {\n\n let buf = &mut buf[total..];\n\n let bytes = self\n\n .handle\n\n .read_bulk(self.read_ep, buf, TIMEOUT_DEFAULT)\n\n .jaylink_err_while(\"reading from device\")?;\n\n total += bytes;\n\n }\n\n\n\n trace!(\"read {} bytes: {:x?}\", buf.len(), buf);\n\n Ok(())\n\n }\n\n\n\n fn require_capability(&self, cap: Capability) -> Result<()> {\n", "file_path": "src/lib.rs", "rank": 86, "score": 10.00755696522451 }, { "content": "}\n\n\n\n/// The hardware/product type of the device.\n\n#[non_exhaustive]\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub enum HardwareType {\n\n JLink,\n\n JTrace,\n\n Flasher,\n\n JLinkPro,\n\n}\n\n\n\nimpl fmt::Display for HardwareType {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(match self {\n\n HardwareType::JLink => \"J-Link\",\n\n HardwareType::JTrace => \"J-Trace\",\n\n HardwareType::Flasher => \"J-Flash\",\n\n HardwareType::JLinkPro => \"J-Link Pro\",\n\n })\n", "file_path": "src/lib.rs", "rank": 87, "score": 9.90427775027228 }, { "content": " \"probe I/O command returned error code {:#x}\",\n\n buf[num_bytes]\n\n ))\n\n .jaylink_err();\n\n }\n\n\n\n drop(buf);\n\n\n\n Ok(BitIter::new(\n\n &self.cmd_buf.get_mut()[..num_bytes],\n\n dir_bit_count,\n\n ))\n\n }\n\n\n\n /// Starts capturing SWO data.\n\n ///\n\n /// This will switch the probe to SWD interface mode if necessary (required for SWO capture).\n\n ///\n\n /// Requires the probe to support [`Capability::Swo`].\n\n ///\n", "file_path": "src/lib.rs", "rank": 89, "score": 9.803308708176175 }, { "content": " if has_status_byte && buf[read_len - 1] != 0 {\n\n return Err(Error::new(\n\n 
ErrorKind::Other,\n\n format!(\n\n \"probe I/O command returned error code {:#x}\",\n\n buf[read_len - 1]\n\n ),\n\n ));\n\n }\n\n\n\n drop(buf);\n\n\n\n Ok(BitIter::new(\n\n &self.cmd_buf.get_mut()[..num_resp_bytes],\n\n tms_bit_count,\n\n ))\n\n }\n\n\n\n /// Performs an SWD I/O operation.\n\n ///\n", "file_path": "src/lib.rs", "rank": 90, "score": 9.438410358488497 }, { "content": " /// might not be in the list returned by [`JayLink::available_interfaces`]). In particular, some\n\n /// embedded J-Link probes start up with JTAG selected, but only support SWD.\n\n pub fn current_interface(&self) -> Interface {\n\n self.interface\n\n }\n\n\n\n /// Selects the interface to use for talking to the target MCU.\n\n ///\n\n /// Switching interfaces will reset the configured transfer speed, so [`JayLink::set_speed`]\n\n /// needs to be called *after* `select_interface`.\n\n ///\n\n /// This requires the probe to support [`Capability::SelectIf`].\n\n ///\n\n /// **Note**: Selecting a different interface may cause the J-Link to perform target I/O!\n\n pub fn select_interface(&mut self, intf: Interface) -> Result<()> {\n\n if self.interface == intf {\n\n return Ok(());\n\n }\n\n\n\n self.require_capability(Capability::SelectIf)?;\n", "file_path": "src/lib.rs", "rank": 91, "score": 9.425866471665909 }, { "content": " }\n\n}\n\n\n\n/// Supported SWO capture speed info.\n\n#[derive(Debug)]\n\npub struct SwoSpeedInfo {\n\n base_freq: u32,\n\n min_div: u32,\n\n max_div: u32,\n\n\n\n // FIXME: Not sure what these are for.\n\n min_presc: u32,\n\n max_presc: u32,\n\n}\n\n\n\nimpl SwoSpeedInfo {\n\n /// Returns the maximum supported speed for SWO capture (in Hz).\n\n pub fn max_speed_hz(&self) -> u32 {\n\n self.base_freq / self.min_div / cmp::max(1, self.min_presc)\n\n }\n", "file_path": "src/lib.rs", "rank": 92, "score": 9.222931973845798 }, { "content": " Box::new(e)\n\n };\n\n\n\n Error::with_while(ErrorKind::Usb, inner, \"opening USB device\")\n\n })?;\n\n\n\n debug!(\"open_usb: 
device descriptor: {:#x?}\", descr);\n\n\n\n if descr.num_configurations() != 1 {\n\n warn!(\n\n \"device has {} configurations, expected 1\",\n\n descr.num_configurations()\n\n );\n\n }\n\n\n\n let conf = handle\n\n .active_configuration()\n\n .jaylink_err_while(\"reading device configuration\")?;\n\n // Device configurations are 1-indexed, apparently\n\n if conf != 1 {\n", "file_path": "src/lib.rs", "rank": 93, "score": 9.192312656028928 }, { "content": " ///\n\n /// This might not do anything if the pin is not connected to the target. It does not affect\n\n /// non-JTAG target interfaces.\n\n pub fn reset_trst(&mut self) -> Result<()> {\n\n self.write_cmd(&[Command::ResetTrst as u8])\n\n }\n\n\n\n /// Resets the target by temporarily asserting the RESET pin (pin 15).\n\n ///\n\n /// This might not do anything if the RESET pin is not connected to the target.\n\n pub fn reset_target(&mut self) -> Result<()> {\n\n self.write_cmd(&[Command::ResetTarget as u8])\n\n }\n\n\n\n /// Sets the target communication speed.\n\n ///\n\n /// If `speed` is set to [`SpeedConfig::ADAPTIVE`], then the probe has to support\n\n /// [`Capability::AdaptiveClocking`]. Note that adaptive clocking may not work for all target\n\n /// interfaces (eg. SWD).\n\n ///\n", "file_path": "src/lib.rs", "rank": 94, "score": 9.116820462795895 }, { "content": "pub struct SwoStream<'a> {\n\n jaylink: &'a JayLink,\n\n speed: u32,\n\n buf_size: u32,\n\n next_poll: Instant,\n\n /// Internal buffer the size of the on-probe buffer. 
This is filled in one go to avoid\n\n /// performing small reads which may crash the probe.\n\n buf: Cursor<Vec<u8>>,\n\n /// Accumulated SWO errors.\n\n status: Cell<SwoStatus>,\n\n}\n\n\n\nimpl SwoStream<'_> {\n\n /// Returns whether the probe-internal buffer overflowed at some point, and clears the flag.\n\n ///\n\n /// This indicates that some device data was lost, and should be communicated to the end-user.\n\n pub fn did_overrun(&self) -> bool {\n\n let did = self.status.get().contains(SwoStatus::OVERRUN);\n\n self.status.set(self.status.get() & !SwoStatus::OVERRUN);\n\n did\n", "file_path": "src/lib.rs", "rank": 95, "score": 9.104170498636185 }, { "content": " warn!(\n\n \"device in configuration {}, expected 1; changing configuration\",\n\n conf\n\n );\n\n handle.set_active_configuration(1).jaylink_err()?;\n\n }\n\n\n\n let conf = usb_device\n\n .inner\n\n .active_config_descriptor()\n\n .jaylink_err_while(\"reading device configuration descriptor\")?;\n\n debug!(\"scanning {} interfaces\", conf.num_interfaces());\n\n trace!(\"active configuration descriptor: {:#x?}\", conf);\n\n\n\n let mut jlink_intf = None;\n\n for (i, intf) in conf.interfaces().enumerate() {\n\n trace!(\"interface #{} descriptors:\", i + 1);\n\n\n\n for descr in intf.descriptors() {\n\n trace!(\"{:#x?}\", descr);\n", "file_path": "src/lib.rs", "rank": 96, "score": 9.045174896551842 }, { "content": "//! A crate for talking to J-Link debug probes connected via USB.\n\n//!\n\n//! This crate allows access to the vendor-specific USB interface used to control JTAG / SWD\n\n//! operations and other functionality. It does *not* provide access to the virtual COM port\n\n//! functionality (which is a regular CDC device, so no special support is needed).\n\n//!\n\n//! Inspired by [libjaylink] (though this library is not a port).\n\n//!\n\n//! [libjaylink]: https://repo.or.cz/libjaylink.git\n\n//!\n\n//! # Pinout\n\n//!\n\n//! 
J-Link uses a pinout based on the standard 20-pin ARM JTAG connector, extended for SWD\n\n//! compatibility and with pins for UART.\n\n//!\n\n//! JTAG pinout:\n\n//!\n\n//! ```notrust\n\n//! ┌───────────┐\n\n//! VTref │ * 1 2 * │ NC\n", "file_path": "src/lib.rs", "rank": 97, "score": 8.915564443114047 }, { "content": " };\n\n\n\n // Update self\n\n let next_byte = (count + self.next_bit as usize) / 8;\n\n self.next_bit = (count as u8 + self.next_bit) % 8;\n\n self.buf = &self.buf[next_byte..];\n\n self.bits_left -= count;\n\n other\n\n }\n\n}\n\n\n\nimpl Iterator for BitIter<'_> {\n\n type Item = bool;\n\n\n\n fn next(&mut self) -> Option<bool> {\n\n if self.bits_left > 0 {\n\n let byte = self.buf.first().unwrap();\n\n let bit = byte & (1 << self.next_bit) != 0;\n\n if self.next_bit < 7 {\n\n self.next_bit += 1;\n", "file_path": "src/bits.rs", "rank": 98, "score": 8.713466134902966 }, { "content": " }\n\n}\n\n\n\n/// SWO data that was read via [`JayLink::swo_read`].\n\n#[derive(Debug)]\n\npub struct SwoData<'a> {\n\n data: &'a [u8],\n\n status: SwoStatus,\n\n}\n\n\n\nimpl<'a> SwoData<'a> {\n\n /// Returns whether the probe-internal buffer overflowed before the last read.\n\n ///\n\n /// This indicates that some device data was lost.\n\n pub fn did_overrun(&self) -> bool {\n\n self.status.contains(SwoStatus::OVERRUN)\n\n }\n\n}\n\n\n\nimpl<'a> AsRef<[u8]> for SwoData<'a> {\n", "file_path": "src/lib.rs", "rank": 99, "score": 8.656033539845403 } ]
Rust
rust/arrow/benches/buffer_bit_ops.rs
jovany-wang/arrow
1f30466ac7042354de35cc69fd49ced1acd54b38
#[macro_use] extern crate criterion; use criterion::Criterion; extern crate arrow; use arrow::buffer::{Buffer, MutableBuffer}; use arrow::error::ArrowError; use arrow::error::Result; #[cfg(feature = "simd")] use arrow::util::bit_util; use std::borrow::BorrowMut; #[cfg(feature = "simd")] use std::slice::{from_raw_parts, from_raw_parts_mut}; fn create_buffer(size: usize) -> Buffer { let mut result = MutableBuffer::new(size).with_bitset(size, false); for i in 0..size { result.data_mut()[i] = 0b01010101; } result.freeze() } fn bench_and_current_impl(left: &Buffer, right: &Buffer) { criterion::black_box((left & right).unwrap()); } #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))] fn bench_and_packed_simd_chunked_exact(left: &Buffer, right: &Buffer) { criterion::black_box( bitwise_bin_op_simd_helper(&left, &right, |a, b| a & b).unwrap(), ); } fn bench_and_chunked_exact(left: &Buffer, right: &Buffer) { criterion::black_box( bitwise_bin_op_autovec_chunked_helper(&left, &right, |a, b| a & b).unwrap(), ); } fn bench_and_autovec(left: &Buffer, right: &Buffer) { criterion::black_box( bitwise_bin_op_autovec_helper(&left, &right, |a, b| a & b).unwrap(), ); } const AUTOVEC_LANES: usize = 64; fn bitwise_bin_op_autovec_chunked_helper<F>( left: &Buffer, right: &Buffer, op: F, ) -> Result<Buffer> where F: Fn(u8, u8) -> u8, { if left.len() != right.len() { return Err(ArrowError::ComputeError( "Buffers must be the same size to apply Bitwise AND.".to_string(), )); } let mut result = MutableBuffer::new(left.len()).with_bitset(left.len(), false); let mut left_chunks = left.data().chunks_exact(AUTOVEC_LANES); let mut right_chunks = right.data().chunks_exact(AUTOVEC_LANES); let mut result_chunks = result.data_mut().chunks_exact_mut(AUTOVEC_LANES); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut().zip(right_chunks.borrow_mut())) .for_each(|(res, (left, right))| { for i in 0..AUTOVEC_LANES { res[i] = op(left[i], right[i]); } }); result_chunks 
.into_remainder() .iter_mut() .zip( left_chunks .remainder() .iter() .zip(right_chunks.remainder().iter()), ) .for_each(|(res, (left, right))| { *res = op(*left, *right); }); Ok(result.freeze()) } fn bitwise_bin_op_autovec_helper<F>( left: &Buffer, right: &Buffer, op: F, ) -> Result<Buffer> where F: Fn(u8, u8) -> u8, { if left.len() != right.len() { return Err(ArrowError::ComputeError( "Buffers must be the same size to apply Bitwise AND.".to_string(), )); } let mut result = MutableBuffer::new(left.len()).with_bitset(left.len(), false); result .data_mut() .iter_mut() .zip(left.data().iter().zip(right.data().iter())) .for_each(|(res, (left, right))| { *res = op(*left, *right); }); Ok(result.freeze()) } #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))] fn bitwise_bin_op_simd_helper<F>(left: &Buffer, right: &Buffer, op: F) -> Result<Buffer> where F: Fn(packed_simd::u8x64, packed_simd::u8x64) -> packed_simd::u8x64, { if left.len() != right.len() { return Err(ArrowError::ComputeError( "Buffers must be the same size to apply Bitwise AND.".to_string(), )); } let mut result = MutableBuffer::new(left.len()).with_bitset(left.len(), false); let lanes = packed_simd::u8x64::lanes(); for i in (0..left.len()).step_by(lanes) { let left_data = unsafe { from_raw_parts(left.raw_data().add(i), lanes) }; let right_data = unsafe { from_raw_parts(right.raw_data().add(i), lanes) }; let result_slice: &mut [u8] = unsafe { from_raw_parts_mut((result.data_mut().as_mut_ptr() as *mut u8).add(i), lanes) }; unsafe { bit_util::bitwise_bin_op_simd(&left_data, &right_data, result_slice, &op) }; } Ok(result.freeze()) } fn bit_ops_benchmark(c: &mut Criterion) { let left = create_buffer(512); let right = create_buffer(512); c.bench_function("buffer_bit_ops and current impl", |b| { b.iter(|| bench_and_current_impl(&left, &right)) }); #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))] c.bench_function("buffer_bit_ops and packed simd", |b| { 
b.iter(|| bench_and_packed_simd_chunked_exact(&left, &right)) }); c.bench_function("buffer_bit_ops and chunked autovec", |b| { b.iter(|| bench_and_chunked_exact(&left, &right)) }); c.bench_function("buffer_bit_ops and autovec", |b| { b.iter(|| bench_and_autovec(&left, &right)) }); } criterion_group!(benches, bit_ops_benchmark); criterion_main!(benches);
#[macro_use] extern crate criterion; use criterion::Criterion; extern crate arrow; use arrow::buffer::{Buffer, MutableBuffer}; use arrow::error::ArrowError; use arrow::error::Result; #[cfg(feature = "simd")] use arrow::util::bit_util; use std::borrow::BorrowMut; #[cfg(feature = "simd")] use std::slice::{from_raw_parts, from_raw_parts_mut}; fn create_buffer(size: usize) -> Buffer { let mut result = MutableBuffer::new(size).with_bitset(size, false); for i in 0..size { result.data_mut()[i] = 0b01010101; } result.freeze() } fn bench_and_current_impl(left: &Buffer, right: &Buffer) { criterion::black_box((left & right).unwrap()); } #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))] fn bench_and_packed_simd_chunked_exact(left: &Buffer, right: &Buffer) { criterion::black_box( bitwise_bin_op_simd_helper(&left, &right, |a, b| a & b).unwrap(), ); } fn bench_and_chunked_exact(left: &Buffer, right: &Buffer) { criterion::black_box( bitwise_bin_op_autovec_chunked_helper(&left, &right, |a, b| a & b).unwrap(), ); } fn bench_and_autovec(left: &Buffer, right: &Buffer) { criterion::black_box( bitwise_bin_op_autovec_helper(&left, &right, |a, b| a & b).unwrap(), ); } const AUTOVEC_LANES: usize = 64; fn bitwise_bin_op_autovec_chunked_helper<F>( left: &Buffer, right: &Buffer, op: F, ) -> Result<Buffer> where F: Fn(u8, u8) -> u8, { if left.len() != right.len() { return Err(ArrowError::ComputeError( "Buffers must be the same size to apply Bitwise AND.".to_string(), )); } let mut result = MutableBuffer::new(left.len()).with_bitset(left.len(), false); let mut left_chunks = left.data().chunks_exact(AUTOVEC_LANES); let mut right_chunks = right.data().chunks_exact(AUTOVEC_LANES); let mut result_chunks = result.data_mut().chunks_exact_mut(AUTOVEC_LANES); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut().zip(right_chunks.borrow_mut())) .for_each(|(res, (left, right))| { for i in 0..AUTOVEC_LANES { res[i] = op(left[i], right[i]); } }); result_chunks 
.into_remainder() .iter_mut() .zip( left_chunks .remainder() .iter() .zip(right_chunks.remainder().iter()), ) .for_each(|(res, (left, right))| { *res = op(*left, *right); }); Ok(result.freeze()) } fn bitwise_bin_op_autovec_helper<F>( left: &Buffer, right: &Buffer, op: F, ) -> Result<Buffer> where F: Fn(u8, u8) -> u8, { if left.len() != right.len() { return Err(ArrowError::ComputeError( "Buffers must be the same size to apply Bitwise AND.".to_string(), )); } let mut result = MutableBuffer::new(left.len()).with_bitset(left.len(), false); result .data_mut() .iter_mut() .
#[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))] fn bitwise_bin_op_simd_helper<F>(left: &Buffer, right: &Buffer, op: F) -> Result<Buffer> where F: Fn(packed_simd::u8x64, packed_simd::u8x64) -> packed_simd::u8x64, { if left.len() != right.len() { return Err(ArrowError::ComputeError( "Buffers must be the same size to apply Bitwise AND.".to_string(), )); } let mut result = MutableBuffer::new(left.len()).with_bitset(left.len(), false); let lanes = packed_simd::u8x64::lanes(); for i in (0..left.len()).step_by(lanes) { let left_data = unsafe { from_raw_parts(left.raw_data().add(i), lanes) }; let right_data = unsafe { from_raw_parts(right.raw_data().add(i), lanes) }; let result_slice: &mut [u8] = unsafe { from_raw_parts_mut((result.data_mut().as_mut_ptr() as *mut u8).add(i), lanes) }; unsafe { bit_util::bitwise_bin_op_simd(&left_data, &right_data, result_slice, &op) }; } Ok(result.freeze()) } fn bit_ops_benchmark(c: &mut Criterion) { let left = create_buffer(512); let right = create_buffer(512); c.bench_function("buffer_bit_ops and current impl", |b| { b.iter(|| bench_and_current_impl(&left, &right)) }); #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))] c.bench_function("buffer_bit_ops and packed simd", |b| { b.iter(|| bench_and_packed_simd_chunked_exact(&left, &right)) }); c.bench_function("buffer_bit_ops and chunked autovec", |b| { b.iter(|| bench_and_chunked_exact(&left, &right)) }); c.bench_function("buffer_bit_ops and autovec", |b| { b.iter(|| bench_and_autovec(&left, &right)) }); } criterion_group!(benches, bit_ops_benchmark); criterion_main!(benches);
zip(left.data().iter().zip(right.data().iter())) .for_each(|(res, (left, right))| { *res = op(*left, *right); }); Ok(result.freeze()) }
function_block-function_prefix_line
[]
Rust
src/main.rs
MichalGniadek/roguelike-tutorial-2021
69b1b7bac0ed939a25f06cdcaf791fe841a9e197
#![feature(iter_intersperse)] #![feature(option_result_contains)] mod bundles; mod dungeon_crawl; mod world_generation; mod world_map; use bevy::{app::AppExit, prelude::*}; use dungeon_crawl::TurnState; use world_map::Grid; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum AppState { MainMenu, WorldGeneration, DungeonCrawlEnter, DungeonCrawl(TurnState), DungeonCrawlExitToMenu, DungeonCrawlDescend, } #[cfg_attr(target_arch = "wasm32", global_allocator)] #[cfg(target_arch = "wasm32")] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; pub struct UiCamera; fn main() { #[cfg(target_arch = "wasm32")] console_error_panic_hook::set_once(); let mut app = App::build(); app.insert_resource(ClearColor(Color::hex("171717").unwrap())); app.insert_resource(WindowDescriptor { title: String::from("Roguelike"), #[cfg(target_arch = "wasm32")] canvas: Some(String::from("#canv")), ..Default::default() }); app.add_plugins(DefaultPlugins); app.insert_resource(Grid { cell_size: IVec2::new(512, 512), }) .add_startup_system( (|mut commands: Commands| { let mut orto = OrthographicCameraBundle::new_2d(); orto.orthographic_projection.scale = 8.0; commands.spawn_bundle(orto); commands .spawn_bundle(UiCameraBundle::default()) .insert(UiCamera); }) .system(), ); #[cfg(target_arch = "wasm32")] app.add_plugin(bevy_webgl2::WebGL2Plugin); app.add_state(AppState::MainMenu) .add_system_set( SystemSet::on_enter(AppState::MainMenu).with_system(main_menu_ui_create.system()), ) .add_system_set( SystemSet::on_update(AppState::MainMenu).with_system(main_menu_interaction.system()), ) .add_system_set( SystemSet::on_exit(AppState::MainMenu).with_system(main_menu_cleanup.system()), ) .add_plugin(dungeon_crawl::DungeonCrawlPlugin) .add_plugins(world_generation::WorldGenerationPlugins); app.run(); } pub struct MainMenuCanvas; pub enum MainMenuButton { Play, Quit, } pub fn main_menu_interaction( q: Query<(&Interaction, &MainMenuButton)>, mut app_state: ResMut<State<AppState>>, mut 
app_exit_events: EventWriter<AppExit>, ) { for (i, b) in q.iter() { match (i, b) { (Interaction::Clicked, MainMenuButton::Play) => { app_state.set(AppState::WorldGeneration).unwrap(); } (Interaction::Clicked, MainMenuButton::Quit) => app_exit_events.send(AppExit), _ => {} } } } pub fn main_menu_ui_create( mut commands: Commands, asset_server: Res<AssetServer>, mut materials: ResMut<Assets<ColorMaterial>>, ) { commands .spawn_bundle(NodeBundle { style: Style { size: Size::new(Val::Percent(100.0), Val::Percent(100.0)), align_items: AlignItems::Center, justify_content: JustifyContent::Center, flex_direction: FlexDirection::ColumnReverse, ..Default::default() }, material: materials.add(Color::hex("101010").unwrap().into()), ..Default::default() }) .insert(MainMenuCanvas) .with_children(|parent| { parent .spawn_bundle(ButtonBundle { style: Style { margin: Rect::all(Val::Px(50.0)), ..Default::default() }, material: materials.add(Color::hex("101010").unwrap().into()), ..Default::default() }) .insert(MainMenuButton::Play) .with_children(|parent| { parent.spawn_bundle(TextBundle { text: Text::with_section( "PLAY", TextStyle { font: asset_server.load("Roboto/Roboto-Regular.ttf"), font_size: 100.0, color: Color::WHITE, }, TextAlignment::default(), ), ..Default::default() }); }); parent .spawn_bundle(ButtonBundle { style: Style { margin: Rect::all(Val::Px(50.0)), ..Default::default() }, material: materials.add(Color::hex("101010").unwrap().into()), ..Default::default() }) .insert(MainMenuButton::Quit) .with_children(|parent| { parent.spawn_bundle(TextBundle { text: Text::with_section( "QUIT", TextStyle { font: asset_server.load("Roboto/Roboto-Regular.ttf"), font_size: 100.0, color: Color::WHITE, }, TextAlignment::default(), ), ..Default::default() }); }); }); } pub fn main_menu_cleanup(mut commands: Commands, q: Query<Entity, With<MainMenuCanvas>>) { commands.entity(q.single().unwrap()).despawn_recursive(); }
#![feature(iter_intersperse)] #![feature(option_result_contains)] mod bundles; mod dungeon_crawl; mod world_generation; mod world_map; use bevy::{app::AppExit, prelude::*}; use dungeon_crawl::TurnState; use world_map::Grid; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum AppState { MainMenu, WorldGeneration, DungeonCrawlEnter, DungeonCrawl(TurnState), DungeonCrawlExitToMenu, DungeonCrawlDescend, } #[cfg_attr(target_arch = "wasm32", global_allocator)] #[cfg(target_arch = "wasm32")] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; pub struct UiCamera; fn main() { #[cfg(target_arch = "wasm32")] console_error_panic_hook::set_once(); let mut app = App::build(); app.insert_resource(ClearColor(Color::hex("171717").unwrap())); app.insert_resource(WindowDescriptor { title: String::from("Roguelike"), #[cfg(target_arch = "wasm32")] canvas: Some(String::from("#canv")), ..Default::default() }); app.add_plugins(DefaultPlugins); app.insert_resource(Grid { cell_size: IVec2::new(512, 512), }) .add_startup_system( (|mut commands: Commands| { let mut orto = OrthographicCameraBundle::new
ttf"), font_size: 100.0, color: Color::WHITE, }, TextAlignment::default(), ), ..Default::default() }); }); parent .spawn_bundle(ButtonBundle { style: Style { margin: Rect::all(Val::Px(50.0)), ..Default::default() }, material: materials.add(Color::hex("101010").unwrap().into()), ..Default::default() }) .insert(MainMenuButton::Quit) .with_children(|parent| { parent.spawn_bundle(TextBundle { text: Text::with_section( "QUIT", TextStyle { font: asset_server.load("Roboto/Roboto-Regular.ttf"), font_size: 100.0, color: Color::WHITE, }, TextAlignment::default(), ), ..Default::default() }); }); }); } pub fn main_menu_cleanup(mut commands: Commands, q: Query<Entity, With<MainMenuCanvas>>) { commands.entity(q.single().unwrap()).despawn_recursive(); }
_2d(); orto.orthographic_projection.scale = 8.0; commands.spawn_bundle(orto); commands .spawn_bundle(UiCameraBundle::default()) .insert(UiCamera); }) .system(), ); #[cfg(target_arch = "wasm32")] app.add_plugin(bevy_webgl2::WebGL2Plugin); app.add_state(AppState::MainMenu) .add_system_set( SystemSet::on_enter(AppState::MainMenu).with_system(main_menu_ui_create.system()), ) .add_system_set( SystemSet::on_update(AppState::MainMenu).with_system(main_menu_interaction.system()), ) .add_system_set( SystemSet::on_exit(AppState::MainMenu).with_system(main_menu_cleanup.system()), ) .add_plugin(dungeon_crawl::DungeonCrawlPlugin) .add_plugins(world_generation::WorldGenerationPlugins); app.run(); } pub struct MainMenuCanvas; pub enum MainMenuButton { Play, Quit, } pub fn main_menu_interaction( q: Query<(&Interaction, &MainMenuButton)>, mut app_state: ResMut<State<AppState>>, mut app_exit_events: EventWriter<AppExit>, ) { for (i, b) in q.iter() { match (i, b) { (Interaction::Clicked, MainMenuButton::Play) => { app_state.set(AppState::WorldGeneration).unwrap(); } (Interaction::Clicked, MainMenuButton::Quit) => app_exit_events.send(AppExit), _ => {} } } } pub fn main_menu_ui_create( mut commands: Commands, asset_server: Res<AssetServer>, mut materials: ResMut<Assets<ColorMaterial>>, ) { commands .spawn_bundle(NodeBundle { style: Style { size: Size::new(Val::Percent(100.0), Val::Percent(100.0)), align_items: AlignItems::Center, justify_content: JustifyContent::Center, flex_direction: FlexDirection::ColumnReverse, ..Default::default() }, material: materials.add(Color::hex("101010").unwrap().into()), ..Default::default() }) .insert(MainMenuCanvas) .with_children(|parent| { parent .spawn_bundle(ButtonBundle { style: Style { margin: Rect::all(Val::Px(50.0)), ..Default::default() }, material: materials.add(Color::hex("101010").unwrap().into()), ..Default::default() }) .insert(MainMenuButton::Play) .with_children(|parent| { parent.spawn_bundle(TextBundle { text: Text::with_section( 
"PLAY", TextStyle { font: asset_server.load("Roboto/Roboto-Regular.
random
[ { "content": "pub fn cleanup_log_and_inventory(mut commands: Commands, inventory: Res<GameData>) {\n\n commands.insert_resource(Logs::default());\n\n for e in inventory.inventory.iter().filter_map(|i| *i) {\n\n commands.entity(e).despawn();\n\n }\n\n commands.insert_resource(GameData::default());\n\n}\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 1, "score": 118371.0918289478 }, { "content": "pub fn cleanup(\n\n // Player might have died so it has additional check\n\n q: Query<Entity, Or<(With<MyCanvas>, With<GridPosition>, With<Player>)>>,\n\n mut commands: Commands,\n\n mut data: ResMut<GameData>,\n\n player: Query<&Health, With<Player>>,\n\n) {\n\n for e in q.iter() {\n\n commands.entity(e).despawn_recursive();\n\n }\n\n commands.remove_resource::<InitiativeOrder>();\n\n commands.remove_resource::<WorldMap>();\n\n data.previous_hp = Some(*player.single().unwrap());\n\n data.floor += 1;\n\n}\n\n\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 5, "score": 97653.70759671494 }, { "content": "pub fn update_floor(mut text: Query<&mut Text, With<MyFloorText>>, data: Res<GameData>) {\n\n text.single_mut().unwrap().sections[0].value = format!(\"Floor {}\", data.floor);\n\n}\n\n\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 6, "score": 92843.54490996516 }, { "content": "pub fn update_details(\n\n mut text: Query<&mut Text, With<MyDetails>>,\n\n names: Query<&Name>,\n\n health: Query<&Health>,\n\n world: Res<WorldMap>,\n\n cursor: Query<&GridPosition, With<Cursor>>,\n\n) {\n\n let grid_pos = cursor.single().unwrap();\n\n\n\n if let Some(tile) = world.tiles.get(grid_pos.x, grid_pos.y) {\n\n if tile.contains(TileFlags::IN_VIEW) {\n\n if let Some(entities) = world.entities.get(grid_pos.x, grid_pos.y) {\n\n let mut details = vec![];\n\n for entity in entities {\n\n let name = names.get(*entity).unwrap().capitalized();\n\n let health = health\n\n .get(*entity)\n\n .map_or(String::from(\"\"), |h| format!(\" ({}/{})\", h.current, h.max));\n\n 
details.push(format!(\"{}{}\", name, health));\n\n }\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 7, "score": 92120.43111336658 }, { "content": "pub fn camera_position(\n\n mut query: QuerySet<(\n\n Query<&Transform, With<Player>>,\n\n Query<&mut Transform, (With<Camera>, Without<UiCamera>)>,\n\n )>,\n\n) {\n\n let mut position = match query.q0_mut().single_mut() {\n\n Ok(position) => position.clone(),\n\n Err(_) => return,\n\n };\n\n let mut camera = query.q1_mut().single_mut().unwrap();\n\n position.translation.z = camera.translation.z;\n\n position.translation.x -= 1200.0;\n\n *camera = position;\n\n}\n\n\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 8, "score": 92120.43111336658 }, { "content": "pub fn update_health(\n\n mut text: Query<&mut Text, With<MyHpText>>,\n\n mut bar: Query<&mut Style, With<MyHpBar>>,\n\n hp: Query<&Health, With<Player>>,\n\n) {\n\n let hp = match hp.single() {\n\n Ok(hp) => hp,\n\n Err(_) => return,\n\n };\n\n\n\n text.single_mut().unwrap().sections[0].value = format!(\"HP: {}/{}\", hp.current, hp.max);\n\n bar.single_mut().unwrap().size.width = Val::Percent(100.0 * hp.current as f32 / hp.max as f32);\n\n}\n\n\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 9, "score": 92120.43111336658 }, { "content": "pub fn update_log(\n\n mut text: Query<&mut Text, With<MyLog>>,\n\n mut messages: EventReader<LogMessage>,\n\n mut log: ResMut<Logs>,\n\n) {\n\n for m in messages.iter() {\n\n log.0.push_front(m.0.clone());\n\n }\n\n log.0.resize(6, String::from(\" \"));\n\n\n\n text.single_mut().unwrap().sections[0].value = log\n\n .0\n\n .iter()\n\n .intersperse(&String::from(\"\\n\"))\n\n .cloned()\n\n .collect();\n\n}\n\n\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 10, "score": 92120.43111336658 }, { "content": "pub fn update_position(\n\n mut query: Query<(&mut Transform, &GridPosition), Changed<GridPosition>>,\n\n grid: Res<Grid>,\n\n) {\n\n for (mut transform, grid_position) in query.iter_mut() {\n\n 
transform.translation.x = (grid_position.x * grid.cell_size.x) as f32;\n\n transform.translation.y = (grid_position.y * grid.cell_size.y) as f32;\n\n }\n\n}\n\n\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 11, "score": 92120.43111336658 }, { "content": "pub fn update_inventory(\n\n mut text: Query<&mut Text, With<MyInventory>>,\n\n inventory: Res<GameData>,\n\n names: Query<&Name>,\n\n) {\n\n let ind = inventory.selected.unwrap_or(usize::MAX);\n\n\n\n let mut inv = vec![];\n\n for (i, e) in inventory.inventory.iter().enumerate() {\n\n inv.push(format!(\n\n \"{} {} {}\",\n\n if i == ind {\n\n String::from(\">>> \")\n\n } else {\n\n format!(\"[{}] \", i + 1)\n\n },\n\n e.map_or(String::from(\"\"), |e| names.get(e).unwrap().capitalized()),\n\n if let Some(item) = *e {\n\n if inventory.armor == Some(item) {\n\n \"(equiped)\"\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 12, "score": 92120.43111336658 }, { "content": "pub fn update_xp(\n\n mut text: Query<&mut Text, With<MyXPText>>,\n\n mut bar: Query<&mut Style, With<MyXPBar>>,\n\n data: Res<GameData>,\n\n) {\n\n text.single_mut().unwrap().sections[0].value = format!(\n\n \"XP: {}/{} LEVEL: {}\",\n\n data.current_xp, data.needed_xp, data.level\n\n );\n\n bar.single_mut().unwrap().size.width =\n\n Val::Percent(100.0 * data.current_xp as f32 / data.needed_xp as f32);\n\n}\n\n\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 13, "score": 92120.43111336658 }, { "content": "pub fn update_cursor(\n\n windows: Res<Windows>,\n\n camera: Query<(&Transform, &OrthographicProjection), (With<Camera>, Without<UiCamera>)>,\n\n grid: Res<Grid>,\n\n inventory: Res<GameData>,\n\n mut cursor: Query<(&mut GridPosition, &mut Visible), With<Cursor>>,\n\n) {\n\n let window = windows.get_primary().unwrap();\n\n\n\n if let Some(pos) = window.cursor_position() {\n\n let size = Vec2::new(window.width(), window.height());\n\n let (camera, orto) = camera.single().unwrap();\n\n let pos = (pos - size / 2.0) * 
orto.scale;\n\n let world_pos = camera.compute_matrix() * pos.extend(0.0).extend(1.0);\n\n let grid_pos =\n\n (vec2(world_pos.x, world_pos.y) / grid.cell_size.as_f32() + vec2(0.5, 0.5)).as_i32();\n\n\n\n *cursor.single_mut().unwrap().0 = GridPosition {\n\n x: grid_pos.x,\n\n y: grid_pos.y,\n\n };\n\n }\n\n\n\n cursor.single_mut().unwrap().1.is_visible = inventory.selected.is_some();\n\n}\n\n\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 14, "score": 92120.43111336658 }, { "content": "pub fn handle_initiative(\n\n mut order: ResMut<InitiativeOrder>,\n\n curr: Query<Entity, With<Initiative>>,\n\n characters: Query<Entity, Or<(With<Player>, With<EnemyAI>)>>,\n\n mut commands: Commands,\n\n) {\n\n if let Ok(entity) = curr.single() {\n\n commands.entity(entity).remove::<Initiative>();\n\n }\n\n\n\n for c in characters.iter() {\n\n if !order.0.contains(&c) {\n\n order.0.push_back(c);\n\n }\n\n }\n\n\n\n if let Some(entity) = order.0.pop_front() {\n\n commands.entity(entity).insert(Initiative);\n\n order.0.push_back(entity);\n\n }\n\n}\n", "file_path": "src/dungeon_crawl/setup.rs", "rank": 15, "score": 74057.21710496547 }, { "content": "pub fn player_fov(\n\n player: Query<&GridPosition, With<Player>>,\n\n mut visible: Query<(&mut Visible, &GridPosition, Option<&Tile>), Without<Cursor>>,\n\n mut tiles: Query<(&mut Handle<ColorMaterial>, &GridPosition), With<Tile>>,\n\n mut world: ResMut<WorldMap>,\n\n) {\n\n let position = match player.single() {\n\n Ok(position) => position.clone(),\n\n Err(_) => return,\n\n };\n\n\n\n for end in fov_circle(position.x, position.y, 4) {\n\n let mut previous = None;\n\n for (x, y) in line_drawing::Bresenham::new((position.x, position.y), end) {\n\n if let Some(&tile) = world.tiles.get(x, y) {\n\n // Don't go through diagonal walls.\n\n if let Some((prev_x, prev_y)) = previous {\n\n if (world.tiles[[prev_x, y]] & world.tiles[[x, prev_y]])\n\n .contains(TileFlags::BLOCKS_VISION)\n\n {\n", "file_path": "src/dungeon_crawl/fov.rs", 
"rank": 16, "score": 74057.21710496547 }, { "content": "pub fn create(\n\n mut commands: Commands,\n\n asset_server: Res<AssetServer>,\n\n mut materials: ResMut<Assets<ColorMaterial>>,\n\n) {\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"convergence-target.png\")),\n\n color: Color::hex(\"EDEDED\").unwrap(),\n\n }),\n\n transform: Transform::from_xyz(0.0, 0.0, 5.0),\n\n ..Default::default()\n\n })\n\n .insert_bundle((GridPosition { x: 10, y: 10 }, Cursor));\n\n\n\n commands\n\n .spawn_bundle(NodeBundle {\n\n style: Style {\n\n size: Size::new(Val::Px(300.0), Val::Percent(100.0)),\n", "file_path": "src/dungeon_crawl/ui/ui_setup.rs", "rank": 17, "score": 72259.98147234262 }, { "content": "pub fn update_world_map(\n\n mut world: ResMut<WorldMap>,\n\n t: Query<(&Tile, &BlocksMovement)>,\n\n m: Query<&BlocksMovement>,\n\n v: Query<&BlocksVision>,\n\n) {\n\n let world_size = world.entities.size();\n\n for x in 0..world_size.x {\n\n for y in 0..world_size.y {\n\n world.tiles[[x, y]] &= TileFlags::EXPLORED;\n\n }\n\n }\n\n\n\n for x in 0..world_size.x {\n\n for y in 0..world_size.y {\n\n if world.entities[[x, y]]\n\n .iter()\n\n .any(|e| matches!(m.get(*e), Ok(&BlocksMovement)))\n\n {\n\n world.tiles[[x, y]] |= TileFlags::BLOCKS_MOVEMENT;\n", "file_path": "src/dungeon_crawl/setup.rs", "rank": 18, "score": 72259.98147234262 }, { "content": "fn paralyzed(\n\n mut paralyzed: Query<(Entity, &mut Paralyzed), With<Initiative>>,\n\n mut evs: EventWriter<Ev>,\n\n mut commands: Commands,\n\n) {\n\n if let Ok((entity, mut paralyzed)) = paralyzed.single_mut() {\n\n paralyzed.0 -= 1;\n\n if paralyzed.0 == 0 {\n\n commands.entity(entity).remove::<Paralyzed>();\n\n } else {\n\n evs.send(Ev::Nothing);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 19, "score": 61792.346315508745 }, { "content": "fn player_control(\n\n mut player_q: Query<\n\n (Entity, &GridPosition),\n\n 
(With<Initiative>, Without<Paralyzed>, With<Player>),\n\n >,\n\n healthy_entities: Query<(), With<Health>>,\n\n mut inventory: ResMut<GameData>,\n\n world: Res<WorldMap>,\n\n keys: Res<Input<KeyCode>>,\n\n buttons: Res<Input<MouseButton>>,\n\n items: Query<(Entity, Option<&GridPosition>, &Item)>,\n\n cursor: Query<&GridPosition, With<Cursor>>,\n\n controllers: Query<Entity, Or<(With<Player>, With<EnemyAI>)>>,\n\n mut evs: EventWriter<Ev>,\n\n) {\n\n let (player_entity, position) = match player_q.single_mut() {\n\n Ok((e, pos)) => (e, pos),\n\n Err(QuerySingleError::NoEntities(_)) => return,\n\n Err(QuerySingleError::MultipleEntities(_)) => panic!(),\n\n };\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 20, "score": 59722.31151652718 }, { "content": "fn handle_evs(\n\n mut events: EventReader<Ev>,\n\n mut positions: Query<&mut GridPosition>,\n\n mut healthy: Query<&mut Health>,\n\n mut world: ResMut<WorldMap>,\n\n names: Query<&Name>,\n\n player: Query<(), With<Player>>,\n\n mut log: EventWriter<LogMessage>,\n\n mut app_state: ResMut<State<AppState>>,\n\n mut data: ResMut<GameData>,\n\n mut visible: Query<&mut Visible>,\n\n mut commands: Commands,\n\n mut order: ResMut<InitiativeOrder>,\n\n) {\n\n let mut evs: VecDeque<Ev> = VecDeque::new();\n\n evs.extend(events.iter());\n\n\n\n let mut next_app_state = None;\n\n if !evs.is_empty() {\n\n next_app_state = Some(AppState::DungeonCrawl(TurnState::WorldUpdate));\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 21, "score": 59722.31151652718 }, { "content": "fn enemy_ai(\n\n enemy: Query<(Entity, &GridPosition), (With<EnemyAI>, With<Initiative>, Without<Paralyzed>)>,\n\n player: Query<(Entity, &GridPosition), With<Player>>,\n\n world: Res<WorldMap>,\n\n inventory: Res<GameData>,\n\n mut evs: EventWriter<Ev>,\n\n) {\n\n let (enemy, position) = match enemy.single() {\n\n Ok(e) => e,\n\n Err(QuerySingleError::NoEntities(_)) => return,\n\n Err(QuerySingleError::MultipleEntities(_)) => panic!(),\n\n };\n\n\n\n if 
world.tiles[*position].contains(TileFlags::IN_VIEW) {\n\n let (player, player_pos) = player.single().unwrap();\n\n if let Some((path, _)) = world.pathfind(*position, *player_pos) {\n\n if path[1] == *player_pos {\n\n let damage = if inventory.armor.is_some() { 1 } else { 2 };\n\n evs.send(Ev::Attack(enemy, player, damage));\n\n } else if !world.tiles[path[1]].contains(TileFlags::BLOCKS_MOVEMENT) {\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 22, "score": 59722.31151652718 }, { "content": "fn split_into_zones(tile_map: &mut Array2D<TileType>) -> usize {\n\n let mut current_fill_number = 0;\n\n for x in 2..MAP_SIZE - 2 {\n\n for y in 2..MAP_SIZE - 2 {\n\n if tile_map[[x, y]] == TileType::Alive(0) {\n\n current_fill_number += 1;\n\n flood_fill(\n\n tile_map,\n\n (x, y),\n\n TileType::Alive(current_fill_number),\n\n Some(10),\n\n );\n\n }\n\n }\n\n }\n\n current_fill_number\n\n}\n\n\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 23, "score": 54617.47301636476 }, { "content": "fn select_largest_cave(tile_map: &mut Array2D<TileType>) -> u32 {\n\n let mut current_fill_number = 0;\n\n let mut max_fill_number = 0;\n\n let mut max_fill_count = 0;\n\n for x in 2..MAP_SIZE - 2 {\n\n for y in 2..MAP_SIZE - 2 {\n\n if tile_map[[x, y]] == TileType::Alive(0) {\n\n current_fill_number += 1;\n\n let count =\n\n flood_fill(tile_map, (x, y), TileType::Alive(current_fill_number), None);\n\n\n\n if max_fill_count < count {\n\n max_fill_count = count;\n\n max_fill_number = current_fill_number;\n\n }\n\n }\n\n }\n\n }\n\n\n\n for x in 2..MAP_SIZE - 2 {\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 24, "score": 53598.670424198994 }, { "content": "fn cellular_automata_steps(map: &mut Array2D<TileType>, iterations: u32) {\n\n let mut map2 = Array2D::<TileType>::with_elem(MAP_SIZE, MAP_SIZE, TileType::Dead);\n\n\n\n for _ in 0..iterations {\n\n for x in 2..MAP_SIZE - 2 {\n\n for y in 2..MAP_SIZE - 2 {\n\n let mut neighbours = 0;\n\n for i 
in -1..=1i32 {\n\n for j in -1..=1i32 {\n\n if i == 0 && j == 0 {\n\n continue;\n\n }\n\n if let TileType::Alive(_) = map[[x + i, y + j]] {\n\n neighbours += 1;\n\n }\n\n }\n\n }\n\n\n\n if map[[x, y]] == TileType::Dead {\n\n if neighbours > 4 {\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 25, "score": 51977.96675015877 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\nenum TileType {\n\n Alive(usize),\n\n Dead,\n\n}\n\n\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 26, "score": 44378.730604111595 }, { "content": "fn cellular_automata(\n\n asset_server: Res<AssetServer>,\n\n mut commands: Commands,\n\n mut materials: ResMut<Assets<ColorMaterial>>,\n\n mut app_state: ResMut<State<AppState>>,\n\n data: Res<GameData>,\n\n) {\n\n let target_size = data.floor_map_size();\n\n let (tile_map, mut zone_entities) = loop {\n\n let mut tile_map = get_random_map();\n\n cellular_automata_steps(&mut tile_map, ITERATIONS);\n\n\n\n let size = select_largest_cave(&mut tile_map);\n\n if size < target_size.0 || size > target_size.1 {\n\n continue;\n\n }\n\n\n\n let zone_count = split_into_zones(&mut tile_map);\n\n if zone_count < 5 {\n\n continue;\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 27, "score": 37118.32558538749 }, { "content": "fn flood_fill(\n\n map: &mut Array2D<TileType>,\n\n pos: (i32, i32),\n\n fill: TileType,\n\n distance: Option<u32>,\n\n) -> u32 {\n\n let mut tiles = VecDeque::new();\n\n tiles.push_back((pos, 0));\n\n\n\n let mut count = 0;\n\n\n\n while !tiles.is_empty() {\n\n let ((x, y), dist) = tiles.pop_front().unwrap();\n\n\n\n if map[[x, y]] != TileType::Alive(0) {\n\n continue;\n\n }\n\n\n\n map[[x, y]] = fill;\n\n count += 1;\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 28, "score": 37118.32558538749 }, { "content": "fn get_zone_entities(\n\n commands: &mut Commands,\n\n asset_server: &Res<AssetServer>,\n\n materials: &mut 
ResMut<Assets<ColorMaterial>>,\n\n data: &GameData,\n\n zone_count: usize,\n\n) -> Vec<Vec<Entity>> {\n\n let mut entities = vec![vec![]; zone_count];\n\n entities[0].push(\n\n commands\n\n .spawn_bundle(PlayerBundle::new(asset_server, materials, data))\n\n .id(),\n\n );\n\n\n\n for _ in 0..data.floor_enemy_count() {\n\n let zone = (random::<usize>() % (zone_count - 1)) + 1;\n\n entities[zone].push(\n\n commands\n\n .spawn_bundle(EnemyBundle::orc(asset_server, materials))\n\n .id(),\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 29, "score": 36232.04539395031 }, { "content": "fn get_random_map() -> Array2D<TileType> {\n\n let mut map = Array2D::<TileType>::with_elem(MAP_SIZE, MAP_SIZE, TileType::Dead);\n\n\n\n for x in 2..MAP_SIZE - 2 {\n\n for y in 2..MAP_SIZE - 2 {\n\n if random::<f32>() < ALIVE_SPAWN_CHANCE {\n\n map[[x, y]] = TileType::Alive(0);\n\n }\n\n }\n\n }\n\n\n\n map\n\n}\n\n\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 30, "score": 29994.13878215139 }, { "content": "use crate::{\n\n dungeon_crawl::{EnemyAI, GameData, Health, Item, Name, Player},\n\n world_map::BlocksMovement,\n\n};\n\nuse bevy::prelude::*;\n\n\n\n#[derive(Bundle)]\n\npub struct EnemyBundle {\n\n #[bundle]\n\n sprite: SpriteBundle,\n\n _e: EnemyAI,\n\n _bm: BlocksMovement,\n\n health: Health,\n\n name: Name,\n\n}\n\n\n\nimpl EnemyBundle {\n\n pub fn orc(asset_server: &AssetServer, materials: &mut ResMut<Assets<ColorMaterial>>) -> Self {\n\n Self {\n\n sprite: SpriteBundle {\n", "file_path": "src/bundles.rs", "rank": 37, "score": 28605.262231798893 }, { "content": " health: data.previous_hp.unwrap_or(Health::new(8, 8)),\n\n name: Name(String::from(\"player\")),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Bundle)]\n\npub struct ItemBundle {\n\n #[bundle]\n\n sprite: SpriteBundle,\n\n item: Item,\n\n name: Name,\n\n}\n\n\n\nimpl ItemBundle {\n\n pub fn item(\n\n item: Item,\n\n asset_server: &AssetServer,\n\n materials: &mut 
ResMut<Assets<ColorMaterial>>,\n\n ) -> Self {\n", "file_path": "src/bundles.rs", "rank": 38, "score": 28601.793017022912 }, { "content": "\n\n pub fn sword(\n\n asset_server: &AssetServer,\n\n materials: &mut ResMut<Assets<ColorMaterial>>,\n\n ) -> Self {\n\n Self {\n\n sprite: SpriteBundle {\n\n material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"gladius.png\")),\n\n color: Color::hex(\"EDEDED\").unwrap(),\n\n }),\n\n transform: Transform::from_xyz(0.0, 0.0, 1.0),\n\n ..Default::default()\n\n },\n\n item: Item::Sword,\n\n name: Name(String::from(\"sword\")),\n\n }\n\n }\n\n\n\n pub fn armor(\n", "file_path": "src/bundles.rs", "rank": 39, "score": 28600.005864211296 }, { "content": " health: Health,\n\n name: Name,\n\n}\n\n\n\nimpl PlayerBundle {\n\n pub fn new(\n\n asset_server: &AssetServer,\n\n materials: &mut ResMut<Assets<ColorMaterial>>,\n\n data: &GameData,\n\n ) -> Self {\n\n Self {\n\n sprite: SpriteBundle {\n\n material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"hooded-figure.png\")),\n\n color: Color::hex(\"EDEDED\").unwrap(),\n\n }),\n\n transform: Transform::from_xyz(0.0, 0.0, 1.0),\n\n ..Default::default()\n\n },\n\n player: Player,\n", "file_path": "src/bundles.rs", "rank": 40, "score": 28599.83505831527 }, { "content": " }\n\n }\n\n\n\n pub fn war_axe(\n\n asset_server: &AssetServer,\n\n materials: &mut ResMut<Assets<ColorMaterial>>,\n\n ) -> Self {\n\n Self {\n\n sprite: SpriteBundle {\n\n material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"battle-axe.png\")),\n\n color: Color::hex(\"EDEDED\").unwrap(),\n\n }),\n\n transform: Transform::from_xyz(0.0, 0.0, 1.0),\n\n ..Default::default()\n\n },\n\n item: Item::WarAxe,\n\n name: Name(String::from(\"war axe\")),\n\n }\n\n }\n", "file_path": "src/bundles.rs", "rank": 41, "score": 28599.20991407901 }, { "content": " ..Default::default()\n\n },\n\n item: Item::ScrollOfLightning,\n\n name: Name(String::from(\"scroll of 
lightning\")),\n\n }\n\n }\n\n\n\n pub fn scroll_of_paralysis(\n\n asset_server: &AssetServer,\n\n materials: &mut ResMut<Assets<ColorMaterial>>,\n\n ) -> Self {\n\n Self {\n\n sprite: SpriteBundle {\n\n material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"scroll-unfurled.png\")),\n\n color: Color::hex(\"EDEDED\").unwrap(),\n\n }),\n\n transform: Transform::from_xyz(0.0, 0.0, 1.0),\n\n ..Default::default()\n\n },\n", "file_path": "src/bundles.rs", "rank": 42, "score": 28599.116357528506 }, { "content": " }),\n\n transform: Transform::from_xyz(0.0, 0.0, 1.0),\n\n ..Default::default()\n\n },\n\n item: Item::HealthPotion,\n\n name: Name(String::from(\"health potion\")),\n\n }\n\n }\n\n\n\n pub fn scroll_of_lightning(\n\n asset_server: &AssetServer,\n\n materials: &mut ResMut<Assets<ColorMaterial>>,\n\n ) -> Self {\n\n Self {\n\n sprite: SpriteBundle {\n\n material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"scroll-unfurled.png\")),\n\n color: Color::hex(\"EDEDED\").unwrap(),\n\n }),\n\n transform: Transform::from_xyz(0.0, 0.0, 1.0),\n", "file_path": "src/bundles.rs", "rank": 43, "score": 28599.070794932784 }, { "content": " item: Item::ScrollOfParalysis,\n\n name: Name(String::from(\"scroll of paralysis\")),\n\n }\n\n }\n\n\n\n pub fn scroll_of_fireball(\n\n asset_server: &AssetServer,\n\n materials: &mut ResMut<Assets<ColorMaterial>>,\n\n ) -> Self {\n\n Self {\n\n sprite: SpriteBundle {\n\n material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"scroll-unfurled.png\")),\n\n color: Color::hex(\"EDEDED\").unwrap(),\n\n }),\n\n transform: Transform::from_xyz(0.0, 0.0, 1.0),\n\n ..Default::default()\n\n },\n\n item: Item::ScrollOfFireball,\n\n name: Name(String::from(\"scroll of fireball\")),\n", "file_path": "src/bundles.rs", "rank": 44, "score": 28598.81315412006 }, { "content": " material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"orc-head.png\")),\n\n color: 
Color::hex(\"DA0037\").unwrap(),\n\n }),\n\n transform: Transform::from_xyz(0.0, 0.0, 1.0),\n\n ..Default::default()\n\n },\n\n _e: EnemyAI,\n\n _bm: BlocksMovement,\n\n health: Health::new(3, 3),\n\n name: Name(String::from(\"orc\")),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Bundle)]\n\npub struct PlayerBundle {\n\n #[bundle]\n\n sprite: SpriteBundle,\n\n player: Player,\n", "file_path": "src/bundles.rs", "rank": 45, "score": 28598.509353318295 }, { "content": " asset_server: &AssetServer,\n\n materials: &mut ResMut<Assets<ColorMaterial>>,\n\n ) -> Self {\n\n Self {\n\n sprite: SpriteBundle {\n\n material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"breastplate.png\")),\n\n color: Color::hex(\"EDEDED\").unwrap(),\n\n }),\n\n transform: Transform::from_xyz(0.0, 0.0, 1.0),\n\n ..Default::default()\n\n },\n\n item: Item::Armor,\n\n name: Name(String::from(\"armor\")),\n\n }\n\n }\n\n}\n", "file_path": "src/bundles.rs", "rank": 46, "score": 28597.81468367353 }, { "content": " match item {\n\n Item::HealthPotion => Self::health_potion(asset_server, materials),\n\n Item::ScrollOfLightning => Self::scroll_of_lightning(asset_server, materials),\n\n Item::ScrollOfParalysis => Self::scroll_of_paralysis(asset_server, materials),\n\n Item::ScrollOfFireball => Self::scroll_of_fireball(asset_server, materials),\n\n Item::Sword => Self::sword(asset_server, materials),\n\n Item::WarAxe => Self::war_axe(asset_server, materials),\n\n Item::Armor => Self::armor(asset_server, materials),\n\n }\n\n }\n\n\n\n pub fn health_potion(\n\n asset_server: &AssetServer,\n\n materials: &mut ResMut<Assets<ColorMaterial>>,\n\n ) -> Self {\n\n Self {\n\n sprite: SpriteBundle {\n\n material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"potion-ball.png\")),\n\n color: Color::hex(\"DA0037\").unwrap(),\n", "file_path": "src/bundles.rs", "rank": 47, "score": 28597.629258046112 }, { "content": "fn fov_circle(x: i32, y: i32, r: i32) -> Vec<(i32, i32)> {\n\n let mut 
points = vec![];\n\n for off in 0..=r {\n\n points.push((x + off, y + r));\n\n points.push((x - off, y + r));\n\n points.push((x + off, y - r));\n\n points.push((x - off, y - r));\n\n points.push((x + r, y + off));\n\n points.push((x - r, y + off));\n\n points.push((x + r, y - off));\n\n points.push((x - r, y - off));\n\n }\n\n for off in 0..=(r / 2) {\n\n points.push((x + off, y + r + 1));\n\n points.push((x - off, y + r + 1));\n\n points.push((x + off, y - r - 1));\n\n points.push((x - off, y - r - 1));\n\n points.push((x + r + 1, y + off));\n\n points.push((x - r - 1, y + off));\n\n points.push((x + r + 1, y - off));\n\n points.push((x - r - 1, y - off));\n\n }\n\n points\n\n}\n", "file_path": "src/dungeon_crawl/fov.rs", "rank": 48, "score": 24765.62846504087 }, { "content": "mod fov;\n\nmod setup;\n\nmod ui;\n\n\n\nuse self::ui::{Logs, MyCanvas};\n\nuse crate::{\n\n dungeon_crawl::ui::LogMessage,\n\n world_map::{GridPosition, TileFlags, WorldMap},\n\n AppState,\n\n};\n\nuse bevy::{ecs::system::QuerySingleError, prelude::*};\n\nuse rand::random;\n\nuse std::collections::{HashMap, VecDeque};\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum TurnState {\n\n WorldUpdate,\n\n Turn,\n\n}\n\n\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 49, "score": 23786.296705720222 }, { "content": "mod cellular_automata;\n\n\n\nuse bevy::prelude::*;\n\n\n\npub struct WorldGenerationPlugins;\n\nimpl PluginGroup for WorldGenerationPlugins {\n\n fn build(&mut self, group: &mut bevy::app::PluginGroupBuilder) {\n\n group.add(cellular_automata::CellularAutomataPlugin);\n\n }\n\n}\n", "file_path": "src/world_generation/mod.rs", "rank": 50, "score": 23778.896276564723 }, { "content": "\n\n level: 1,\n\n current_xp: 0,\n\n needed_xp: 3,\n\n }\n\n }\n\n}\n\n\n\npub struct Player;\n\npub struct EnemyAI;\n\npub struct Initiative;\n\npub struct Name(pub String);\n\n#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]\n\npub enum Item {\n\n HealthPotion,\n\n 
ScrollOfLightning,\n\n ScrollOfParalysis,\n\n ScrollOfFireball,\n\n Sword,\n\n WarAxe,\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 51, "score": 23778.10345153934 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum Ev {\n\n Move(Entity, GridPosition, GridPosition),\n\n Attack(Entity, Entity, i32),\n\n PickUpItem(Entity, Entity),\n\n DropItem(Entity, Entity, GridPosition),\n\n Heal(Entity, i32),\n\n Paralyze(Entity, i32),\n\n RemoveFromMap(Entity),\n\n AddToMap(Entity, GridPosition),\n\n RemoveFromInitiative(Entity),\n\n Despawn(Entity),\n\n Nothing,\n\n Quit,\n\n Descend,\n\n}\n\n\n\n#[derive(Default, Clone)]\n\npub struct InitiativeOrder(pub VecDeque<Entity>);\n\n\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 52, "score": 23774.68051679435 }, { "content": " Armor,\n\n}\n\npub struct Paralyzed(i32);\n\npub struct Cursor;\n\n\n\nimpl Name {\n\n pub fn capitalized(&self) -> String {\n\n let mut chars = self.0.chars();\n\n let first = chars.next().unwrap().to_uppercase();\n\n format!(\"{}{}\", first.collect::<String>(), chars.collect::<String>())\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Health {\n\n pub current: i32,\n\n pub max: i32,\n\n}\n\n\n\nimpl Health {\n\n pub fn new(current: i32, max: i32) -> Self {\n\n Health { current, max }\n\n }\n\n}\n\n\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 53, "score": 23774.34043189269 }, { "content": "pub struct DungeonCrawlPlugin;\n\nimpl Plugin for DungeonCrawlPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_event::<Ev>()\n\n .add_plugin(ui::DungeonCrawlUIPlugin)\n\n .init_resource::<GameData>()\n\n .init_resource::<InitiativeOrder>();\n\n\n\n macro_rules! 
switch_app_state {\n\n ($e:expr) => {\n\n (|mut app_state: ResMut<State<AppState>>| {\n\n let _ = app_state.set($e);\n\n })\n\n };\n\n }\n\n\n\n app.add_system_set(\n\n SystemSet::on_enter(AppState::DungeonCrawlEnter).with_system(\n\n switch_app_state!(AppState::DungeonCrawl(TurnState::WorldUpdate)).system(),\n\n ),\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 54, "score": 23773.169218347273 }, { "content": " );\n\n app.add_system_set(\n\n SystemSet::on_enter(AppState::DungeonCrawlExitToMenu)\n\n .with_system(cleanup.system())\n\n .with_system(cleanup_log_and_inventory.system())\n\n .with_system(switch_app_state!(AppState::MainMenu).system()),\n\n );\n\n app.add_system_set(\n\n SystemSet::on_enter(AppState::DungeonCrawlDescend)\n\n .with_system(cleanup.system())\n\n .with_system(switch_app_state!(AppState::WorldGeneration).system()),\n\n );\n\n\n\n use fov::*;\n\n use setup::*;\n\n app.add_system_set(\n\n SystemSet::on_enter(AppState::DungeonCrawl(TurnState::WorldUpdate))\n\n .with_system(update_world_map.system().label(\"update_world_map\"))\n\n .with_system(handle_initiative.system())\n\n .with_system(player_fov.system().after(\"update_world_map\"))\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 55, "score": 23772.559042915338 }, { "content": " world.entities[position].push(entity);\n\n commands.entity(entity).insert(position);\n\n visible.get_mut(entity).unwrap().is_visible = true;\n\n }\n\n Ev::RemoveFromInitiative(entity) => {\n\n let i = order.0.iter().position(|x| *x == entity).unwrap();\n\n order.0.remove(i);\n\n commands.entity(entity).remove::<Initiative>();\n\n }\n\n Ev::Despawn(entity) => {\n\n if player.get(entity).is_ok() {\n\n next_app_state = Some(AppState::DungeonCrawlExitToMenu);\n\n } else {\n\n commands.entity(entity).despawn();\n\n }\n\n }\n\n Ev::Paralyze(entity, duration) => {\n\n commands.entity(entity).insert(Paralyzed(duration));\n\n }\n\n Ev::Quit => {\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 56, "score": 
23772.501284789676 }, { "content": "\n\n pub fn floor_item(&self) -> Item {\n\n let mut map = HashMap::new();\n\n for (floor, (item, chance)) in Self::ITEM_CHANCES {\n\n if floor > self.floor {\n\n break;\n\n }\n\n map.insert(item, chance);\n\n }\n\n\n\n let sum: i32 = map.values().sum();\n\n let mut rand = 1 + random::<i32>() % sum;\n\n\n\n for (item, chance) in map {\n\n rand -= chance;\n\n if rand <= 0 {\n\n return item;\n\n }\n\n }\n\n unreachable!()\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 57, "score": 23772.03855163922 }, { "content": " .with_system(switch_app_state!(AppState::DungeonCrawl(TurnState::Turn)).system()),\n\n );\n\n\n\n app.add_system_set(\n\n SystemSet::on_update(AppState::DungeonCrawl(TurnState::Turn))\n\n .before(\"evs\")\n\n .with_system(player_control.system())\n\n .with_system(enemy_ai.system())\n\n .with_system(paralyzed.system()),\n\n );\n\n\n\n app.add_system_set(\n\n SystemSet::on_update(AppState::DungeonCrawl(TurnState::Turn))\n\n .label(\"evs\")\n\n .with_system(handle_evs.system()),\n\n );\n\n }\n\n}\n\n\n\npub struct GameData {\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 58, "score": 23770.727231542332 }, { "content": " Ev::Heal(entity, amount) => {\n\n log.send(LogMessage(format!(\n\n \"{} is healed by {} health.\",\n\n names.get(entity).unwrap().capitalized(),\n\n amount\n\n )));\n\n let mut hp = healthy.get_mut(entity).unwrap();\n\n hp.current = i32::min(hp.max, hp.current + amount);\n\n }\n\n Ev::RemoveFromMap(entity) => {\n\n let pos = positions.get_mut(entity).unwrap();\n\n let i = world.entities[*pos]\n\n .iter()\n\n .position(|x| *x == entity)\n\n .unwrap();\n\n world.entities[*pos].swap_remove(i);\n\n commands.entity(entity).remove::<GridPosition>();\n\n visible.get_mut(entity).unwrap().is_visible = false;\n\n }\n\n Ev::AddToMap(entity, position) => {\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 59, "score": 23769.90956642453 }, { "content": " next_app_state = 
Some(AppState::DungeonCrawlExitToMenu);\n\n }\n\n Ev::Descend => {\n\n log.send(LogMessage(\"You descend to the next dungeon floor\".into()));\n\n next_app_state = Some(AppState::DungeonCrawlDescend);\n\n }\n\n }\n\n }\n\n\n\n if let Some(state) = next_app_state {\n\n app_state.set(state).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 60, "score": 23769.580477449126 }, { "content": " pub inventory: [Option<Entity>; 5],\n\n pub selected: Option<usize>,\n\n pub armor: Option<Entity>,\n\n pub weapon: Option<Entity>,\n\n\n\n pub previous_hp: Option<Health>,\n\n pub floor: u32,\n\n\n\n pub level: u32,\n\n pub current_xp: u32,\n\n pub needed_xp: u32,\n\n}\n\n\n\nimpl GameData {\n\n const MAP_SIZE: [(u32, (u32, u32)); 2] = [(1, (200, 400)), (4, (400, 600))];\n\n\n\n const ENEMY_COUNT: [(u32, u32); 3] = [(1, 3), (2, 4), (4, 6)];\n\n const ITEM_COUNT: [(u32, u32); 3] = [(1, 2), (2, 3), (4, 4)];\n\n\n\n const ITEM_CHANCES: [(u32, (Item, i32)); 7] = [\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 61, "score": 23769.327683866984 }, { "content": " (1, (Item::HealthPotion, 10)),\n\n (2, (Item::Sword, 5)),\n\n (2, (Item::ScrollOfLightning, 5)),\n\n (3, (Item::Armor, 5)),\n\n (4, (Item::ScrollOfFireball, 5)),\n\n (4, (Item::ScrollOfParalysis, 5)),\n\n (4, (Item::WarAxe, 5)),\n\n ];\n\n\n\n pub fn floor_map_size(&self) -> (u32, u32) {\n\n self.calculate_count(Self::MAP_SIZE)\n\n }\n\n\n\n pub fn floor_enemy_count(&self) -> u32 {\n\n self.calculate_count(Self::ENEMY_COUNT)\n\n }\n\n\n\n pub fn floor_item_count(&self) -> u32 {\n\n self.calculate_count(Self::ITEM_COUNT)\n\n }\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 62, "score": 23768.58566984879 }, { "content": " let mut new_pos = position.clone();\n\n\n\n if keys.is_changed() {\n\n match keys.get_just_pressed().next() {\n\n Some(KeyCode::Up | KeyCode::W) => new_pos.y += 1,\n\n Some(KeyCode::Down | KeyCode::S) => new_pos.y -= 1,\n\n Some(KeyCode::Left | KeyCode::A) => new_pos.x -= 
1,\n\n Some(KeyCode::Right | KeyCode::D) => new_pos.x += 1,\n\n Some(KeyCode::G) => {\n\n inventory.selected = None;\n\n if let Some((item, _, _)) =\n\n items.iter().find(|(_, item, _)| item.contains(&position))\n\n {\n\n evs.send(Ev::PickUpItem(player_entity, item));\n\n }\n\n return;\n\n }\n\n Some(KeyCode::Key1) => inventory.selected = Some(0),\n\n Some(KeyCode::Key2) => inventory.selected = Some(1),\n\n Some(KeyCode::Key3) => inventory.selected = Some(2),\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 63, "score": 23768.460355992363 }, { "content": " data.needed_xp += 2;\n\n data.level += 1;\n\n\n\n healthy.get_mut(attacker).unwrap().max += 2;\n\n }\n\n }\n\n\n\n evs.push_back(Ev::RemoveFromMap(attackee));\n\n evs.push_back(Ev::RemoveFromInitiative(attackee));\n\n evs.push_back(Ev::Despawn(attackee));\n\n }\n\n }\n\n Ev::PickUpItem(_, item) => {\n\n for slot in &mut data.inventory {\n\n if slot.is_none() {\n\n *slot = Some(item);\n\n log.send(LogMessage(format!(\n\n \"You pick up {}.\",\n\n names.get(item).unwrap().0,\n\n )));\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 64, "score": 23768.087499874782 }, { "content": " names.get(attacker).unwrap().capitalized(),\n\n names.get(attackee).unwrap().0,\n\n damage\n\n )));\n\n\n\n let health = &mut healthy.get_mut(attackee).unwrap().current;\n\n *health -= damage;\n\n\n\n if *health <= 0 {\n\n log.send(LogMessage(format!(\n\n \"{} died!\",\n\n names.get(attackee).unwrap().capitalized()\n\n )));\n\n\n\n if player.get(attacker).is_ok() {\n\n data.current_xp += 1;\n\n if data.current_xp >= data.needed_xp {\n\n log.send(LogMessage(\"You level up!\".into()));\n\n\n\n data.current_xp = 0;\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 65, "score": 23768.087499874782 }, { "content": " }\n\n\n\n while let Some(ev) = evs.pop_front() {\n\n match ev {\n\n Ev::Nothing => {}\n\n Ev::Move(entity, old_pos, new_pos) => {\n\n let i = world.entities[old_pos]\n\n .iter()\n\n .position(|x| *x == entity)\n\n 
.unwrap();\n\n world.entities[old_pos].swap_remove(i);\n\n world.entities[new_pos].push(entity);\n\n\n\n if let Ok(mut pos) = positions.get_mut(entity) {\n\n *pos = new_pos;\n\n }\n\n }\n\n Ev::Attack(attacker, attackee, damage) => {\n\n log.send(LogMessage(format!(\n\n \"{} attacks {}, dealing {} damage!\",\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 66, "score": 23768.01443202914 }, { "content": " }\n\n\n\n fn calculate_count<T: Copy, const N: usize>(&self, arr: [(u32, T); N]) -> T {\n\n arr.iter()\n\n .find(|(floor, _)| *floor < self.floor + 1)\n\n .unwrap()\n\n .1\n\n }\n\n}\n\n\n\nimpl Default for GameData {\n\n fn default() -> Self {\n\n Self {\n\n inventory: [None; 5],\n\n selected: None,\n\n armor: None,\n\n weapon: None,\n\n\n\n previous_hp: None,\n\n floor: 1,\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 67, "score": 23767.90933842958 }, { "content": " inventory.selected = None;\n\n }\n\n }\n\n Item::ScrollOfFireball => {\n\n for x in -1..=1 {\n\n for y in -1..=1 {\n\n if let Some(e) = world.entities[[cursor.x + x, cursor.y + y]]\n\n .iter()\n\n .find(|e| healthy_entities.get(**e).is_ok())\n\n {\n\n evs.send(Ev::Attack(player_entity, *e, 1));\n\n inventory.inventory[index] = None;\n\n inventory.selected = None;\n\n }\n\n }\n\n }\n\n }\n\n Item::Sword => {\n\n if world.entities[cursor].contains(&player_entity) {\n\n inventory.weapon = Some(item);\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 68, "score": 23765.898965673212 }, { "content": " evs.send(Ev::Descend);\n\n } else if world.tiles[new_pos].contains(TileFlags::BLOCKS_MOVEMENT) {\n\n for &entity in &world.entities[new_pos] {\n\n if let Ok(()) = healthy_entities.get(entity) {\n\n let damage = match inventory.weapon.map(|item| items.get(item).unwrap().2) {\n\n Some(Item::WarAxe) => 3,\n\n Some(Item::Sword) => 2,\n\n _ => 1,\n\n };\n\n evs.send(Ev::Attack(player_entity, entity, damage));\n\n }\n\n }\n\n } else {\n\n evs.send(Ev::Move(player_entity, *position, new_pos));\n\n }\n\n 
}\n\n}\n\n\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 69, "score": 23765.898965673212 }, { "content": " Some(KeyCode::Key4) => inventory.selected = Some(3),\n\n Some(KeyCode::Key5) => inventory.selected = Some(4),\n\n Some(KeyCode::Escape) => evs.send(Ev::Quit),\n\n _ => {}\n\n }\n\n }\n\n\n\n let cursor = *cursor.single().unwrap();\n\n if world.tiles[cursor].contains(TileFlags::IN_VIEW) {\n\n if let Some(index) = inventory.selected {\n\n if let Some(item) = inventory.inventory[index] {\n\n if buttons.just_pressed(MouseButton::Left) {\n\n match items.get(item).unwrap().2 {\n\n Item::HealthPotion => {\n\n if let Some(e) = world.entities[cursor]\n\n .iter()\n\n .find(|e| healthy_entities.get(**e).is_ok())\n\n {\n\n evs.send(Ev::Heal(*e, 4));\n\n inventory.inventory[index] = None;\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 70, "score": 23765.898965673212 }, { "content": " evs.push_back(Ev::RemoveFromMap(item));\n\n break;\n\n }\n\n }\n\n }\n\n Ev::DropItem(_, item, position) => {\n\n if world.tiles[position].contains(TileFlags::BLOCKS_MOVEMENT) {\n\n log.send(LogMessage(format!(\n\n \"{} slams into the wall.\",\n\n names.get(item).unwrap().capitalized(),\n\n )));\n\n evs.push_back(Ev::Despawn(item));\n\n } else {\n\n log.send(LogMessage(format!(\n\n \"{} lands on the floor.\",\n\n names.get(item).unwrap().capitalized(),\n\n )));\n\n evs.push_back(Ev::AddToMap(item, position));\n\n }\n\n }\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 71, "score": 23765.898965673212 }, { "content": " } else if inventory.weapon == inventory.inventory[index] {\n\n inventory.weapon = None;\n\n }\n\n inventory.inventory[index] = None;\n\n inventory.selected = None;\n\n }\n\n }\n\n }\n\n }\n\n\n\n if let Some(i) = inventory.selected {\n\n if inventory.inventory[i].is_none() {\n\n inventory.selected = None;\n\n }\n\n }\n\n\n\n if *position != new_pos {\n\n inventory.selected = None;\n\n\n\n if new_pos == world.stairs {\n", "file_path": "src/dungeon_crawl/mod.rs", 
"rank": 72, "score": 23765.898965673212 }, { "content": " inventory.selected = None;\n\n }\n\n }\n\n Item::ScrollOfLightning => {\n\n if let Some(e) = world.entities[cursor]\n\n .iter()\n\n .find(|e| healthy_entities.get(**e).is_ok())\n\n {\n\n evs.send(Ev::Attack(player_entity, *e, 2));\n\n inventory.inventory[index] = None;\n\n inventory.selected = None;\n\n }\n\n }\n\n Item::ScrollOfParalysis => {\n\n if let Some(e) = world.entities[cursor]\n\n .iter()\n\n .find(|e| controllers.get(**e).is_ok())\n\n {\n\n evs.send(Ev::Paralyze(*e, 4));\n\n inventory.inventory[index] = None;\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 73, "score": 23765.898965673212 }, { "content": " inventory.selected = None;\n\n }\n\n }\n\n Item::WarAxe => {\n\n if world.entities[cursor].contains(&player_entity) {\n\n inventory.weapon = Some(item);\n\n inventory.selected = None;\n\n }\n\n }\n\n Item::Armor => {\n\n if world.entities[cursor].contains(&player_entity) {\n\n inventory.armor = Some(item);\n\n inventory.selected = None;\n\n }\n\n }\n\n }\n\n } else if buttons.just_pressed(MouseButton::Right) {\n\n evs.send(Ev::DropItem(player_entity, item, cursor));\n\n if inventory.armor == inventory.inventory[index] {\n\n inventory.armor = None;\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 74, "score": 23765.898965673212 }, { "content": " evs.send(Ev::Move(enemy, *position, path[1]));\n\n } else {\n\n evs.send(Ev::Nothing);\n\n }\n\n } else {\n\n evs.send(Ev::Nothing);\n\n }\n\n } else {\n\n evs.send(Ev::Nothing);\n\n }\n\n}\n\n\n", "file_path": "src/dungeon_crawl/mod.rs", "rank": 75, "score": 23765.898965673212 }, { "content": "mod ui_setup;\n\n\n\nuse super::{Cursor, GameData, Health, Name, Player, TurnState};\n\nuse crate::{\n\n world_map::{Grid, GridPosition, TileFlags, WorldMap},\n\n AppState, UiCamera,\n\n};\n\nuse bevy::{\n\n math::vec2,\n\n prelude::*,\n\n render::camera::{Camera, OrthographicProjection},\n\n};\n\nuse std::collections::VecDeque;\n\n\n\npub struct 
MyCanvas;\n\npub struct MyHpText;\n\npub struct MyHpBar;\n\npub struct MyXPText;\n\npub struct MyXPBar;\n\npub struct MyFloorText;\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 76, "score": 22664.883065482303 }, { "content": "pub struct MyLog;\n\npub struct MyDetails;\n\npub struct MyInventory;\n\n\n\npub struct LogMessage(pub String);\n\n#[derive(Default)]\n\npub struct Logs(VecDeque<String>);\n\n\n\npub struct DungeonCrawlUIPlugin;\n\nimpl Plugin for DungeonCrawlUIPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_event::<LogMessage>().init_resource::<Logs>();\n\n app.add_system_set(\n\n SystemSet::on_enter(AppState::DungeonCrawlEnter).with_system(ui_setup::create.system()),\n\n );\n\n app.add_system_set(\n\n SystemSet::on_update(AppState::DungeonCrawl(TurnState::Turn))\n\n .with_system(update_position.system().label(\"positions\"))\n\n .with_system(camera_position.system().after(\"positions\"))\n\n .with_system(update_health.system())\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 77, "score": 22659.676486810084 }, { "content": "\n\n details.resize(4, String::from(\" \"));\n\n text.single_mut().unwrap().sections[0].value = details\n\n .into_iter()\n\n .intersperse(String::from(\"\\n\"))\n\n .collect();\n\n\n\n return;\n\n }\n\n }\n\n }\n\n // Else\n\n text.single_mut().unwrap().sections[0].value = String::from(\" \\n \\n \\n \");\n\n}\n\n\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 78, "score": 22655.0716572015 }, { "content": " } else if inventory.weapon == Some(item) {\n\n \"(equiped)\"\n\n } else {\n\n \"\"\n\n }\n\n } else {\n\n \"\"\n\n }\n\n ));\n\n }\n\n\n\n if inventory.inventory.iter().all(|i| i.is_none()) {\n\n text.single_mut().unwrap().sections[0].value =\n\n String::from(\"Press G to pick up items\\n \\n \\n \\n \");\n\n } else {\n\n text.single_mut().unwrap().sections[0].value =\n\n inv.into_iter().intersperse(String::from(\"\\n\")).collect();\n\n }\n\n}\n", "file_path": "src/dungeon_crawl/ui/mod.rs", 
"rank": 79, "score": 22654.88868047664 }, { "content": " .with_system(update_xp.system())\n\n .with_system(update_floor.system())\n\n .with_system(update_log.system())\n\n .with_system(update_cursor.system().before(\"positions\"))\n\n .with_system(update_details.system())\n\n .with_system(update_inventory.system()),\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/dungeon_crawl/ui/mod.rs", "rank": 80, "score": 22652.28674191909 }, { "content": "use crate::dungeon_crawl::Name;\n\nuse bevy::{math::ivec2, prelude::*};\n\nuse bitflags::bitflags;\n\nuse pathfinding::directed::astar;\n\nuse std::ops::{Index, IndexMut};\n\n\n\npub struct Grid {\n\n pub cell_size: IVec2,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct GridPosition {\n\n pub x: i32,\n\n pub y: i32,\n\n}\n\n\n\npub struct Tile;\n\npub struct BlocksMovement;\n\npub struct BlocksVision;\n\n\n", "file_path": "src/world_map.rs", "rank": 81, "score": 17.65931161122404 }, { "content": "use crate::{\n\n bundles::{EnemyBundle, ItemBundle, PlayerBundle},\n\n dungeon_crawl::{GameData, InitiativeOrder},\n\n world_map::{Array2D, GridPosition, TileFactory, WorldMap},\n\n AppState,\n\n};\n\nuse bevy::prelude::*;\n\nuse rand::random;\n\nuse std::{collections::VecDeque, mem};\n\n\n\npub struct CellularAutomataPlugin;\n\nimpl Plugin for CellularAutomataPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_system_set(\n\n SystemSet::on_enter(AppState::WorldGeneration).with_system(cellular_automata.system()),\n\n );\n\n }\n\n}\n\n\n\nconst MAP_SIZE: i32 = 40;\n\nconst ALIVE_SPAWN_CHANCE: f32 = 0.45;\n\nconst ITERATIONS: u32 = 2;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 82, "score": 16.718346208589608 }, { "content": " commands\n\n .spawn_bundle(SpriteBundle {\n\n material: self.explored_wall_material.clone(),\n\n ..Default::default()\n\n })\n\n .insert_bundle((\n\n Tile,\n\n GridPosition { x, y },\n\n 
BlocksMovement,\n\n BlocksVision,\n\n Name(String::from(\"wall\")),\n\n ))\n\n .id()\n\n }\n\n\n\n pub fn floor(&self, commands: &mut Commands, x: i32, y: i32) -> Entity {\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: self.explored_floor_material.clone(),\n\n ..Default::default()\n", "file_path": "src/world_map.rs", "rank": 83, "score": 12.445324937980832 }, { "content": " })\n\n .insert_bundle((Tile, GridPosition { x, y }, Name(String::from(\"floor\"))))\n\n .id()\n\n }\n\n\n\n pub fn stairs(&self, commands: &mut Commands, x: i32, y: i32) -> Entity {\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: self.explored_stairs_material.clone(),\n\n ..Default::default()\n\n })\n\n .insert_bundle((Tile, GridPosition { x, y }, Name(String::from(\"stairs\"))))\n\n .id()\n\n }\n\n}\n", "file_path": "src/world_map.rs", "rank": 84, "score": 11.877085829139471 }, { "content": "use super::{\n\n MyCanvas, MyDetails, MyFloorText, MyHpBar, MyHpText, MyInventory, MyLog, MyXPBar, MyXPText,\n\n};\n\nuse crate::{dungeon_crawl::Cursor, world_map::GridPosition};\n\nuse bevy::prelude::*;\n\n\n", "file_path": "src/dungeon_crawl/ui/ui_setup.rs", "rank": 85, "score": 7.821331557331597 }, { "content": "use super::{Cursor, Player};\n\nuse crate::world_map::{GridPosition, Tile, TileFlags, WorldMap};\n\nuse bevy::prelude::*;\n\n\n", "file_path": "src/dungeon_crawl/fov.rs", "rank": 86, "score": 6.1039318194952905 }, { "content": "#[derive(Debug, Clone)]\n\npub struct Array2D<T> {\n\n elems: Vec<Vec<T>>,\n\n}\n\n\n\nimpl<T> Array2D<T> {\n\n pub fn with_elem(x: i32, y: i32, val: T) -> Self\n\n where\n\n T: Clone,\n\n {\n\n Self {\n\n elems: vec![vec![val; y as usize]; x as usize],\n\n }\n\n }\n\n\n\n pub fn with_size(x: i32, y: i32) -> Self\n\n where\n\n T: Default + Clone,\n\n {\n\n Self {\n", "file_path": "src/world_map.rs", "rank": 87, "score": 5.898557113583057 }, { "content": " elems: vec![vec![Default::default(); y as usize]; x as usize],\n\n }\n\n }\n\n\n\n pub fn 
_from_vecs(elems: Vec<Vec<T>>) -> Self {\n\n Self { elems }\n\n }\n\n\n\n pub fn get(&self, x: i32, y: i32) -> Option<&T> {\n\n self.elems\n\n .get(x as usize)\n\n .map(|r| r.get(y as usize))\n\n .flatten()\n\n }\n\n\n\n pub fn get_mut(&mut self, x: i32, y: i32) -> Option<&mut T> {\n\n self.elems\n\n .get_mut(x as usize)\n\n .map(|r| r.get_mut(y as usize))\n\n .flatten()\n", "file_path": "src/world_map.rs", "rank": 88, "score": 5.7963935365820465 }, { "content": "use super::{EnemyAI, Initiative, InitiativeOrder, Player};\n\nuse crate::world_map::{BlocksMovement, BlocksVision, Tile, TileFlags, WorldMap};\n\nuse bevy::prelude::*;\n\n\n", "file_path": "src/dungeon_crawl/setup.rs", "rank": 89, "score": 5.76524292851599 }, { "content": " }\n\n let mut entities = Array2D::with_size(MAP_SIZE + 20, MAP_SIZE + 20);\n\n let tile_factory = TileFactory::new(&asset_server, &mut materials);\n\n for x in 1..MAP_SIZE - 1 {\n\n for y in 1..MAP_SIZE - 1 {\n\n let mut tile = vec![];\n\n\n\n if stairs.x == x && stairs.y == y {\n\n tile.push(tile_factory.stairs(&mut commands, x + 9, y + 9));\n\n } else if let TileType::Alive(zone) = tile_map[[x, y]] {\n\n tile.push(tile_factory.floor(&mut commands, x + 9, y + 9));\n\n\n\n // Zones start at 1 so we have to substract one\n\n if let Some(e) = zone_entities.get_mut(zone - 1) {\n\n if let Some(e) = e.pop() {\n\n commands\n\n .entity(e)\n\n .insert(GridPosition { x: x + 9, y: y + 9 });\n\n tile.push(e);\n\n }\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 90, "score": 5.731940159660775 }, { "content": " }\n\n\n\n commands.insert_resource(WorldMap {\n\n entities,\n\n tile_factory,\n\n tiles: Array2D::with_size(MAP_SIZE + 20, MAP_SIZE + 20),\n\n stairs: GridPosition {\n\n x: stairs.x + 9,\n\n y: stairs.y + 9,\n\n },\n\n });\n\n commands.insert_resource(InitiativeOrder::default());\n\n app_state.set(AppState::DungeonCrawlEnter).unwrap();\n\n}\n\n\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 91, 
"score": 5.534921586811711 }, { "content": " );\n\n }\n\n\n\n for _ in 0..data.floor_item_count() {\n\n let zone = random::<usize>() % (zone_count - 1) + 1;\n\n let item = data.floor_item();\n\n entities[zone].push(\n\n commands\n\n .spawn_bundle(ItemBundle::item(item, asset_server, materials))\n\n .id(),\n\n );\n\n }\n\n\n\n entities\n\n}\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 92, "score": 5.021117288742847 }, { "content": " }\n\n } else {\n\n // Show wall only if it's adjencent to a floor\n\n 'finish: for i in -1..=1i32 {\n\n for j in -1..=1i32 {\n\n if let TileType::Alive(_) = tile_map[[x + i, y + j]] {\n\n tile.push(tile_factory.wall(&mut commands, x + 9, y + 9));\n\n break 'finish;\n\n }\n\n }\n\n }\n\n };\n\n\n\n entities[[x + 9, y + 9]] = tile;\n\n }\n\n }\n\n\n\n // Despawn unused enemies\n\n for e in zone_entities.iter().flatten() {\n\n commands.entity(*e).despawn();\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 93, "score": 4.708219505987117 }, { "content": " }\n\n\n\n break (\n\n tile_map,\n\n get_zone_entities(\n\n &mut commands,\n\n &asset_server,\n\n &mut materials,\n\n &*data,\n\n zone_count,\n\n ),\n\n );\n\n };\n\n\n\n let mut stairs = GridPosition { x: 1, y: 1 };\n\n while tile_map[stairs] == TileType::Dead {\n\n stairs = GridPosition {\n\n x: (1 + rand::random::<u32>() % (MAP_SIZE as u32 - 3)) as i32,\n\n y: (1 + rand::random::<u32>() % (MAP_SIZE as u32 - 3)) as i32,\n\n };\n", "file_path": "src/world_generation/cellular_automata.rs", "rank": 94, "score": 4.625040865085661 }, { "content": " }\n\n\n\n pub fn size(&self) -> IVec2 {\n\n ivec2(self.elems.len() as i32, self.elems[0].len() as i32)\n\n }\n\n}\n\n\n\nimpl<T> Index<[i32; 2]> for Array2D<T> {\n\n type Output = T;\n\n\n\n fn index(&self, index: [i32; 2]) -> &Self::Output {\n\n &self.elems[index[0] as usize][index[1] as usize]\n\n }\n\n}\n\n\n\nimpl<T> IndexMut<[i32; 2]> for Array2D<T> {\n\n fn index_mut(&mut self, index: [i32; 2]) -> 
&mut Self::Output {\n\n &mut self.elems[index[0] as usize][index[1] as usize]\n\n }\n\n}\n", "file_path": "src/world_map.rs", "rank": 95, "score": 4.599417139030144 }, { "content": " visible_stairs_material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"stairs.png\")),\n\n color: Color::hex(\"826007\").unwrap(),\n\n }),\n\n explored_wall_material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"brick-wall.png\")),\n\n color: Color::hex(\"444444\").unwrap(),\n\n }),\n\n explored_floor_material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"square.png\")),\n\n color: Color::hex(\"444444\").unwrap(),\n\n }),\n\n explored_stairs_material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"stairs.png\")),\n\n color: Color::hex(\"444444\").unwrap(),\n\n }),\n\n }\n\n }\n\n\n\n pub fn wall(&self, commands: &mut Commands, x: i32, y: i32) -> Entity {\n", "file_path": "src/world_map.rs", "rank": 96, "score": 4.5924554733657565 }, { "content": " }\n\n}\n\n\n\nimpl<T> Index<GridPosition> for Array2D<T> {\n\n type Output = T;\n\n\n\n fn index(&self, index: GridPosition) -> &Self::Output {\n\n &self.elems[index.x as usize][index.y as usize]\n\n }\n\n}\n\n\n\nimpl<T> IndexMut<GridPosition> for Array2D<T> {\n\n fn index_mut(&mut self, index: GridPosition) -> &mut Self::Output {\n\n &mut self.elems[index.x as usize][index.y as usize]\n\n }\n\n}\n\n\n\nbitflags! 
{\n\n pub struct TileFlags: u32 {\n\n const BLOCKS_MOVEMENT = 0b00000001;\n", "file_path": "src/world_map.rs", "rank": 97, "score": 4.546429564035336 }, { "content": "\n\n pub explored_wall_material: Handle<ColorMaterial>,\n\n pub explored_floor_material: Handle<ColorMaterial>,\n\n pub explored_stairs_material: Handle<ColorMaterial>,\n\n}\n\n\n\nimpl TileFactory {\n\n pub fn new(\n\n asset_server: &Res<AssetServer>,\n\n materials: &mut ResMut<Assets<ColorMaterial>>,\n\n ) -> Self {\n\n Self {\n\n visible_wall_material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"brick-wall.png\")),\n\n color: Color::hex(\"826007\").unwrap(),\n\n }),\n\n visible_floor_material: materials.add(ColorMaterial {\n\n texture: Some(asset_server.load(\"square.png\")),\n\n color: Color::hex(\"826007\").unwrap(),\n\n }),\n", "file_path": "src/world_map.rs", "rank": 98, "score": 4.53917996585718 }, { "content": " } else {\n\n visible.is_visible = world.tiles[pos].contains(TileFlags::IN_VIEW);\n\n }\n\n }\n\n\n\n for (mut mat, &pos) in tiles.iter_mut() {\n\n if world.tiles[pos].contains(TileFlags::IN_VIEW) {\n\n if mat.id == world.tile_factory.explored_floor_material.id {\n\n *mat = world.tile_factory.visible_floor_material.clone();\n\n } else if mat.id == world.tile_factory.explored_wall_material.id {\n\n *mat = world.tile_factory.visible_wall_material.clone();\n\n } else if mat.id == world.tile_factory.explored_stairs_material.id {\n\n *mat = world.tile_factory.visible_stairs_material.clone();\n\n }\n\n } else {\n\n if mat.id == world.tile_factory.visible_floor_material.id {\n\n *mat = world.tile_factory.explored_floor_material.clone();\n\n } else if mat.id == world.tile_factory.visible_wall_material.id {\n\n *mat = world.tile_factory.explored_wall_material.clone();\n\n } else if mat.id == world.tile_factory.visible_stairs_material.id {\n\n *mat = world.tile_factory.explored_stairs_material.clone();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"src/dungeon_crawl/fov.rs", "rank": 99, "score": 4.294385015309041 } ]
Rust
proxy/tests/discovery.rs
xiaods/conduit
bc16034fd6d3c88f20a4e7a6dc3f0baa0e3ce06f
mod support; use self::support::*; macro_rules! generate_tests { (server: $make_server:path, client: $make_client:path) => { use conduit_proxy_controller_grpc as pb; #[test] fn outbound_asks_controller_api() { let _ = env_logger::try_init(); let srv = $make_server().route("/", "hello").route("/bye", "bye").run(); let ctrl = controller::new() .destination("disco.test.svc.cluster.local", srv.addr) .run(); let proxy = proxy::new().controller(ctrl).outbound(srv).run(); let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/"), "hello"); assert_eq!(client.get("/bye"), "bye"); } #[test] fn outbound_reconnects_if_controller_stream_ends() { let _ = env_logger::try_init(); let srv = $make_server().route("/recon", "nect").run(); let ctrl = controller::new() .destination_close("disco.test.svc.cluster.local") .destination("disco.test.svc.cluster.local", srv.addr) .run(); let proxy = proxy::new().controller(ctrl).outbound(srv).run(); let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/recon"), "nect"); } #[test] #[cfg_attr(not(feature = "flaky_tests"), ignore)] fn outbound_destinations_reset_on_reconnect_followed_by_no_endpoints_exists() { outbound_destinations_reset_on_reconnect(move || { Some(controller::destination_exists_with_no_endpoints()) }) } #[test] #[cfg_attr(not(feature = "flaky_tests"), ignore)] fn outbound_destinations_reset_on_reconnect_followed_by_add_none() { outbound_destinations_reset_on_reconnect(move || { Some(controller::destination_add_none()) }) } #[test] #[cfg_attr(not(feature = "flaky_tests"), ignore)] fn outbound_destinations_reset_on_reconnect_followed_by_remove_none() { outbound_destinations_reset_on_reconnect(move || { Some(controller::destination_remove_none()) }) } fn outbound_destinations_reset_on_reconnect<F>(f: F) where F: Fn() -> Option<pb::destination::Update> + Send + 'static { use std::thread; let _ = env_logger::try_init(); let mut env = 
config::TestEnv::new(); env.put(config::ENV_BIND_TIMEOUT, "100".to_owned()); let srv = $make_server().route("/", "hello").run(); let ctrl = controller::new() .destination("initially-exists.ns.svc.cluster.local", srv.addr) .destination_close("trigger-close.ns.svc.cluster.local") .destination_fn("initially-exists.ns.svc.cluster.local", f) .run(); let proxy = proxy::new() .controller(ctrl) .outbound(srv) .run_with_test_env(env); let initially_exists = $make_client(proxy.outbound, "initially-exists.ns.svc.cluster.local"); assert_eq!(initially_exists.get("/"), "hello"); { let trigger_close = $make_client(proxy.outbound, "trigger-close.ns.svc.cluster.local"); let mut req = trigger_close.request_builder("/"); let rsp = trigger_close.request(req.method("GET")); assert_eq!(rsp.status(), http::StatusCode::INTERNAL_SERVER_ERROR); } thread::sleep(Duration::from_millis(1000)); let mut req = initially_exists.request_builder("/"); let rsp = initially_exists.request(req.method("GET")); assert_eq!(rsp.status(), http::StatusCode::INTERNAL_SERVER_ERROR); } #[test] #[cfg_attr(not(feature = "flaky_tests"), ignore)] fn outbound_times_out() { use std::thread; let _ = env_logger::try_init(); let mut env = config::TestEnv::new(); env.put(config::ENV_BIND_TIMEOUT, "100".to_owned()); let srv = $make_server().route("/hi", "hello").run(); let addr = srv.addr.clone(); let ctrl = controller::new() .destination_fn("disco.test.svc.cluster.local", move || { thread::sleep(Duration::from_millis(500)); Some(controller::destination_update(addr)) }) .run(); let proxy = proxy::new() .controller(ctrl) .outbound(srv) .run_with_test_env(env); let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); let mut req = client.request_builder("/"); let rsp = client.request(req.method("GET")); assert_eq!(rsp.status(), http::StatusCode::INTERNAL_SERVER_ERROR); } #[test] fn outbound_uses_orig_dst_if_not_local_svc() { let _ = env_logger::try_init(); let srv = $make_server() .route("/", "hello") 
.route("/bye", "bye") .run(); let ctrl = controller::new() .run(); let proxy = proxy::new() .controller(ctrl) .outbound(srv) .run(); let client = $make_client(proxy.outbound, "versioncheck.conduit.io"); assert_eq!(client.get("/"), "hello"); assert_eq!(client.get("/bye"), "bye"); } #[test] fn outbound_asks_controller_without_orig_dst() { let _ = env_logger::try_init(); let srv = $make_server() .route("/", "hello") .route("/bye", "bye") .run(); let ctrl = controller::new() .destination("disco.test.svc.cluster.local", srv.addr) .run(); let proxy = proxy::new() .controller(ctrl) .run(); let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/"), "hello"); assert_eq!(client.get("/bye"), "bye"); } } } mod http2 { use super::support::*; generate_tests! { server: server::new, client: client::new } } mod http1 { use super::support::*; generate_tests! { server: server::http1, client: client::http1 } mod absolute_uris { use super::super::support::*; generate_tests! { server: server::http1, client: client::http1_absolute_uris } } } #[test] fn outbound_updates_newer_services() { let _ = env_logger::try_init(); let srv = server::http1().route("/h1", "hello h1").run(); let ctrl = controller::new() .destination("disco.test.svc.cluster.local", srv.addr) .run(); let proxy = proxy::new().controller(ctrl).outbound(srv).run(); let client1 = client::http2(proxy.outbound, "disco.test.svc.cluster.local"); client1.get("/h2"); let client2 = client::http1(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client2.get("/h1"), "hello h1"); }
mod support; use self::support::*; macro_rules! generate_tests { (server: $make_server:path, client: $make_client:path) => { use conduit_proxy_controller_grpc as pb; #[test] fn outbound_asks_controller_api() { let _ = env_logger::try_init(); let srv = $make_server().route("/", "hello").route("/bye", "bye").run(); let ctrl = controller::new() .destination("disco.test.svc.cluster.local", srv.addr) .run(); let proxy = proxy::new().controller(ctrl).outbound(srv).run(); let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/"), "hello"); assert_eq!(client.get("/bye"), "bye"); } #[test] fn outbound_reconnects_if_controller_stream_ends() { let _ = env_logger::try_init(); let srv = $make_server().route("/recon", "nect").run(); let ctrl = controller::new() .destination_close("disco.test.svc.cluster.local") .destination("disco.test.svc.cluster.local", srv.addr) .run(); let proxy = proxy::new().controller(ctrl).outbound(srv).run(); let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/recon"), "nect"); } #[test] #[cfg_attr(not(feature = "flaky_tests"), ignore)] fn outbound_destinations_reset_on_reconnect_followed_by_no_endpoints_exists() { outbound_destinations_reset_on_reconnect(move || { Some(controller::destination_exists_with_no_endpoints()) }) } #[test] #[cfg_attr(not(feature = "flaky_tests"), ignore)] fn outbound_destinations_reset_on_reconnect_followed_by_add_none() { outbound_destinations_reset_on_reconnect(move || { Some(controller::destination_add_none()) }) } #[test] #[cfg_attr(not(feature = "flaky_tests"), ignore)] fn outbound_destinations_reset_on_reconnect_followed_by_remove_none() { outbound_destinations_reset_on_reconnect(move || { Some(controller::destination_remove_none()) }) } fn outbound_destinations_reset_on_reconnect<F>(f: F) where F: Fn() -> Option<pb::destination::Update> + Send + 'static { use std::thread; let _ = env_logger::try_init(); let mut env = 
config::TestEnv::new(); env.put(config::ENV_BIND_TIMEOUT, "100".to_owned()); let srv = $make_server().route("/", "hello").run(); let ctrl = controller::new() .destination("initially-exists.ns.svc.cluster.local", srv.addr) .destination_close("trigger-close.ns.svc.cluster.local") .destination_fn("initially-exists.ns.svc.cluster.local", f) .run(); let proxy = proxy::new() .controller(ctrl) .outbound(srv) .run_with_test_env(env); let initially_exists = $make_client(proxy.outbound, "initially-exists.ns.svc.cluster.local"); assert_eq!(initially_exists.get("/"), "hello"); { let trigger_close = $make_client(proxy.outbound, "trigger-close.ns.svc.cluster.local"); let mut req = trigger_close.request_builder("/"); let rsp = trigger_close.request(req.method("GET")); assert_eq!(rsp.status(), http::StatusCode::INTERNAL_SERVER_ERROR); } thread::sleep(Duration::from_millis(1000)); let mut req = initially_exists.request_builder("/"); let rsp = initially_exists.request(req.method("GET")); assert_eq!(rsp.status(), http::StatusCode::INTERNAL_SERVER_ERROR); } #[test] #[cfg_attr(not(feature = "flaky_tests"), ignore)] fn outbound_times_out() { use std::thread; let _ = env_logger::try_init(); let mut env = config::TestEnv::new(); env.put(config::ENV_BIND_TIMEOUT, "100".to_owned()); let srv = $make_server().route("/hi", "hello").run(); let addr = srv.addr.clone(); let ctrl = controller::new() .destination_fn("disco.test.svc.cluster.local", move || { thread::sleep(Duration::from_millis(500)); Some(controller::destination_update(addr)) }) .run(); let proxy = proxy::new() .controller(ctrl) .outbound(srv) .run_with_test_env(env); let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); let mut req = client.request_builder("/"); let rsp = client.request(req.method("GET")); assert_eq!(rsp.status(), http::StatusCode::INTERNAL_SERVER_ERROR); } #[test] fn outbound_uses_orig_dst_if_not_local_svc() { let _ = env_logger::try_init(); let srv = $make_server() .route("/", "hello") 
.route("/bye", "bye") .run(); let ctrl = controller::new() .run(); let proxy = proxy::new() .controller(ctrl) .outbound(srv) .run(); let client = $make_client(proxy.outbound, "versioncheck.conduit.io"); assert_eq!(client.get("/"), "hello"); assert_eq!(client.get("/bye"), "bye"); } #[test] fn outbound_asks_controller_without_orig_dst() { let _ = env_logger::try_init(); let srv = $make_server() .route("/", "hello") .route("/bye", "bye") .run(); let ctrl = controller::new() .destination("disco.test.svc.cluster.local", srv.addr) .run(); let proxy = proxy::new() .controller(ctrl) .run(); let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/"), "hello"); assert_eq!(client.get("/bye"), "bye"); } } } mod http2 { use super::support::*; generate_tests! { server: server::new, client: client::new } } mod http1 { use super::support::*; generate_tests! { server: server::http1, client: client::http1 } mod absolute_uris { use super::super::support::*; generate_tests! { server: server::http1, client: client::http1_absolute_uris } } } #[test] fn outbound_updates_newer_services() { let _ = env_logger::try_init(); let srv = server::http1().route("/h1", "hello h1").run(); let ctrl = controller::new() .destination("disco.test.svc.cluster.local", srv.addr) .run(); let proxy = proxy::new().controller(ctrl).outbound(srv).run(); let client1 = client::http2(proxy.outbound, "disco.test.svc.cluster.local"); client1.get("/h2");
let client2 = client::http1(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client2.get("/h1"), "hello h1"); }
function_block-function_prefix_line
[ { "content": "fn run(proxy: Proxy, mut env: config::TestEnv) -> Listening {\n\n use self::conduit_proxy::config;\n\n\n\n let controller = proxy.controller.expect(\"proxy controller missing\");\n\n let inbound = proxy.inbound;\n\n let outbound = proxy.outbound;\n\n let mut mock_orig_dst = DstInner::default();\n\n\n\n env.put(config::ENV_CONTROL_URL, format!(\"tcp://{}\", controller.addr));\n\n env.put(config::ENV_PRIVATE_LISTENER, \"tcp://127.0.0.1:0\".to_owned());\n\n if let Some(ref inbound) = inbound {\n\n env.put(config::ENV_PRIVATE_FORWARD, format!(\"tcp://{}\", inbound.addr));\n\n mock_orig_dst.inbound_orig_addr = Some(inbound.addr);\n\n }\n\n if let Some(ref outbound) = outbound {\n\n mock_orig_dst.outbound_orig_addr = Some(outbound.addr);\n\n }\n\n env.put(config::ENV_PUBLIC_LISTENER, \"tcp://127.0.0.1:0\".to_owned());\n\n env.put(config::ENV_CONTROL_LISTENER, \"tcp://127.0.0.1:0\".to_owned());\n\n env.put(config::ENV_METRICS_LISTENER, \"tcp://127.0.0.1:0\".to_owned());\n", "file_path": "proxy/tests/support/proxy.rs", "rank": 0, "score": 353932.9489032605 }, { "content": "fn run(addr: SocketAddr, version: Run) -> Sender {\n\n let (tx, rx) = mpsc::unbounded::<(Request, oneshot::Sender<Result<Response, String>>)>();\n\n\n\n ::std::thread::Builder::new().name(\"support client\".into()).spawn(move || {\n\n let mut core = Core::new().unwrap();\n\n let reactor = core.handle();\n\n\n\n let conn = Conn(addr, reactor.clone());\n\n\n\n let work: Box<Future<Item=(), Error=()>> = match version {\n\n Run::Http1 { absolute_uris } => {\n\n let client = hyper::Client::configure()\n\n .connector(conn)\n\n .build(&reactor);\n\n Box::new(rx.for_each(move |(req, cb)| {\n\n let mut req = hyper::Request::from(req.map(|()| hyper::Body::empty()));\n\n if !req.headers().has::<hyper::header::ContentLength>() {\n\n assert!(req.body_mut().take().unwrap().is_empty());\n\n }\n\n if absolute_uris {\n", "file_path": "proxy/tests/support/client.rs", "rank": 1, "score": 350263.99464529374 
}, { "content": "pub fn http2() -> Server {\n\n Server::http2()\n\n}\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 2, "score": 335973.66271528957 }, { "content": "pub fn http1() -> Server {\n\n Server::http1()\n\n}\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 3, "score": 335950.7213455664 }, { "content": "fn run_client(addr: SocketAddr) -> TcpSender {\n\n let (tx, rx) = mpsc::unbounded();\n\n ::std::thread::Builder::new().name(\"support client\".into()).spawn(move || {\n\n let mut core = Core::new().unwrap();\n\n let handle = core.handle();\n\n\n\n let work = rx.for_each(|cb: oneshot::Sender<_>| {\n\n let fut = TcpStream::connect(&addr, &handle)\n\n .map_err(|e| panic!(\"connect error: {}\", e))\n\n .and_then(move |tcp| {\n\n let (tx, rx) = mpsc::unbounded();\n\n cb.send(tx).unwrap();\n\n rx.fold(tcp, |tcp, (action, cb): (Option<Vec<u8>>, oneshot::Sender<io::Result<Option<Vec<u8>>>>)| {\n\n let f: Box<Future<Item=TcpStream, Error=()>> = match action {\n\n None => {\n\n Box::new(tokio_io::io::read(tcp, vec![0; 1024])\n\n .then(move |res| {\n\n match res {\n\n Ok((tcp, mut vec, n)) => {\n\n vec.truncate(n);\n", "file_path": "proxy/tests/support/tcp.rs", "rank": 4, "score": 321359.00826142373 }, { "content": "pub fn http2<T: Into<String>>(addr: SocketAddr, auth: T) -> Client {\n\n Client::new(addr, auth.into(), Run::Http2)\n\n}\n\n\n", "file_path": "proxy/tests/support/client.rs", "rank": 5, "score": 317033.3532622515 }, { "content": "pub fn http1<T: Into<String>>(addr: SocketAddr, auth: T) -> Client {\n\n Client::new(addr, auth.into(), Run::Http1 {\n\n absolute_uris: false,\n\n })\n\n}\n\n\n", "file_path": "proxy/tests/support/client.rs", "rank": 6, "score": 317014.5317519842 }, { "content": "pub fn client(addr: SocketAddr) -> TcpClient {\n\n let tx = run_client(addr);\n\n TcpClient {\n\n tx,\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/tcp.rs", "rank": 7, "score": 306094.9972380605 }, { "content": "struct 
Route(Box<Fn(Request<()>) -> Response<String> + Send>);\n\n\n\nimpl Route {\n\n fn string(body: &str) -> Route {\n\n let body = body.to_owned();\n\n Route(Box::new(move |_| {\n\n http::Response::builder()\n\n .status(200)\n\n .body(body.clone())\n\n .unwrap()\n\n }))\n\n }\n\n}\n\n\n\nimpl ::std::fmt::Debug for Route {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n f.write_str(\"Route\")\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 8, "score": 297277.12453376193 }, { "content": "// This sends `GET http://foo.com/ HTTP/1.1` instead of just `GET / HTTP/1.1`.\n\npub fn http1_absolute_uris<T: Into<String>>(addr: SocketAddr, auth: T) -> Client {\n\n Client::new(addr, auth.into(), Run::Http1 {\n\n absolute_uris: true,\n\n })\n\n}\n\n\n", "file_path": "proxy/tests/support/client.rs", "rank": 9, "score": 295467.2225661331 }, { "content": "fn run_server(tcp: TcpServer) -> server::Listening {\n\n let (tx, rx) = shutdown_signal();\n\n let (addr_tx, addr_rx) = oneshot::channel();\n\n let conn_count = Arc::new(AtomicUsize::from(0));\n\n let srv_conn_count = Arc::clone(&conn_count);\n\n ::std::thread::Builder::new().name(\"support server\".into()).spawn(move || {\n\n let mut core = Core::new().unwrap();\n\n let reactor = core.handle();\n\n\n\n let addr = ([127, 0, 0, 1], 0).into();\n\n let bind = TcpListener::bind(&addr, &reactor).expect(\"bind\");\n\n\n\n let local_addr = bind.local_addr().expect(\"local_addr\");\n\n let _ = addr_tx.send(local_addr);\n\n\n\n let mut accepts = tcp.accepts;\n\n\n\n let work = bind.incoming().for_each(move |(sock, _)| {\n\n let cb = accepts.pop_front().expect(\"no more accepts\");\n\n srv_conn_count.fetch_add(1, Ordering::Release);\n", "file_path": "proxy/tests/support/tcp.rs", "rank": 10, "score": 283769.5675863245 }, { "content": "pub fn tcp(addr: SocketAddr) -> tcp::TcpClient {\n\n tcp::client(addr)\n\n}\n\n\n\npub struct Client {\n\n authority: String,\n\n tx: Sender,\n\n version: 
http::Version,\n\n}\n\n\n\nimpl Client {\n\n fn new(addr: SocketAddr, authority: String, r: Run) -> Client {\n\n let v = match r {\n\n Run::Http1 { .. } => http::Version::HTTP_11,\n\n Run::Http2 => http::Version::HTTP_2,\n\n };\n\n Client {\n\n authority,\n\n tx: run(addr, r),\n\n version: v,\n", "file_path": "proxy/tests/support/client.rs", "rank": 11, "score": 281384.33318842953 }, { "content": "pub fn new() -> Server {\n\n http2()\n\n}\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 12, "score": 271112.9796214748 }, { "content": "pub fn new<T: Into<String>>(addr: SocketAddr, auth: T) -> Client {\n\n http2(addr, auth.into())\n\n}\n\n\n", "file_path": "proxy/tests/support/client.rs", "rank": 13, "score": 271079.8953608101 }, { "content": "pub fn destination_update(addr: SocketAddr) -> pb::destination::Update {\n\n pb::destination::Update {\n\n update: Some(pb::destination::update::Update::Add(\n\n pb::destination::WeightedAddrSet {\n\n addrs: vec![\n\n pb::destination::WeightedAddr {\n\n addr: Some(pb::common::TcpAddress {\n\n ip: Some(ip_conv(addr.ip())),\n\n port: u32::from(addr.port()),\n\n }),\n\n weight: 0,\n\n ..Default::default()\n\n },\n\n ],\n\n ..Default::default()\n\n },\n\n )),\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/controller.rs", "rank": 14, "score": 269424.3274681035 }, { "content": "pub fn server() -> TcpServer {\n\n TcpServer {\n\n accepts: VecDeque::new(),\n\n }\n\n}\n\n\n\npub struct TcpClient {\n\n tx: TcpSender,\n\n}\n\n\n", "file_path": "proxy/tests/support/tcp.rs", "rank": 15, "score": 264718.34218346514 }, { "content": "enum Run {\n\n Http1 {\n\n absolute_uris: bool,\n\n },\n\n Http2,\n\n}\n\n\n", "file_path": "proxy/tests/support/client.rs", "rank": 16, "score": 263369.7392272943 }, { "content": "#[derive(Debug)]\n\nenum Run {\n\n Http1,\n\n Http2,\n\n}\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 17, "score": 263029.14705044613 }, { "content": "struct Destination(Box<Fn() -> 
Option<pb::destination::Update> + Send>);\n\n\n\n#[derive(Debug)]\n\npub struct Controller {\n\n destinations: VecDeque<(String, Destination)>,\n\n reports: Option<mpsc::UnboundedSender<pb::telemetry::ReportRequest>>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Listening {\n\n pub addr: SocketAddr,\n\n shutdown: Shutdown,\n\n}\n\n\n\nimpl Controller {\n\n pub fn new() -> Self {\n\n Controller {\n\n destinations: VecDeque::new(),\n\n reports: None,\n\n }\n", "file_path": "proxy/tests/support/controller.rs", "rank": 18, "score": 258422.24348855668 }, { "content": "#[test]\n\nfn http1_connect_not_supported() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::tcp()\n\n .run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .inbound(srv)\n\n .run();\n\n\n\n let client = client::tcp(proxy.inbound);\n\n\n\n let tcp_client = client.connect();\n\n tcp_client.write(\"CONNECT foo.bar:443 HTTP/1.1\\r\\nHost: foo.bar:443\\r\\n\\r\\n\");\n\n\n\n let expected = \"HTTP/1.1 502 Bad Gateway\\r\\n\";\n\n assert_eq!(s(&tcp_client.read()[..expected.len()]), expected);\n\n}\n\n\n", "file_path": "proxy/tests/transparency.rs", "rank": 19, "score": 255866.8266037646 }, { "content": "#[test]\n\n#[should_panic]\n\nfn assert_eventually() {\n\n assert_eventually!(false)\n\n}\n", "file_path": "proxy/tests/support/mod.rs", "rank": 20, "score": 255740.9756533037 }, { "content": "#[test]\n\nfn http1_inbound_sends_telemetry() {\n\n let _ = env_logger::try_init();\n\n\n\n info!(\"running test server\");\n\n let srv = server::http1().route(\"/hey\", \"hello\").run();\n\n\n\n let mut ctrl = controller::new();\n\n let reports = ctrl.reports();\n\n let proxy = proxy::new()\n\n .controller(ctrl.run())\n\n .inbound(srv)\n\n .metrics_flush_interval(Duration::from_millis(500))\n\n .run();\n\n let client = client::http1(proxy.inbound, \"tele.test.svc.cluster.local\");\n\n\n\n info!(\"client.get(/hey)\");\n\n assert_eq!(client.get(\"/hey\"), 
\"hello\");\n\n\n\n info!(\"awaiting report\");\n\n let report = reports.wait().next().unwrap().unwrap();\n", "file_path": "proxy/tests/telemetry.rs", "rank": 21, "score": 248949.32049581382 }, { "content": "fn run(controller: Controller) -> Listening {\n\n let (tx, rx) = shutdown_signal();\n\n let (addr_tx, addr_rx) = oneshot::channel();\n\n\n\n ::std::thread::Builder::new()\n\n .name(\"support controller\".into())\n\n .spawn(move || {\n\n let mut core = Core::new().unwrap();\n\n let reactor = core.handle();\n\n\n\n let factory = NewSvc {\n\n destinations: Arc::new(Mutex::new(controller.destinations)),\n\n reports: controller.reports,\n\n };\n\n let h2 = tower_h2::Server::new(factory, Default::default(), reactor.clone());\n\n\n\n let addr = ([127, 0, 0, 1], 0).into();\n\n let bind = TcpListener::bind(&addr, &reactor).expect(\"bind\");\n\n\n\n let _ = addr_tx.send(bind.local_addr().expect(\"addr\"));\n", "file_path": "proxy/tests/support/controller.rs", "rank": 22, "score": 246651.83762773694 }, { "content": "fn ip_conv(ip: IpAddr) -> pb::common::IpAddress {\n\n match ip {\n\n IpAddr::V4(v4) => pb::common::IpAddress {\n\n ip: Some(pb::common::ip_address::Ip::Ipv4(v4.into())),\n\n },\n\n IpAddr::V6(v6) => {\n\n let (first, last) = octets_to_u64s(v6.octets());\n\n pb::common::IpAddress {\n\n ip: Some(pb::common::ip_address::Ip::Ipv6(pb::common::IPv6 {\n\n first,\n\n last,\n\n })),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/controller.rs", "rank": 23, "score": 246269.01490721846 }, { "content": "pub fn tcp() -> tcp::TcpServer {\n\n tcp::server()\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Server {\n\n routes: HashMap<String, Route>,\n\n version: Run,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Listening {\n\n pub addr: SocketAddr,\n\n pub(super) shutdown: Shutdown,\n\n pub(super) conn_count: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl Listening {\n\n pub fn connections(&self) -> usize {\n\n self.conn_count.load(Ordering::Acquire)\n", "file_path": 
"proxy/tests/support/server.rs", "rank": 24, "score": 235527.52050265923 }, { "content": "pub fn s(bytes: &[u8]) -> &str {\n\n ::std::str::from_utf8(bytes.as_ref()).unwrap()\n\n}\n\n\n", "file_path": "proxy/tests/support/mod.rs", "rank": 25, "score": 226994.8262351332 }, { "content": "pub fn destination_exists_with_no_endpoints() -> pb::destination::Update {\n\n pb::destination::Update {\n\n update: Some(pb::destination::update::Update::NoEndpoints (\n\n pb::destination::NoEndpoints { exists: true }\n\n )),\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/controller.rs", "rank": 26, "score": 222281.77805447273 }, { "content": "pub fn destination_add_none() -> pb::destination::Update {\n\n pb::destination::Update {\n\n update: Some(pb::destination::update::Update::Add(\n\n pb::destination::WeightedAddrSet {\n\n addrs: Vec::new(),\n\n ..Default::default()\n\n },\n\n )),\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/controller.rs", "rank": 27, "score": 222281.77805447273 }, { "content": "pub fn destination_remove_none() -> pb::destination::Update {\n\n pb::destination::Update {\n\n update: Some(pb::destination::update::Update::Remove(\n\n pb::destination::AddrSet {\n\n addrs: Vec::new(),\n\n ..Default::default()\n\n },\n\n )),\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/controller.rs", "rank": 28, "score": 222281.77805447273 }, { "content": "#[test]\n\nfn outbound_http1() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::http1().route(\"/\", \"hello h1\").run();\n\n let ctrl = controller::new()\n\n .destination(\"transparency.test.svc.cluster.local\", srv.addr)\n\n .run();\n\n let proxy = proxy::new().controller(ctrl).outbound(srv).run();\n\n let client = client::http1(proxy.outbound, \"transparency.test.svc.cluster.local\");\n\n\n\n assert_eq!(client.get(\"/\"), \"hello h1\");\n\n}\n\n\n", "file_path": "proxy/tests/transparency.rs", "rank": 29, "score": 216030.96772504615 }, { "content": "#[test]\n\nfn inbound_http1() {\n\n let _ = 
env_logger::try_init();\n\n\n\n let srv = server::http1().route(\"/\", \"hello h1\").run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .inbound(srv)\n\n .run();\n\n let client = client::http1(proxy.inbound, \"transparency.test.svc.cluster.local\");\n\n\n\n assert_eq!(client.get(\"/\"), \"hello h1\");\n\n}\n\n\n", "file_path": "proxy/tests/transparency.rs", "rank": 30, "score": 216030.96772504615 }, { "content": "pub fn new() -> Proxy {\n\n Proxy::new()\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Proxy {\n\n controller: Option<controller::Listening>,\n\n inbound: Option<server::Listening>,\n\n outbound: Option<server::Listening>,\n\n\n\n metrics_flush_interval: Option<Duration>,\n\n inbound_disable_ports_protocol_detection: Option<Vec<u16>>,\n\n outbound_disable_ports_protocol_detection: Option<Vec<u16>>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Listening {\n\n pub control: SocketAddr,\n\n pub inbound: SocketAddr,\n\n pub outbound: SocketAddr,\n", "file_path": "proxy/tests/support/proxy.rs", "rank": 31, "score": 211907.96212185864 }, { "content": "struct Conn(SocketAddr, Handle);\n\n\n\nimpl Connect for Conn {\n\n type Connected = TcpStream;\n\n type Error = ::std::io::Error;\n\n type Future = Box<Future<Item = TcpStream, Error = ::std::io::Error>>;\n\n\n\n fn connect(&self) -> Self::Future {\n\n let c = TcpStream::connect(&self.0, &self.1)\n\n .and_then(|tcp| tcp.set_nodelay(true).map(move |_| tcp));\n\n Box::new(c)\n\n }\n\n}\n\n\n\n\n\nimpl hyper::client::Service for Conn {\n\n type Request = hyper::Uri;\n\n type Response = TcpStream;\n\n type Future = Box<Future<Item = TcpStream, Error = ::std::io::Error>>;\n\n type Error = ::std::io::Error;\n\n fn call(&self, _: hyper::Uri) -> <Self as hyper::client::Service>::Future {\n\n let c = TcpStream::connect(&self.0, &self.1)\n\n .and_then(|tcp| tcp.set_nodelay(true).map(move |_| tcp));\n\n Box::new(c)\n\n }\n\n}\n", "file_path": "proxy/tests/support/client.rs", 
"rank": 32, "score": 211764.1586541618 }, { "content": "struct RspBody(Option<Bytes>);\n\n\n\nimpl RspBody {\n\n fn new(body: Bytes) -> Self {\n\n RspBody(Some(body))\n\n }\n\n\n\n fn empty() -> Self {\n\n RspBody(None)\n\n }\n\n}\n\n\n\n\n\nimpl Body for RspBody {\n\n type Data = Bytes;\n\n\n\n fn is_end_stream(&self) -> bool {\n\n self.0.as_ref().map(|b| b.is_empty()).unwrap_or(false)\n\n }\n\n\n\n fn poll_data(&mut self) -> Poll<Option<Bytes>, h2::Error> {\n\n let data = self.0\n\n .take()\n\n .and_then(|b| if b.is_empty() { None } else { Some(b) });\n\n Ok(Async::Ready(data))\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 33, "score": 211539.8404751476 }, { "content": "#[test]\n\nfn http1_head_responses() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::http1()\n\n .route_fn(\"/\", move |req| {\n\n assert_eq!(req.method(), \"HEAD\");\n\n Response::builder()\n\n .header(\"content-length\", \"55\")\n\n .body(\"\".into())\n\n .unwrap()\n\n })\n\n .run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .inbound(srv)\n\n .run();\n\n let client = client::http1(proxy.inbound, \"transparency.test.svc.cluster.local\");\n\n\n\n let resp = client.request(\n", "file_path": "proxy/tests/transparency.rs", "rank": 34, "score": 210033.3212195495 }, { "content": "#[test]\n\nfn http1_bodyless_responses() {\n\n let _ = env_logger::try_init();\n\n\n\n let req_status_header = \"x-test-status-requested\";\n\n\n\n let srv = server::http1()\n\n .route_fn(\"/\", move |req| {\n\n let status = req.headers()\n\n .get(req_status_header)\n\n .map(|val| {\n\n val.to_str()\n\n .expect(\"req_status_header should be ascii\")\n\n .parse::<u16>()\n\n .expect(\"req_status_header should be numbers\")\n\n })\n\n .unwrap_or(200);\n\n\n\n Response::builder()\n\n .status(status)\n\n .body(\"\".into())\n", "file_path": "proxy/tests/transparency.rs", "rank": 35, "score": 210033.32121954949 }, { "content": 
"#[test]\n\nfn inbound_sends_telemetry() {\n\n let _ = env_logger::try_init();\n\n\n\n info!(\"running test server\");\n\n let srv = server::new().route(\"/hey\", \"hello\").run();\n\n\n\n let mut ctrl = controller::new();\n\n let reports = ctrl.reports();\n\n let proxy = proxy::new()\n\n .controller(ctrl.run())\n\n .inbound(srv)\n\n .metrics_flush_interval(Duration::from_millis(500))\n\n .run();\n\n let client = client::new(proxy.inbound, \"tele.test.svc.cluster.local\");\n\n\n\n info!(\"client.get(/hey)\");\n\n assert_eq!(client.get(\"/hey\"), \"hello\");\n\n\n\n info!(\"awaiting report\");\n\n let report = reports.wait().next().unwrap().unwrap();\n", "file_path": "proxy/tests/telemetry.rs", "rank": 36, "score": 210018.86540144883 }, { "content": "#[test]\n\nfn http11_upgrade_not_supported() {\n\n let _ = env_logger::try_init();\n\n\n\n // our h1 proxy will strip the Connection header\n\n // and headers it mentions\n\n let msg1 = \"\\\n\n GET /chat HTTP/1.1\\r\\n\\\n\n Host: foo.bar\\r\\n\\\n\n Connection: Upgrade\\r\\n\\\n\n Upgrade: websocket\\r\\n\\\n\n \\r\\n\\\n\n \";\n\n\n\n // but let's pretend the server tries to upgrade\n\n // anyways\n\n let msg2 = \"\\\n\n HTTP/1.1 101 Switching Protocols\\r\\n\\\n\n Upgrade: websocket\\r\\n\\\n\n Connection: Upgrade\\r\\n\\\n\n \\r\\n\\\n", "file_path": "proxy/tests/transparency.rs", "rank": 37, "score": 209876.13603635243 }, { "content": "#[test]\n\nfn tcp_server_first() {\n\n use std::sync::mpsc;\n\n\n\n let _ = env_logger::try_init();\n\n\n\n let msg1 = \"custom tcp server starts\";\n\n let msg2 = \"custom tcp client second\";\n\n\n\n let (tx, rx) = mpsc::channel();\n\n\n\n let srv = server::tcp()\n\n .accept_fut(move |sock| {\n\n tokio_io::io::write_all(sock, msg1.as_bytes())\n\n .and_then(move |(sock, _)| {\n\n tokio_io::io::read(sock, vec![0; 512])\n\n })\n\n .map(move |(_sock, vec, n)| {\n\n assert_eq!(&vec[..n], msg2.as_bytes());\n\n tx.send(()).unwrap();\n\n })\n", "file_path": "proxy/tests/transparency.rs", 
"rank": 38, "score": 209417.25445250713 }, { "content": "pub fn shutdown_signal() -> (oneshot::Sender<()>, ShutdownRx) {\n\n let (tx, rx) = oneshot::channel();\n\n (tx, rx.then(|_| { Ok(()) } as _))\n\n}\n\n\n\n\n", "file_path": "proxy/tests/support/mod.rs", "rank": 39, "score": 209331.59029809706 }, { "content": "#[test]\n\nfn telemetry_report_errors_are_ignored() {}\n\n\n\nmacro_rules! assert_contains {\n\n ($scrape:expr, $contains:expr) => {\n\n assert_eventually!($scrape.contains($contains), \"metrics scrape:\\n{:8}\\ndid not contain:\\n{:8}\", $scrape, $contains)\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/telemetry.rs", "rank": 40, "score": 204412.5187309445 }, { "content": "#[test]\n\nfn http1_response_end_of_file() {\n\n let _ = env_logger::try_init();\n\n\n\n // test both http/1.0 and 1.1\n\n let srv = server::tcp()\n\n .accept(move |_read| {\n\n \"\\\n\n HTTP/1.0 200 OK\\r\\n\\\n\n \\r\\n\\\n\n body till eof\\\n\n \"\n\n })\n\n .accept(move |_read| {\n\n \"\\\n\n HTTP/1.1 200 OK\\r\\n\\\n\n \\r\\n\\\n\n body till eof\\\n\n \"\n\n })\n\n .run();\n", "file_path": "proxy/tests/transparency.rs", "rank": 41, "score": 204410.76926305547 }, { "content": "#[test]\n\nfn http1_removes_connection_headers() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::http1()\n\n .route_fn(\"/\", |req| {\n\n assert!(!req.headers().contains_key(\"x-foo-bar\"));\n\n Response::builder()\n\n .header(\"x-server-quux\", \"lorem ipsum\")\n\n .header(\"connection\", \"close, x-server-quux\")\n\n .body(\"\".into())\n\n .unwrap()\n\n })\n\n .run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .inbound(srv)\n\n .run();\n\n let client = client::http1(proxy.inbound, \"transparency.test.svc.cluster.local\");\n\n\n\n let res = client.request(client.request_builder(\"/\")\n\n .header(\"x-foo-bar\", \"baz\")\n\n .header(\"connection\", \"x-foo-bar, close\"));\n\n\n\n assert_eq!(res.status(), http::StatusCode::OK);\n\n 
assert!(!res.headers().contains_key(\"x-server-quux\"));\n\n}\n\n\n", "file_path": "proxy/tests/transparency.rs", "rank": 42, "score": 204410.76926305547 }, { "content": "fn pb_to_sock_addr(pb: TcpAddress) -> Option<SocketAddr> {\n\n use conduit_proxy_controller_grpc::common::ip_address::Ip;\n\n use std::net::{Ipv4Addr, Ipv6Addr};\n\n /*\n\n current structure is:\n\n TcpAddress {\n\n ip: Option<IpAddress {\n\n ip: Option<enum Ip {\n\n Ipv4(u32),\n\n Ipv6(IPv6 {\n\n first: u64,\n\n last: u64,\n\n }),\n\n }>,\n\n }>,\n\n port: u32,\n\n }\n\n */\n\n match pb.ip {\n\n Some(ip) => match ip.ip {\n", "file_path": "proxy/src/control/discovery.rs", "rank": 43, "score": 202072.4269954014 }, { "content": "#[derive(Debug)]\n\nstruct Svc(Arc<HashMap<String, Route>>);\n\n\n\nimpl Service for Svc {\n\n type Request = Request<RecvBody>;\n\n type Response = Response<RspBody>;\n\n type Error = h2::Error;\n\n type Future = future::FutureResult<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self) -> Poll<(), Self::Error> {\n\n Ok(Async::Ready(()))\n\n }\n\n\n\n fn call(&mut self, req: Self::Request) -> Self::Future {\n\n let rsp = match self.0.get(req.uri().path()) {\n\n Some(route) => {\n\n (route.0)(req.map(|_| ()))\n\n .map(|s| RspBody::new(s.as_bytes().into()))\n\n }\n\n None => {\n\n println!(\"server 404: {:?}\", req.uri().path());\n", "file_path": "proxy/tests/support/server.rs", "rank": 44, "score": 199443.04495847021 }, { "content": "#[test]\n\nfn http1_content_length_zero_is_preserved() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::http1()\n\n .route_fn(\"/\", |req| {\n\n let status = if req.headers()[\"content-length\"] == \"0\" {\n\n StatusCode::OK\n\n } else {\n\n StatusCode::BAD_REQUEST\n\n };\n\n Response::builder()\n\n .status(status)\n\n .header(\"content-length\", \"0\")\n\n .body(\"\".into())\n\n .unwrap()\n\n })\n\n .run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n", "file_path": 
"proxy/tests/transparency.rs", "rank": 45, "score": 199129.1910359836 }, { "content": "#[test]\n\nfn http1_one_connection_per_host() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::http1().route(\"/\", \"hello\").run();\n\n let ctrl = controller::new()\n\n .run();\n\n let proxy = proxy::new().controller(ctrl).inbound(srv).run();\n\n\n\n let client = client::http1(proxy.inbound, \"foo.bar\");\n\n\n\n let inbound = &proxy.inbound_server.as_ref()\n\n .expect(\"no inbound server!\");\n\n\n\n // Make a request with the header \"Host: foo.bar\". After the request, the\n\n // server should have seen one connection.\n\n let res1 = client.request(client.request_builder(\"/\")\n\n .version(http::Version::HTTP_11)\n\n .header(\"host\", \"foo.bar\")\n\n );\n\n assert_eq!(res1.status(), http::StatusCode::OK);\n", "file_path": "proxy/tests/transparency.rs", "rank": 46, "score": 199129.1910359836 }, { "content": "#[test]\n\nfn tcp_connections_close_if_client_closes() {\n\n use std::sync::mpsc;\n\n\n\n let _ = env_logger::try_init();\n\n\n\n let msg1 = \"custom tcp hello\";\n\n let msg2 = \"custom tcp bye\";\n\n\n\n let (tx, rx) = mpsc::channel();\n\n\n\n let srv = server::tcp()\n\n .accept_fut(move |sock| {\n\n tokio_io::io::read(sock, vec![0; 1024])\n\n .and_then(move |(sock, vec, n)| {\n\n assert_eq!(&vec[..n], msg1.as_bytes());\n\n\n\n tokio_io::io::write_all(sock, msg2.as_bytes())\n\n }).and_then(|(sock, _)| {\n\n // lets read again, but we should get eof\n\n tokio_io::io::read(sock, [0; 16])\n", "file_path": "proxy/tests/transparency.rs", "rank": 47, "score": 198860.09488502494 }, { "content": "trait CallBox: 'static {\n\n fn call_box(self: Box<Self>, sock: TcpStream) -> Box<Future<Item=(), Error=()>>;\n\n}\n\n\n\nimpl<F: FnOnce(TcpStream) -> Box<Future<Item=(), Error=()>> + Send + 'static> CallBox for F {\n\n fn call_box(self: Box<Self>, sock: TcpStream) -> Box<Future<Item=(), Error=()>> {\n\n (*self)(sock)\n\n }\n\n}\n\n\n\npub struct TcpServer {\n\n 
accepts: VecDeque<Handler>,\n\n}\n\n\n\npub struct TcpConn {\n\n tx: TcpConnSender,\n\n}\n\n\n\nimpl TcpClient {\n\n pub fn connect(&self) -> TcpConn {\n", "file_path": "proxy/tests/support/tcp.rs", "rank": 48, "score": 198519.5891223238 }, { "content": "type BodyStream = Box<Stream<Item=Bytes, Error=String> + Send>;\n", "file_path": "proxy/tests/support/client.rs", "rank": 49, "score": 195009.45417363782 }, { "content": "pub fn new() -> Controller {\n\n Controller::new()\n\n}\n\n\n", "file_path": "proxy/tests/support/controller.rs", "rank": 50, "score": 194220.22991102483 }, { "content": "#[test]\n\nfn http1_requests_without_host_have_unique_connections() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::http1().route(\"/\", \"hello\").run();\n\n let ctrl = controller::new()\n\n .run();\n\n let proxy = proxy::new().controller(ctrl).inbound(srv).run();\n\n\n\n let client = client::http1(proxy.inbound, \"foo.bar\");\n\n\n\n let inbound = &proxy.inbound_server.as_ref()\n\n .expect(\"no inbound server!\");\n\n\n\n // Make a request with no Host header and no authority in the request path.\n\n let res = client.request(client.request_builder(\"/\")\n\n .version(http::Version::HTTP_11)\n\n .header(\"host\", \"\")\n\n );\n\n assert_eq!(res.status(), http::StatusCode::OK);\n\n assert_eq!(res.version(), http::Version::HTTP_11);\n", "file_path": "proxy/tests/transparency.rs", "rank": 51, "score": 194158.48236820547 }, { "content": "#[derive(Debug)]\n\nstruct NewSvc(Arc<HashMap<String, Route>>);\n\nimpl NewService for NewSvc {\n\n type Request = Request<RecvBody>;\n\n type Response = Response<RspBody>;\n\n type Error = h2::Error;\n\n type InitError = ::std::io::Error;\n\n type Service = Svc;\n\n type Future = future::FutureResult<Svc, Self::InitError>;\n\n\n\n fn new_service(&self) -> Self::Future {\n\n future::ok(Svc(Arc::clone(&self.0)))\n\n }\n\n}\n", "file_path": "proxy/tests/support/server.rs", "rank": 52, "score": 193899.7860976898 }, { "content": "pub use 
self::futures::*;\n\nuse self::futures::sync::oneshot;\n\npub use self::http::{HeaderMap, Request, Response, StatusCode};\n\nuse self::http::header::HeaderValue;\n\nuse self::tokio_connect::Connect;\n\nuse self::tokio_core::net::{TcpListener, TcpStream};\n\nuse self::tokio_core::reactor::{Core, Handle};\n\nuse self::tower::{NewService, Service};\n\nuse self::tower_h2::{Body, RecvBody};\n\nuse std::net::SocketAddr;\n\npub use std::time::Duration;\n\n\n\n/// Environment variable for overriding the test patience.\n\npub const ENV_TEST_PATIENCE_MS: &'static str = \"RUST_TEST_PATIENCE_MS\";\n\npub const DEFAULT_TEST_PATIENCE: Duration = Duration::from_millis(15);\n\n\n\n/// Retry an assertion up to a specified number of times, waiting\n\n/// `RUST_TEST_PATIENCE_MS` between retries.\n\n///\n\n/// If the assertion is successful after a retry, execution will continue\n", "file_path": "proxy/tests/support/mod.rs", "rank": 53, "score": 192088.66076804037 }, { "content": " // TODO: don't do this *every* time eventually is called (lazy_static?)\n\n let patience = env::var($crate::support::ENV_TEST_PATIENCE_MS).ok()\n\n .map(|s| {\n\n let millis = u64::from_str(&s)\n\n .expect(\n\n \"Could not parse RUST_TEST_PATIENCE_MS environment \\\n\n variable.\"\n\n );\n\n Duration::from_millis(millis)\n\n })\n\n .unwrap_or($crate::support::DEFAULT_TEST_PATIENCE);\n\n let start_t = Instant::now();\n\n for i in 0..($retries + 1) {\n\n if $cond {\n\n break;\n\n } else if i == $retries {\n\n panic!(\n\n \"assertion failed after {} (retried {} times): {}\",\n\n timeout::HumanDuration(start_t.elapsed()),\n\n i,\n", "file_path": "proxy/tests/support/mod.rs", "rank": 54, "score": 192087.98077547672 }, { "content": "pub mod controller;\n\npub mod proxy;\n\npub mod server;\n\nmod tcp;\n\n\n\npub type Shutdown = oneshot::Sender<()>;\n\npub type ShutdownRx = future::Then<\n\n oneshot::Receiver<()>,\n\n Result<(), ()>,\n\n fn(Result<(), oneshot::Canceled>) -> Result<(), ()>,\n\n>;\n\n\n", 
"file_path": "proxy/tests/support/mod.rs", "rank": 55, "score": 192085.49430886545 }, { "content": "#![allow(unused)]\n\n\n\nextern crate bytes;\n\npub extern crate conduit_proxy_controller_grpc;\n\nextern crate conduit_proxy;\n\npub extern crate convert;\n\nextern crate futures;\n\nextern crate h2;\n\npub extern crate http;\n\nextern crate hyper;\n\nextern crate prost;\n\nextern crate tokio_connect;\n\nextern crate tokio_core;\n\npub extern crate tokio_io;\n\nextern crate tower;\n\nextern crate tower_h2;\n\npub extern crate env_logger;\n\n\n\nuse self::bytes::{BigEndian, Bytes, BytesMut};\n\npub use self::conduit_proxy::*;\n", "file_path": "proxy/tests/support/mod.rs", "rank": 56, "score": 192085.14169445718 }, { "content": "/// normally. If all retries are exhausted and the assertion still fails,\n\n/// `assert_eventually!` will panic as though a regular `assert!` had failed.\n\n/// Note that other panics elsewhere in the code under test will not be\n\n/// prevented.\n\n///\n\n/// This should be used sparingly, but is often useful in end-to-end testing\n\n/// where a desired state may not be reached immediately. For example, when\n\n/// some state updates asynchronously and there's no obvious way for the test\n\n/// to wait for an update to occur before making assertions.\n\n///\n\n/// The `RUST_TEST_PATIENCE_MS` environment variable may be used to customize\n\n/// the backoff duration between retries. This may be useful for purposes such\n\n/// compensating for decreased performance on CI.\n\n#[macro_export]\n\nmacro_rules! 
assert_eventually {\n\n ($cond:expr, retries: $retries:expr, $($arg:tt)+) => {\n\n {\n\n use std::{env, u64};\n\n use std::time::{Instant, Duration};\n\n use std::str::FromStr;\n", "file_path": "proxy/tests/support/mod.rs", "rank": 57, "score": 192084.28275595704 }, { "content": " format_args!($($arg)+)\n\n )\n\n } else {\n\n ::std::thread::sleep(patience);\n\n }\n\n }\n\n }\n\n };\n\n ($cond:expr, $($arg:tt)+) => {\n\n assert_eventually!($cond, retries: 5, $($arg)+)\n\n };\n\n ($cond:expr, retries: $retries:expr) => {\n\n assert_eventually!($cond, retries: $retries, stringify!($cond))\n\n };\n\n ($cond:expr) => {\n\n assert_eventually!($cond, retries: 5, stringify!($cond))\n\n };\n\n}\n\n\n\npub mod client;\n", "file_path": "proxy/tests/support/mod.rs", "rank": 58, "score": 192079.04418305794 }, { "content": " Run::Http2 => {\n\n let h2 = tower_h2::client::Connect::<Conn, Handle, ()>::new(\n\n conn,\n\n Default::default(),\n\n reactor.clone(),\n\n );\n\n\n\n Box::new(h2.new_service()\n\n .map_err(move |err| println!(\"connect error ({:?}): {:?}\", addr, err))\n\n .and_then(move |mut h2| {\n\n rx.for_each(move |(req, cb)| {\n\n let fut = h2.call(req).then(|result| {\n\n let result = result\n\n .map(|res| {\n\n res.map(|body| -> BodyStream {\n\n Box::new(RecvBodyStream(body).map_err(|e| format!(\"{:?}\", e)))\n\n })\n\n })\n\n .map_err(|e| format!(\"{:?}\", e));\n\n let _ = cb.send(result);\n", "file_path": "proxy/tests/support/client.rs", "rank": 59, "score": 191924.35598981372 }, { "content": " req.set_proxy(true);\n\n }\n\n let fut = client.request(req).then(move |result| {\n\n let result = result\n\n .map(|res| {\n\n let res = http::Response::from(res);\n\n res.map(|body| -> BodyStream {\n\n Box::new(body.map(|chunk| chunk.into())\n\n .map_err(|e| e.to_string()))\n\n })\n\n })\n\n .map_err(|e| e.to_string());\n\n let _ = cb.send(result);\n\n Ok(())\n\n });\n\n reactor.spawn(fut);\n\n Ok(())\n\n })\n\n .map_err(|e| println!(\"client error: {:?}\", e)))\n\n },\n", 
"file_path": "proxy/tests/support/client.rs", "rank": 60, "score": 191917.90830075272 }, { "content": " }\n\n }\n\n\n\n pub fn get(&self, path: &str) -> String {\n\n let mut req = self.request_builder(path);\n\n let res = self.request(req.method(\"GET\"));\n\n let stream = res.into_parts().1;\n\n stream.concat2()\n\n .map(|body| ::std::str::from_utf8(&body).unwrap().to_string())\n\n .wait()\n\n .unwrap()\n\n }\n\n\n\n pub fn request(&self, builder: &mut http::request::Builder) -> Response {\n\n let (tx, rx) = oneshot::channel();\n\n let _ = self.tx.unbounded_send((builder.body(()).unwrap(), tx));\n\n rx.map_err(|_| panic!(\"client request dropped\"))\n\n .wait()\n\n .map(|result| result.unwrap())\n\n .unwrap()\n", "file_path": "proxy/tests/support/client.rs", "rank": 61, "score": 191913.56929135937 }, { "content": "use support::*;\n\n\n\nuse self::futures::sync::{mpsc, oneshot};\n\nuse self::tokio_core::net::TcpStream;\n\n\n", "file_path": "proxy/tests/support/client.rs", "rank": 62, "score": 191910.64192309184 }, { "content": " Ok(())\n\n });\n\n reactor.spawn(fut);\n\n Ok(())\n\n })\n\n })\n\n .map(|_| ())\n\n .map_err(|e| println!(\"client error: {:?}\", e)))\n\n }\n\n };\n\n\n\n core.run(work).unwrap();\n\n }).unwrap();\n\n tx\n\n}\n\n\n", "file_path": "proxy/tests/support/client.rs", "rank": 63, "score": 191908.94945378264 }, { "content": " }\n\n\n\n pub fn request_builder(&self, path: &str) -> http::request::Builder {\n\n let mut b = Request::builder();\n\n b.uri(format!(\"http://{}{}\", self.authority, path).as_str())\n\n .version(self.version);\n\n b\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/client.rs", "rank": 64, "score": 191904.69750388185 }, { "content": " }\n\n}\n\n\n\nimpl Server {\n\n fn new(run: Run) -> Self {\n\n Server {\n\n routes: HashMap::new(),\n\n version: run,\n\n }\n\n }\n\n fn http1() -> Self {\n\n Server::new(Run::Http1)\n\n }\n\n\n\n fn http2() -> Self {\n\n Server::new(Run::Http2)\n\n }\n\n\n\n pub fn route(mut self, path: 
&str, resp: &str) -> Self {\n\n self.routes.insert(path.into(), Route::string(resp));\n", "file_path": "proxy/tests/support/server.rs", "rank": 65, "score": 191576.4430066973 }, { "content": " self\n\n }\n\n\n\n pub fn route_fn<F>(mut self, path: &str, cb: F) -> Self\n\n where\n\n F: Fn(Request<()>) -> Response<String> + Send + 'static,\n\n {\n\n self.routes.insert(path.into(), Route(Box::new(cb)));\n\n self\n\n }\n\n\n\n pub fn route_with_latency(\n\n mut self,\n\n path: &str,\n\n resp: &str,\n\n latency: Duration\n\n ) -> Self {\n\n let resp = resp.to_owned();\n\n let route = Route(Box::new(move |_| {\n\n thread::sleep(latency);\n", "file_path": "proxy/tests/support/server.rs", "rank": 66, "score": 191576.23830635904 }, { "content": " http::Response::builder()\n\n .status(200)\n\n .body(resp.clone())\n\n .unwrap()\n\n }));\n\n self.routes.insert(path.into(), route);\n\n self\n\n }\n\n\n\n pub fn run(self) -> Listening {\n\n let (tx, rx) = shutdown_signal();\n\n let (addr_tx, addr_rx) = oneshot::channel();\n\n let conn_count = Arc::new(AtomicUsize::from(0));\n\n let srv_conn_count = Arc::clone(&conn_count);\n\n ::std::thread::Builder::new().name(\"support server\".into()).spawn(move || {\n\n let mut core = Core::new().unwrap();\n\n let reactor = core.handle();\n\n\n\n let new_svc = NewSvc(Arc::new(self.routes));\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 67, "score": 191572.50334514724 }, { "content": " let srv: Box<Fn(TcpStream) -> Box<Future<Item=(), Error=()>>> = match self.version {\n\n Run::Http1 => {\n\n let h1 = hyper::server::Http::<hyper::Chunk>::new();\n\n\n\n Box::new(move |sock| {\n\n let h1_clone = h1.clone();\n\n let srv_conn_count = Arc::clone(&srv_conn_count);\n\n let conn = new_svc.new_service()\n\n .inspect(move |_| {\n\n srv_conn_count.fetch_add(1, Ordering::Release);\n\n })\n\n .from_err()\n\n .and_then(move |svc| h1_clone.serve_connection(sock, svc))\n\n .map(|_| ())\n\n .map_err(|e| println!(\"server h1 error: {}\", 
e));\n\n Box::new(conn)\n\n })\n\n },\n\n Run::Http2 => {\n\n let h2 = tower_h2::Server::new(\n", "file_path": "proxy/tests/support/server.rs", "rank": 68, "score": 191572.1825259782 }, { "content": " let _ = addr_tx.send(local_addr);\n\n\n\n let serve = bind.incoming()\n\n .fold((srv, reactor), move |(srv, reactor), (sock, _)| {\n\n if let Err(e) = sock.set_nodelay(true) {\n\n return Err(e);\n\n }\n\n reactor.spawn(srv(sock));\n\n\n\n Ok((srv, reactor))\n\n });\n\n\n\n core.handle().spawn(\n\n serve\n\n .map(|_| ())\n\n .map_err(|e| println!(\"server error: {}\", e)),\n\n );\n\n\n\n core.run(rx).unwrap();\n\n }).unwrap();\n", "file_path": "proxy/tests/support/server.rs", "rank": 69, "score": 191570.34072955593 }, { "content": " let mut rsp = http::Response::builder();\n\n rsp.version(http::Version::HTTP_2);\n\n let body = RspBody::empty();\n\n rsp.status(404).body(body).unwrap()\n\n }\n\n };\n\n future::ok(rsp)\n\n }\n\n}\n\n\n\nimpl hyper::server::Service for Svc {\n\n type Request = hyper::server::Request;\n\n type Response = hyper::server::Response<hyper::Body>;\n\n type Error = hyper::Error;\n\n type Future = future::FutureResult<hyper::server::Response<hyper::Body>, hyper::Error>;\n\n\n\n fn call(&self, req: Self::Request) -> Self::Future {\n\n\n\n let rsp = match self.0.get(req.uri().path()) {\n\n Some(route) => {\n", "file_path": "proxy/tests/support/server.rs", "rank": 70, "score": 191567.4884827621 }, { "content": " (route.0)(Request::from(req).map(|_| ()))\n\n .map(|s| hyper::Body::from(s))\n\n .into()\n\n }\n\n None => {\n\n println!(\"server 404: {:?}\", req.uri().path());\n\n let rsp = hyper::server::Response::new();\n\n let body = hyper::Body::empty();\n\n rsp.with_status(hyper::NotFound)\n\n .with_body(body)\n\n }\n\n };\n\n future::ok(rsp)\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 71, "score": 191566.30287846574 }, { "content": " new_svc,\n\n Default::default(),\n\n reactor.clone(),\n\n );\n\n Box::new(move |sock| 
{\n\n let srv_conn_count = Arc::clone(&srv_conn_count);\n\n let conn = h2.serve(sock)\n\n .map_err(|e| println!(\"server h2 error: {:?}\", e))\n\n .inspect(move |_| {\n\n srv_conn_count.fetch_add(1, Ordering::Release);\n\n });\n\n Box::new(conn)\n\n })\n\n },\n\n };\n\n\n\n let addr = ([127, 0, 0, 1], 0).into();\n\n let bind = TcpListener::bind(&addr, &reactor).expect(\"bind\");\n\n\n\n let local_addr = bind.local_addr().expect(\"local_addr\");\n", "file_path": "proxy/tests/support/server.rs", "rank": 72, "score": 191564.83255730697 }, { "content": "use std::collections::HashMap;\n\nuse std::sync::Arc;\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::thread;\n\n\n\nuse support::*;\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 73, "score": 191558.10169469746 }, { "content": "\n\n let addr = addr_rx.wait().expect(\"addr\");\n\n\n\n Listening {\n\n addr,\n\n shutdown: tx,\n\n conn_count,\n\n }\n\n }\n\n}\n\n\n", "file_path": "proxy/tests/support/server.rs", "rank": 74, "score": 191554.29981477227 }, { "content": "/// Execute a closure with a `Debug` item attached to allow log messages.\n\npub fn context<T, F, U>(context: &T, mut closure: F) -> U\n\nwhere\n\n T: ::std::fmt::Debug + 'static,\n\n F: FnMut() -> U,\n\n{\n\n // This is a raw pointer because of lifetime conflicts that require\n\n // the thread local to have a static lifetime.\n\n //\n\n // We don't want to require a static lifetime, and in fact,\n\n // only use the reference within this closure, so converting\n\n // to a raw pointer is safe.\n\n let _guard = ContextGuard::new(context);\n\n closure()\n\n}\n\n\n", "file_path": "proxy/src/logging.rs", "rank": 75, "score": 190539.38804354178 }, { "content": "#[test]\n\nfn http1_requests_without_body_doesnt_add_transfer_encoding() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::http1()\n\n .route_fn(\"/\", |req| {\n\n let has_body_header = req.headers().contains_key(\"transfer-encoding\")\n\n || 
req.headers().contains_key(\"content-length\");\n\n let status = if has_body_header {\n\n StatusCode::BAD_REQUEST\n\n } else {\n\n StatusCode::OK\n\n };\n\n let mut res = Response::new(\"\".into());\n\n *res.status_mut() = status;\n\n res\n\n })\n\n .run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n", "file_path": "proxy/tests/transparency.rs", "rank": 76, "score": 185045.99115156478 }, { "content": "type Handler = Box<CallBox + Send>;\n\n\n", "file_path": "proxy/tests/support/tcp.rs", "rank": 77, "score": 181302.1849960511 }, { "content": "/// Tries to make sure the `Uri` of the request is in a form needed by\n\n/// hyper's Client.\n\n///\n\n/// Also sets the `UriIsAbsoluteForm` extension if received `Uri` was\n\n/// already in absolute-form.\n\npub fn normalize_our_view_of_uri<B>(req: &mut http::Request<B>) {\n\n if req.uri().authority_part().is_some() {\n\n req.extensions_mut().insert(UriIsAbsoluteForm);\n\n return;\n\n }\n\n\n\n // try to parse the Host header\n\n if let Some(auth) = authority_from_host(&req) {\n\n set_authority(req.uri_mut(), auth);\n\n return;\n\n }\n\n\n\n // last resort is to use the so_original_dst\n\n let orig_dst = req.extensions()\n\n .get::<Arc<ServerCtx>>()\n\n .and_then(|ctx| ctx.orig_dst_if_not_local());\n\n if let Some(orig_dst) = orig_dst {\n\n let mut bytes = BytesMut::with_capacity(31);\n\n write!(&mut bytes, \"{}\", orig_dst)\n\n .expect(\"socket address display is under 31 bytes\");\n\n let bytes = bytes.freeze();\n\n let auth = Authority::from_shared(bytes)\n\n .expect(\"socket address is valid authority\");\n\n set_authority(req.uri_mut(), auth);\n\n }\n\n}\n\n\n", "file_path": "proxy/src/transparency/h1.rs", "rank": 78, "score": 176921.7703643464 }, { "content": "type Request = http::Request<()>;\n", "file_path": "proxy/tests/support/client.rs", "rank": 79, "score": 173898.51696969266 }, { "content": "fn octets_to_u64s(octets: [u8; 16]) -> (u64, u64) {\n\n let first = 
(u64::from(octets[0]) << 56) + (u64::from(octets[1]) << 48)\n\n + (u64::from(octets[2]) << 40) + (u64::from(octets[3]) << 32)\n\n + (u64::from(octets[4]) << 24) + (u64::from(octets[5]) << 16)\n\n + (u64::from(octets[6]) << 8) + u64::from(octets[7]);\n\n let last = (u64::from(octets[8]) << 56) + (u64::from(octets[9]) << 48)\n\n + (u64::from(octets[10]) << 40) + (u64::from(octets[11]) << 32)\n\n + (u64::from(octets[12]) << 24) + (u64::from(octets[13]) << 16)\n\n + (u64::from(octets[14]) << 8) + u64::from(octets[15]);\n\n (first, last)\n\n}\n", "file_path": "proxy/tests/support/controller.rs", "rank": 80, "score": 172582.99037131283 }, { "content": "#[test]\n\nfn inbound_tcp() {\n\n let _ = env_logger::try_init();\n\n\n\n let msg1 = \"custom tcp hello\";\n\n let msg2 = \"custom tcp bye\";\n\n\n\n let srv = server::tcp()\n\n .accept(move |read| {\n\n assert_eq!(read, msg1.as_bytes());\n\n msg2\n\n })\n\n .run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .inbound(srv)\n\n .run();\n\n\n\n let client = client::tcp(proxy.inbound);\n\n\n\n let tcp_client = client.connect();\n\n\n\n tcp_client.write(msg1);\n\n assert_eq!(tcp_client.read(), msg2.as_bytes());\n\n}\n\n\n", "file_path": "proxy/tests/transparency.rs", "rank": 81, "score": 168506.19997433852 }, { "content": "#[test]\n\nfn http10_with_host() {\n\n let _ = env_logger::try_init();\n\n\n\n let host = \"transparency.test.svc.cluster.local\";\n\n let srv = server::http1()\n\n .route_fn(\"/\", move |req| {\n\n assert_eq!(req.version(), http::Version::HTTP_10);\n\n assert_eq!(req.headers().get(\"host\").unwrap(), host);\n\n Response::builder()\n\n .version(http::Version::HTTP_10)\n\n .body(\"\".into())\n\n .unwrap()\n\n })\n\n .run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .inbound(srv)\n\n .run();\n\n let client = client::http1(proxy.inbound, host);\n\n\n\n let res = 
client.request(client.request_builder(\"/\")\n\n .version(http::Version::HTTP_10)\n\n .header(\"host\", host));\n\n\n\n assert_eq!(res.status(), http::StatusCode::OK);\n\n assert_eq!(res.version(), http::Version::HTTP_10);\n\n}\n\n\n", "file_path": "proxy/tests/transparency.rs", "rank": 82, "score": 168506.19997433852 }, { "content": "#[test]\n\nfn outbound_tcp() {\n\n let _ = env_logger::try_init();\n\n\n\n let msg1 = \"custom tcp hello\";\n\n let msg2 = \"custom tcp bye\";\n\n\n\n let srv = server::tcp()\n\n .accept(move |read| {\n\n assert_eq!(read, msg1.as_bytes());\n\n msg2\n\n })\n\n .run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .outbound(srv)\n\n .run();\n\n\n\n let client = client::tcp(proxy.outbound);\n\n\n\n let tcp_client = client.connect();\n\n\n\n tcp_client.write(msg1);\n\n assert_eq!(tcp_client.read(), msg2.as_bytes());\n\n}\n\n\n", "file_path": "proxy/tests/transparency.rs", "rank": 83, "score": 168506.19997433852 }, { "content": "fn tcp_serve(\n\n tcp: &tcp::Proxy,\n\n connection: Connection,\n\n sensors: &Sensors,\n\n opened_at: Instant,\n\n proxy_ctx: &Arc<ProxyCtx>,\n\n local_addr: LocalAddr,\n\n remote_addr: RemoteAddr,\n\n orig_dst: OrigDst,\n\n) -> Box<Future<Item=(), Error=()>> {\n\n let srv_ctx = ServerCtx::new(\n\n proxy_ctx,\n\n local_addr.0,\n\n remote_addr.0,\n\n orig_dst.0,\n\n common::Protocol::Tcp,\n\n );\n\n\n\n // record telemetry\n\n let tcp_in = sensors.accept(connection, opened_at, &srv_ctx);\n\n\n\n tcp.serve(tcp_in, srv_ctx)\n\n}\n", "file_path": "proxy/src/transparency/server.rs", "rank": 84, "score": 164415.23543316266 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"flaky_tests\"), ignore)]\n\nfn records_latency_statistics() {\n\n let _ = env_logger::try_init();\n\n\n\n info!(\"running test server\");\n\n let srv = server::new()\n\n .route_with_latency(\"/hey\", \"hello\", Duration::from_millis(500))\n\n .route_with_latency(\"/hi\", \"good morning\", 
Duration::from_millis(40))\n\n .run();\n\n\n\n let mut ctrl = controller::new();\n\n let reports = ctrl.reports();\n\n let proxy = proxy::new()\n\n .controller(ctrl.run())\n\n .inbound(srv)\n\n .metrics_flush_interval(Duration::from_secs(5))\n\n .run();\n\n let client = client::new(proxy.inbound, \"tele.test.svc.cluster.local\");\n\n\n\n info!(\"client.get(/hey)\");\n\n assert_eq!(client.get(\"/hey\"), \"hello\");\n", "file_path": "proxy/tests/telemetry.rs", "rank": 85, "score": 164047.7429179943 }, { "content": "#[test]\n\nfn http10_without_host() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::http1()\n\n .route_fn(\"/\", move |req| {\n\n assert_eq!(req.version(), http::Version::HTTP_10);\n\n assert!(!req.headers().contains_key(\"host\"));\n\n assert_eq!(req.uri().to_string(), \"/\");\n\n Response::builder()\n\n .version(http::Version::HTTP_10)\n\n .body(\"\".into())\n\n .unwrap()\n\n })\n\n .run();\n\n let ctrl = controller::new()\n\n .run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .inbound(srv)\n\n .run();\n", "file_path": "proxy/tests/transparency.rs", "rank": 86, "score": 164042.6306521373 }, { "content": "#[test]\n\nfn tcp_with_no_orig_dst() {\n\n let _ = env_logger::try_init();\n\n\n\n let srv = server::tcp()\n\n .accept(move |_| \"don't read me\")\n\n .run();\n\n let ctrl = controller::new().run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .inbound(srv)\n\n .run();\n\n\n\n // no outbound configured for proxy\n\n let client = client::tcp(proxy.outbound);\n\n\n\n let tcp_client = client.connect();\n\n tcp_client.write(\"custom tcp hello\");\n\n\n\n let read = tcp_client\n\n .try_read()\n\n // This read might be an error, or an empty vec\n\n .unwrap_or_else(|_| Vec::new());\n\n assert_eq!(read, b\"\");\n\n}\n\n\n", "file_path": "proxy/tests/transparency.rs", "rank": 87, "score": 164042.6306521373 }, { "content": "#[test]\n\nfn metrics_have_no_double_commas() {\n\n // Test for regressions to 
runconduit/conduit#600.\n\n let _ = env_logger::try_init();\n\n\n\n info!(\"running test server\");\n\n let inbound_srv = server::new().route(\"/hey\", \"hello\").run();\n\n let outbound_srv = server::new().route(\"/hey\", \"hello\").run();\n\n\n\n let ctrl = controller::new()\n\n .destination(\"tele.test.svc.cluster.local\", outbound_srv.addr)\n\n .run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .inbound(inbound_srv)\n\n .outbound(outbound_srv)\n\n .metrics_flush_interval(Duration::from_millis(500))\n\n .run();\n\n let client = client::new(proxy.inbound, \"tele.test.svc.cluster.local\");\n\n let metrics = client::http1(proxy.metrics, \"localhost\");\n\n\n", "file_path": "proxy/tests/telemetry.rs", "rank": 88, "score": 164042.6306521373 }, { "content": "type Response = http::Response<BodyStream>;\n", "file_path": "proxy/tests/support/client.rs", "rank": 89, "score": 163662.9864127253 }, { "content": "\tclient telemetry.TelemetryClient\n", "file_path": "controller/api/public/grpc_server_test.go", "rank": 90, "score": 160911.00598291063 }, { "content": "/// Creates proxy-specific runtime telemetry.\n\n///\n\n/// [`Sensors`] hide the details of how telemetry is recorded, but expose proxy utilties\n\n/// that support telemetry.\n\n///\n\n/// [`Control`] drives processing of all telemetry events for tapping as well as metrics\n\n/// reporting.\n\n///\n\n/// # Arguments\n\n/// - `capacity`: the number of events to aggregate.\n\n/// - `flush_interval`: the length of time after which a metrics report should be sent,\n\n/// regardless of how many events have been aggregated.\n\n///\n\n/// [`Sensors`]: struct.Sensors.html\n\n/// [`Control`]: struct.Control.html\n\npub fn new(\n\n process: &Arc<ctx::Process>,\n\n capacity: usize,\n\n flush_interval: Duration,\n\n) -> (Sensors, MakeControl) {\n\n let (tx, rx) = futures_mpsc_lossy::channel(capacity);\n\n let s = Sensors::new(tx);\n\n let c = MakeControl::new(rx, flush_interval, process);\n\n (s, c)\n\n}\n", 
"file_path": "proxy/src/telemetry/mod.rs", "rank": 92, "score": 158897.51813061163 }, { "content": "\tSend(*Update) error\n", "file_path": "controller/gen/proxy/destination/destination.pb.go", "rank": 93, "score": 157452.88633884815 }, { "content": "\tSend(*conduit_common.TapEvent) error\n", "file_path": "controller/gen/proxy/tap/tap.pb.go", "rank": 94, "score": 157452.88633884815 }, { "content": "\tAddr *conduit_common.TcpAddress `protobuf:\"bytes,1,opt,name=addr\" json:\"addr,omitempty\"`\n", "file_path": "controller/gen/proxy/destination/destination.pb.go", "rank": 95, "score": 157402.1926497314 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"flaky_tests\"), ignore)]\n\nfn metrics_endpoint_outbound_request_duration() {\n\n let _ = env_logger::try_init();\n\n\n\n info!(\"running test server\");\n\n let srv = server::new()\n\n .route(\"/hey\", \"hello\")\n\n .run();\n\n let ctrl = controller::new()\n\n .destination(\"tele.test.svc.cluster.local\", srv.addr)\n\n .run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .outbound(srv)\n\n .metrics_flush_interval(Duration::from_millis(500))\n\n .run();\n\n let client = client::new(proxy.outbound, \"tele.test.svc.cluster.local\");\n\n let metrics = client::http1(proxy.metrics, \"localhost\");\n\n\n\n info!(\"client.get(/hey)\");\n\n assert_eq!(client.get(\"/hey\"), \"hello\");\n", "file_path": "proxy/tests/telemetry.rs", "rank": 96, "score": 155932.6696258209 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"flaky_tests\"), ignore)]\n\nfn metrics_endpoint_inbound_response_latency() {\n\n let _ = env_logger::try_init();\n\n\n\n info!(\"running test server\");\n\n let srv = server::new()\n\n .route_with_latency(\"/hey\", \"hello\", Duration::from_millis(500))\n\n .route_with_latency(\"/hi\", \"good morning\", Duration::from_millis(40))\n\n .run();\n\n\n\n let ctrl = controller::new();\n\n let proxy = proxy::new()\n\n .controller(ctrl.run())\n\n .inbound(srv)\n\n 
.metrics_flush_interval(Duration::from_millis(500))\n\n .run();\n\n let client = client::new(proxy.inbound, \"tele.test.svc.cluster.local\");\n\n let metrics = client::http1(proxy.metrics, \"localhost\");\n\n\n\n info!(\"client.get(/hey)\");\n\n assert_eq!(client.get(\"/hey\"), \"hello\");\n", "file_path": "proxy/tests/telemetry.rs", "rank": 97, "score": 155932.6696258209 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"flaky_tests\"), ignore)]\n\nfn metrics_endpoint_outbound_response_latency() {\n\n let _ = env_logger::try_init();\n\n\n\n info!(\"running test server\");\n\n let srv = server::new()\n\n .route_with_latency(\"/hey\", \"hello\", Duration::from_millis(500))\n\n .route_with_latency(\"/hi\", \"good morning\", Duration::from_millis(40))\n\n .run();\n\n\n\n let ctrl = controller::new()\n\n .destination(\"tele.test.svc.cluster.local\", srv.addr)\n\n .run();\n\n let proxy = proxy::new()\n\n .controller(ctrl)\n\n .outbound(srv)\n\n .metrics_flush_interval(Duration::from_millis(500))\n\n .run();\n\n let client = client::new(proxy.outbound, \"tele.test.svc.cluster.local\");\n\n let metrics = client::http1(proxy.metrics, \"localhost\");\n\n\n", "file_path": "proxy/tests/telemetry.rs", "rank": 98, "score": 155932.6696258209 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"flaky_tests\"), ignore)]\n\nfn metrics_endpoint_inbound_request_duration() {\n\n let _ = env_logger::try_init();\n\n\n\n info!(\"running test server\");\n\n let srv = server::new()\n\n .route(\"/hey\", \"hello\")\n\n .run();\n\n\n\n let ctrl = controller::new();\n\n let proxy = proxy::new()\n\n .controller(ctrl.run())\n\n .inbound(srv)\n\n .metrics_flush_interval(Duration::from_millis(500))\n\n .run();\n\n let client = client::new(proxy.inbound, \"tele.test.svc.cluster.local\");\n\n let metrics = client::http1(proxy.metrics, \"localhost\");\n\n\n\n // request with body should increment request_duration\n\n info!(\"client.get(/hey)\");\n\n assert_eq!(client.get(\"/hey\"), 
\"hello\");\n", "file_path": "proxy/tests/telemetry.rs", "rank": 99, "score": 155932.6696258209 } ]
Rust
src/rendering.rs
Ipotrick/eisen
fa86495574f3be99213e0ed98f0963db0f43614a
use std::{time::{SystemTime, Instant}}; use async_std::sync::Mutex; use wgpu::{util::{DeviceExt, RenderEncoder}, BindGroupDescriptor, RenderPipelineDescriptor}; use winit::{dpi::PhysicalSize, window::Window}; const QUADS_PER_BATCH: usize = 1024; #[repr(C)] #[derive(Clone, Copy, Debug, bytemuck::Pod, bytemuck::Zeroable)] struct QuadDrawGlobals{ camera_translation: [f32; 2], camera_rotation: [f32; 2], camera_scale: [f32; 2], } #[repr(C)] #[derive(Clone, Copy, Debug, bytemuck::Pod, bytemuck::Zeroable)] pub struct QuadDrawInfo{ pub color: [f32; 4], pub scale: [f32; 2], pub position: [f32; 2], pub orientation: [f32; 2], pub _pad: [f32; 2], } pub struct SharedRenderRessources { instance: wgpu::Instance, surface: wgpu::Surface, surf_size: PhysicalSize<u32>, surf_config: wgpu::SurfaceConfiguration, adapter: wgpu::Adapter, device: wgpu::Device, main_queue: wgpu::Queue, } pub trait RenderRoutine: Send + Sync { fn render(&mut self, shareed: &mut SharedRenderRessources); } pub enum RenderPass { Main, } pub struct RenderState { shared_ressources: SharedRenderRessources, main_pass_render_routines: Vec<Box<dyn RenderRoutine>>, rect_draw_buffers: Vec<(wgpu::Buffer, wgpu::BindGroup)>, last_buffer_index: usize, last_buffer_fill_len: usize, rect_index_buffer: wgpu::Buffer, rect_pipeline_binding_group_layout: wgpu::BindGroupLayout, rect_pipeline_globals_binding_group_layout: wgpu::BindGroupLayout, rect_globals_buffer: wgpu::Buffer, rect_pipeline_layout: wgpu::PipelineLayout, rect_pipeline: wgpu::RenderPipeline, globals: QuadDrawGlobals, } pub struct Renderer { state: Mutex<RenderState>, start_time: SystemTime, } impl Renderer { pub async fn new(window: &Window) -> Self { let instance = wgpu::Instance::new(wgpu::Backends::VULKAN); let surface = unsafe { instance.create_surface(window) }; let adapter = instance.request_adapter( &wgpu::RequestAdapterOptions { power_preference: wgpu::PowerPreference::HighPerformance, compatible_surface: Some(&surface), force_fallback_adapter: false, }, 
).await.unwrap(); let (device, main_queue) = adapter.request_device( &wgpu::DeviceDescriptor{ features: wgpu::Features::empty(), limits: wgpu::Limits::default(), label: Some("main device"), }, None, ).await.unwrap(); let surf_size = window.inner_size(); let surf_config = wgpu::SurfaceConfiguration { usage: wgpu::TextureUsages::RENDER_ATTACHMENT, format: surface.get_preferred_format(&adapter).unwrap(), width: surf_size.width, height: surf_size.height, present_mode: wgpu::PresentMode::Immediate, }; surface.configure(&device, &surf_config); let mut indices = Vec::<u32>::new(); indices.reserve(QUADS_PER_BATCH*6); for i in (0 as u32..(QUADS_PER_BATCH*4) as u32).step_by(4) { indices.push(i + 2); indices.push(i + 0); indices.push(i + 1); indices.push(i + 2); indices.push(i + 1); indices.push(i + 3); } let batched_quad_index_buffer = device.create_buffer( &wgpu::BufferDescriptor{ label: Some("batched quad index buffer"), size: (std::mem::size_of::<u32>() * indices.len()) as u64, usage: wgpu::BufferUsages::INDEX | wgpu::BufferUsages::COPY_DST, mapped_at_creation: false, } ); main_queue.write_buffer(&batched_quad_index_buffer, 0, bytemuck::cast_slice(&indices[..])); let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor{ entries: &[ wgpu::BindGroupLayoutEntry { binding: 0, visibility: wgpu::ShaderStages::VERTEX, ty: wgpu::BindingType::Buffer{ has_dynamic_offset: false, ty: wgpu::BufferBindingType::Uniform, min_binding_size: None, }, count: None, }, ], label: Some("quad render bind group layout"), }); let globals_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor{ entries: &[ wgpu::BindGroupLayoutEntry { binding: 0, visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Buffer{ has_dynamic_offset: false, ty: wgpu::BufferBindingType::Uniform, min_binding_size: None, }, count: None, }, ], label: Some("quad render global bind group layout"), }); let globals_buffer = 
device.create_buffer(&wgpu::BufferDescriptor{ label: Some("[quad renderer] globals"), size: std::mem::size_of::<QuadDrawGlobals>() as u64, usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, mapped_at_creation: false, }); let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor{ label: Some("quad render pipeline"), bind_group_layouts: &[&globals_bind_group_layout, &bind_group_layout], push_constant_ranges: &[], }); let shader_module = device.create_shader_module(&wgpu::ShaderModuleDescriptor{ label: Some("quad render shader"), source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(include_str!("rendering/quad_shader.wgsl"))), }); let pipeline = device.create_render_pipeline(&RenderPipelineDescriptor{ vertex: wgpu::VertexState{ module: &shader_module, entry_point: "vs_main", buffers: &[], }, fragment: Some(wgpu::FragmentState{ module: &shader_module, entry_point: "fs_main", targets: &[ wgpu::ColorTargetState{ blend: None, format: surf_config.format, write_mask: wgpu::ColorWrites::ALL, } ], }), label: Some("quad render pipeline"), layout: Some(&pipeline_layout), primitive: wgpu::PrimitiveState{ topology: wgpu::PrimitiveTopology::TriangleList, strip_index_format: None, front_face: wgpu::FrontFace::Cw, cull_mode: None, unclipped_depth: false, polygon_mode: wgpu::PolygonMode::Fill, conservative: false, }, depth_stencil: None, multisample: wgpu::MultisampleState::default(), multiview: None, }); let shared = SharedRenderRessources{ instance, surface, surf_size, surf_config, adapter, device, main_queue, }; let state = Mutex::new(RenderState{ shared_ressources: shared, main_pass_render_routines: Vec::new(), rect_draw_buffers: Vec::new(), last_buffer_index: 0, last_buffer_fill_len: 0, rect_index_buffer: batched_quad_index_buffer, rect_pipeline_binding_group_layout: bind_group_layout, rect_pipeline_globals_binding_group_layout: globals_bind_group_layout, rect_globals_buffer: globals_buffer, rect_pipeline_layout: pipeline_layout, 
rect_pipeline: pipeline, globals: QuadDrawGlobals{ camera_translation: [0.0,0.0], camera_rotation: [1.0,0.0], camera_scale: [1.0,1.0], }, }); Self{ state, start_time: SystemTime::now(), } } pub async fn resize(&self, new_size: winit::dpi::PhysicalSize<u32>) { let mut state = self.state.lock().await; if new_size.width > 0 && new_size.height > 0 { state.shared_ressources.surf_size = new_size; state.shared_ressources.surf_config.width = new_size.width; state.shared_ressources.surf_config.height = new_size.height; state.shared_ressources.surface.configure(&state.shared_ressources.device, &state.shared_ressources.surf_config); println!("resized to: ({},{})", new_size.width, new_size.height); } } pub async fn add_render_routine(&self, routine: impl RenderRoutine + 'static, pass: RenderPass) { let mut state = self.state.lock().await; match pass { RenderPass::Main => state.main_pass_render_routines.push(Box::new(routine)), } } pub async fn push_quads(&self, quads: &[QuadDrawInfo]) { let mut state = self.state.lock().await; if quads.len() > 0 { state.last_buffer_index = (quads.len() - 1) / QUADS_PER_BATCH; state.last_buffer_fill_len = (quads.len() - 1) % QUADS_PER_BATCH + 1; while state.rect_draw_buffers.len() <= state.last_buffer_index { let buff = state.shared_ressources.device.create_buffer(&wgpu::BufferDescriptor{ label: Some("quad draw buffer"), mapped_at_creation: false, size: (QUADS_PER_BATCH * std::mem::size_of::<QuadDrawInfo>()) as u64, usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::UNIFORM }); let bind_group = state.shared_ressources.device.create_bind_group(&wgpu::BindGroupDescriptor{ label: Some("quad render pipeline bind group"), layout: &state.rect_pipeline_binding_group_layout, entries: &[ wgpu::BindGroupEntry{ binding: 0, resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding{ buffer: &buff, offset: 0, size: None, }), } ], }); state.rect_draw_buffers.push((buff, bind_group)); } for i in 0..state.last_buffer_index { let slice = 
bytemuck::cast_slice(&quads[i*QUADS_PER_BATCH..(i+1)*state.last_buffer_index]); state.shared_ressources.main_queue.write_buffer(&state.rect_draw_buffers[i].0, 0, slice); } let slice = bytemuck::cast_slice(&quads[state.last_buffer_index*QUADS_PER_BATCH..]); state.shared_ressources.main_queue.write_buffer(&state.rect_draw_buffers[state.last_buffer_index].0, 0, slice); } else { state.last_buffer_index = 0; state.last_buffer_fill_len = 0; } } pub async fn render(&self) -> Result<(), wgpu::SurfaceError> { let mut state = self.state.lock().await; let state = &mut*state; let output = state.shared_ressources.surface.get_current_texture()?; let view = output.texture.create_view(&wgpu::TextureViewDescriptor::default()); state.shared_ressources.main_queue.write_buffer(&state.rect_globals_buffer, 0, bytemuck::cast_slice(&[state.globals])); let mut encoder = state.shared_ressources.device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("Rect Renderpass Encoder"), }); { let globals_bind_group = state.shared_ressources.device.create_bind_group(&wgpu::BindGroupDescriptor{ label: Some("[quad renderer] globals"), layout: &state.rect_pipeline_globals_binding_group_layout, entries: &[ wgpu::BindGroupEntry{ binding: 0, resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding{ buffer: &state.rect_globals_buffer, offset: 0, size: None, }), } ] }); let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { label: Some("Render Pass"), color_attachments: &[wgpu::RenderPassColorAttachment { view: &view, resolve_target: None, ops: wgpu::Operations { load: wgpu::LoadOp::Clear(wgpu::Color { r: 0.0, g: 0.0, b: 0.0, a: 1.0, }), store: true, }, }], depth_stencil_attachment: None, }); render_pass.set_bind_group(0, &globals_bind_group, &[]); render_pass.set_pipeline(&state.rect_pipeline); render_pass.set_index_buffer( state.rect_index_buffer.slice(..), wgpu::IndexFormat::Uint32, ); if state.last_buffer_fill_len > 0 { for i in 0..state.last_buffer_index { 
render_pass.set_bind_group(1, &state.rect_draw_buffers[i].1, &[]); render_pass.draw_indexed(0..(QUADS_PER_BATCH*6) as u32, 0, 0..1); } render_pass.set_bind_group(1, &state.rect_draw_buffers[state.last_buffer_index].1, &[]); render_pass.draw_indexed(0..(state.last_buffer_fill_len*6) as u32, 0, 0..1); } } let shared = &mut state.shared_ressources; for render_routine in &mut state.main_pass_render_routines { render_routine.render(shared); } state.shared_ressources.main_queue.submit(std::iter::once(encoder.finish())); output.present(); Ok(()) } }
use std::{time::{SystemTime, Instant}}; use async_std::sync::Mutex; use wgpu::{util::{DeviceExt, RenderEncoder}, BindGroupDescriptor, RenderPipelineDescriptor}; use winit::{dpi::PhysicalSize, window::Window}; const QUADS_PER_BATCH: usize = 1024; #[repr(C)] #[derive(Clone, Copy, Debug, bytemuck::Pod, bytemuck::Zeroable)] struct QuadDrawGlobals{ camera_translation: [f32; 2], camera_rotation: [f32; 2], camera_scale: [f32; 2], } #[repr(C)] #[derive(Clone, Copy, Debug, bytemuck::Pod, bytemuck::Zeroable)] pub struct QuadDrawInfo{ pub color: [f32; 4], pub scale: [f32; 2], pub position: [f32; 2], pub orientation: [f32; 2], pub _pad: [f32; 2], } pub struct SharedRenderRessources { instance: wgpu::Instance, surface: wgpu::Surface, surf_size: PhysicalSize<u32>, surf_config: wgpu::SurfaceConfiguration, adapter: wgpu::Adapter, device: wgpu::Device, main_queue: wgpu::Queue, } pub trait RenderRoutine: Send + Sync { fn render(&mut self, shareed: &mut SharedRenderRessources); } pub enum RenderPass { Main, } pub struct RenderState { shared_ressources: SharedRenderRessources, main_pass_render_routines: Vec<Box<dyn RenderRoutine>>, rect_draw_buffers: Vec<(wgpu::Buffer, wgpu::BindGroup)>, last_buffer_index: usize, last_buffer_fill_len: usize, rect_index_buffer: wgpu::Buffer, rect_pipeline_binding_group_layout: wgpu::BindGroupLayout, rect_pipeline_globals_binding_group_layout: wgpu::BindGroupLayout, rect_globals_buffer: wgpu::Buffer, rect_pipeline_layout: wgpu::PipelineLayout, rect_pipeline: wgpu::RenderPipeline, globals: QuadDrawGlobals, } pub struct Renderer { state: Mutex<RenderState>, start_time: SystemTime, } impl Renderer { pub async fn new(window: &Window) -> Self { let instance = wgpu::Instance::new(wgpu::Backends::VULKAN); let surface = unsafe { instance.create_surface(window) }; let adapter = instance.request_adapter( &wgpu::RequestAdapterOptions { power_preference: wgpu::PowerPreference::HighPerformance, compatible_surface: Some(&surface), force_fallback_adapter: false, }, 
).await.unwrap(); let (device, main_queue) = adapter.request_device( &wgpu::DeviceDescriptor{ features: wgpu::Features::empty(), limits: wgpu::Limits::default(), label: Some("main device"), }, None, ).await.unwrap(); let surf_size = window.inner_size(); let surf_config = wgpu::SurfaceConfiguration { usage: wgpu::TextureUsages::RENDER_ATTACHMENT, format: surface.get_preferred_format(&adapter).unwrap(), width: surf_size.width, height: surf_size.height, present_mode: wgpu::PresentMode::Immediate, }; surface.configure(&device, &surf_config); let mut indices = Vec::<u32>::new(); indices.reserve(QUADS_PER_BATCH*6); for i in (0 as u32..(QUADS_PER_BATCH*4) as u32).step_by(4) { indices.push(i + 2); indices.push(i + 0); indices.push(i + 1); indices.push(i + 2); indices.push(i + 1); indices.push(i + 3); } let batched_quad_index_buffer = device.create_buffer( &wgpu::BufferDescriptor{ label: Some("batched quad index buffer"), size: (std::mem::size_of::<u32>() * indices.len()) as u64, usage: wgpu::BufferUsages::INDEX | wgpu::BufferUsages::COPY_DST, mapped_at_creation: false, } ); main_queue.write_buffer(&batched_quad_index_buffer, 0, bytemuck::cast_slice(&indices[..])); let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor{ entries: &[ wgpu::BindGroupLayoutEntry { binding: 0, visibility: wgpu::ShaderStages::VERTEX, ty: wgpu::BindingType::Buffer{ has_dynamic_offset: false, ty: wgpu::BufferBindingType::Uniform, min_binding_size: None, }, count: None, }, ], label: Some("quad render bind group layout"), }); let globals_bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor{ entries: &[ wgpu::BindGroupLayoutEntry { binding: 0, visibility: wgpu::ShaderStages::VERTEX | wgpu::ShaderStages::FRAGMENT, ty: wgpu::BindingType::Buffer{ has_dynamic_offset: false, ty: wgpu::BufferBindingType::Uniform, min_binding_size: None, }, count: None, }, ], label: Some("quad render global bind group layout"), }); let globals_buffer = 
device.create_buffer(&wgpu::BufferDescriptor{ label: Some("[quad renderer] globals"), size: std::mem::size_of::<QuadDrawGlobals>() as u64, usage: wgpu::BufferUsages::UNIFORM | wgpu::BufferUsages::COPY_DST, mapped_at_creation: false, }); let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor{ label: Some("quad render pipeline"), bind_group_layouts: &[&globals_bind_group_layout, &bind_group_layout], push_constant_ranges: &[], }); let shader_module = device.create_shader_module(&wgpu::ShaderModuleDescriptor{ label: Some("quad render shader"), source: wgpu::ShaderSource::Wgsl(std::borrow::Cow::Borrowed(include_str!("rendering/quad_shader.wgsl"))), }); let pipeline = device.create_render_pipeline(&RenderPipelineDescriptor{ vertex: wgpu::VertexState{ module: &shader_module, entry_point: "vs_main", buffers: &[], }, fragment: Some(wgpu::FragmentState{ module: &shader_module, entry_point: "fs_main", targets: &[ wgpu::ColorTargetState{ blend: None, format: surf_config.format, write_mask: wgpu::ColorWrites::ALL, } ], }), label: Some("quad render pipeline"), layout: Some(&pipeline_layout), primitive: wgpu::PrimitiveState{ topology: wgpu::PrimitiveTopology::TriangleList, strip_index_format: None, front_face: wgpu::FrontFace::Cw, cull_mode: None, unclipped_depth: false, polygon_mode: wgpu::PolygonMode::Fill, conservative: false, }, depth_stencil: None, multisample: wgpu::MultisampleState::default(), multiview: None, }); let shared = SharedRenderRessources{ instance, surface, surf_size, surf_config, adapter, device, main_queue, }; let state = Mutex::new(RenderState{ shared_ressources: shared, main_pass_render_routines: Vec::new(), rect_draw_buffers: Vec::new(), last_buffer_index: 0, last_buffer_fill_len: 0, rect_index_buffer: batched_quad_index_buffer, rect_pipeline_binding_group_layout: bind_group_layout, rect_pipeline_globals_binding_group_layout: globals_bind_group_layout, rect_globals_buffer: globals_buffer, rect_pipeline_layout: pipeline_layout, 
rect_pipeline: pipeline, globals: QuadDrawGlobals{ camera_translation: [0.0,0.0], camera_rotation: [1.0,0.0], camera_scale: [1.0,1.0], }, }); Self{ state, start_time: SystemTime::now(), } } pub async fn resize(&self, new_size: winit::dpi::PhysicalSize<u32>) { let mut state = self.state.lock().await; if new_size.width > 0 && new_size.height > 0 { state.shared_ressources.surf_size = new_size; state.shared_ressources.surf_config.width = new_size.width; state.shared_ressources.surf_config.height = new_size.height; state.shared_ressources.surface.configure(&state.shared_ressources.device, &state.shared_ressources.surf_config); println!("resized to: ({},{})", new_size.width, new_size.height); } } pub async fn add_render_routine(&self, routine: impl RenderRoutine + 'static, pass: RenderPass) { let mut state = self.state.lock().await; match pass { RenderPass::Main => state.main_pass_render_routines.push(Box::new(routine)), } } pub async fn push_quads(&self, quads: &[QuadDrawInfo]) { let mut state = self.state.lock().await; if quads.len() > 0 { state.last_buffer_index = (quads.len() - 1) / QUADS_PER_BATCH; state.last_buffer_fill_len = (quads.len() - 1) % QUADS_PER_BATCH + 1; while state.rect_draw_buffers.len() <= state.last_buffer_index { let buff = state.shared_ressources.device.create_buffer(&wgpu::BufferDescriptor{ label: Some("quad draw buffer"), mapped_at_creation: false, size: (QUADS_PER_BATCH * std::mem::size_of::<QuadDrawInfo>()) as u64, usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::UNIFORM }); let bind_group = state.shared_ressources.device.create_bind_group(&wgpu::BindGroupDescriptor{ label: Some("quad render pipeline bind group"), layout: &state.rect_pipeline_binding_group_layout, entries: &[ wgpu::BindGroupEntry{ binding: 0, resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding{ buffer: &buff, offset: 0, size: None, }), } ], }); state.rect_draw_buffers.push((buff, bind_group)); }
pub async fn render(&self) -> Result<(), wgpu::SurfaceError> { let mut state = self.state.lock().await; let state = &mut*state; let output = state.shared_ressources.surface.get_current_texture()?; let view = output.texture.create_view(&wgpu::TextureViewDescriptor::default()); state.shared_ressources.main_queue.write_buffer(&state.rect_globals_buffer, 0, bytemuck::cast_slice(&[state.globals])); let mut encoder = state.shared_ressources.device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("Rect Renderpass Encoder"), }); { let globals_bind_group = state.shared_ressources.device.create_bind_group(&wgpu::BindGroupDescriptor{ label: Some("[quad renderer] globals"), layout: &state.rect_pipeline_globals_binding_group_layout, entries: &[ wgpu::BindGroupEntry{ binding: 0, resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding{ buffer: &state.rect_globals_buffer, offset: 0, size: None, }), } ] }); let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { label: Some("Render Pass"), color_attachments: &[wgpu::RenderPassColorAttachment { view: &view, resolve_target: None, ops: wgpu::Operations { load: wgpu::LoadOp::Clear(wgpu::Color { r: 0.0, g: 0.0, b: 0.0, a: 1.0, }), store: true, }, }], depth_stencil_attachment: None, }); render_pass.set_bind_group(0, &globals_bind_group, &[]); render_pass.set_pipeline(&state.rect_pipeline); render_pass.set_index_buffer( state.rect_index_buffer.slice(..), wgpu::IndexFormat::Uint32, ); if state.last_buffer_fill_len > 0 { for i in 0..state.last_buffer_index { render_pass.set_bind_group(1, &state.rect_draw_buffers[i].1, &[]); render_pass.draw_indexed(0..(QUADS_PER_BATCH*6) as u32, 0, 0..1); } render_pass.set_bind_group(1, &state.rect_draw_buffers[state.last_buffer_index].1, &[]); render_pass.draw_indexed(0..(state.last_buffer_fill_len*6) as u32, 0, 0..1); } } let shared = &mut state.shared_ressources; for render_routine in &mut state.main_pass_render_routines { render_routine.render(shared); } 
state.shared_ressources.main_queue.submit(std::iter::once(encoder.finish())); output.present(); Ok(()) } }
for i in 0..state.last_buffer_index { let slice = bytemuck::cast_slice(&quads[i*QUADS_PER_BATCH..(i+1)*state.last_buffer_index]); state.shared_ressources.main_queue.write_buffer(&state.rect_draw_buffers[i].0, 0, slice); } let slice = bytemuck::cast_slice(&quads[state.last_buffer_index*QUADS_PER_BATCH..]); state.shared_ressources.main_queue.write_buffer(&state.rect_draw_buffers[state.last_buffer_index].0, 0, slice); } else { state.last_buffer_index = 0; state.last_buffer_fill_len = 0; } }
function_block-function_prefix_line
[ { "content": "#[allow(unused)]\n\npub fn block_on<Out>(mut future: impl Future<Output = Out>) -> Out {\n\n LOCAL_BLOCK_ON_DATA.with(\n\n |ref_cell| {\n\n let local_block_data = ref_cell.borrow_mut();\n\n let waker = waker_ref(&local_block_data.waker);\n\n let context = &mut Context::from_waker(&*waker);\n\n\n\n loop {\n\n let future = unsafe{ Pin::new_unchecked(&mut future)};\n\n if let Poll::Ready(val) = future.poll(context) {\n\n break val;\n\n }\n\n local_block_data.recv.recv().unwrap();\n\n }\n\n }\n\n )\n\n}", "file_path": "src/sync/block_on.rs", "rank": 2, "score": 138646.81848751925 }, { "content": "pub trait User : Send + Sync + Default\n\n{\n\n fn init(self: Arc<Self>, shared_data: Arc<SharedAppData>, fixed_step_data: Arc<FixedStepData>, variable_step_data: Arc<VariableStepData>);\n\n fn cleanup(self: Arc<Self>, shared_data: Arc<SharedAppData>);\n\n fn fixed_step(self: Arc<Self>, shared_data: Arc<SharedAppData>, fixed_step_data: Arc<FixedStepData>) -> Pin<Box<dyn Future<Output=()> + Send + Sync>>;\n\n fn varaible_step(self: Arc<Self>, shared_data: Arc<SharedAppData>, variable_step_data: Arc<VariableStepData>) -> Pin<Box<dyn Future<Output=()> + Send + Sync>>;\n\n}\n\n\n\n//o------------ App Data ---------------o\n\n\n\npub struct SharedAppData {\n\n pub end_program: AtomicBool,\n\n pub runtime: Runtime,\n\n pub window: Window,\n\n pub(crate) min_vary_delta_time: AtomicU64,\n\n}\n\n\n\npub struct FixedStepData {\n\n pub(crate) input_state: Mutex<InputState>,\n\n pub ecm: EntityComponentManager,\n", "file_path": "src/app.rs", "rank": 3, "score": 138617.99249992345 }, { "content": "pub trait Component : Clone + Default + Sync + Send\n\n{\n\n type Storage : ComponentStore<Self>;\n\n}", "file_path": "src/entity/component_storage.rs", "rank": 4, "score": 125008.34157543618 }, { "content": "#[allow(unused)]\n\nfn worker(meta: Arc<RuntimeMeta>, worker_index: usize) {\n\n \n\n // crate worker loop\n\n 'outer: while !meta.end_runtime.load(Ordering::Relaxed) || 
meta.open_tasks.load(Ordering::Acquire) > 0 {\n\n // if there are no tasks directly available, we sleep and wake up to execute the next available Order from any queue\n\n\n\n match meta.signal_reciever.recv_timeout(std::time::Duration::from_millis(100)) {\n\n Ok(_) => (),\n\n Err(err) => match err {\n\n RecvTimeoutError::Timeout => (),\n\n RecvTimeoutError::Disconnected => panic!(),\n\n }\n\n }\n\n\n\n let mut order = None;\n\n let mut sender = None;\n\n\n\n 'inner2: loop {\n\n if let Ok(o) = meta.execution_reciever_very_high.try_recv() {\n\n order = Some(o);\n", "file_path": "src/sync/runtime.rs", "rank": 5, "score": 90016.91521201654 }, { "content": "struct SharedData {\n\n count: AtomicUsize,\n\n waker: Mutex<Option<Waker>>,\n\n}\n\n\n\npub struct AtomicDependency{\n\n data: Arc<SharedData>,\n\n}\n\n\n\nimpl Clone for AtomicDependency{\n\n fn clone(&self) -> Self {\n\n self.data.count.fetch_add(1, std::sync::atomic::Ordering::Acquire);\n\n Self{\n\n data: self.data.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for AtomicDependency {\n\n fn drop(&mut self) {\n", "file_path": "src/sync/atomic_waiter.rs", "rank": 6, "score": 87803.14665060415 }, { "content": "pub trait TupleExtras {\n\n type PopFrontResultType;\n\n\n\n fn pop_front(self) -> Self::PopFrontResultType;\n\n}\n\n\n\nimpl<T> TupleExtrasT<T> for () {\n\n type AppendResultType = (T,);\n\n type FirstReplaceResultType = ();\n\n\n\n fn append(self, t: T) -> Self::AppendResultType {\n\n (t,)\n\n }\n\n\n\n fn replace_first(self, _r: T) -> Self::FirstReplaceResultType {\n\n self\n\n }\n\n}\n\n\n\nimpl TupleExtras for () {\n", "file_path": "src/util.rs", "rank": 7, "score": 79323.44563553412 }, { "content": "#[derive(Clone)]\n\nstruct Page<T: Default + Clone, const N: usize> {\n\n slots: [T; N],\n\n slot_used: [bool; N],\n\n len: usize,\n\n}\n\n\n\nimpl<T: Default + Clone, const N: usize> Page<T,N> {\n\n fn new() -> Self {\n\n Self{\n\n slots: [(); N].map(|_| T::default()),\n\n slot_used: [false; N],\n\n len: 0,\n\n 
}\n\n }\n\n\n\n #[allow(unused)]\n\n fn iter(&self) -> impl Iterator<Item = &T> {\n\n self.slots.iter()\n\n .zip(self.slot_used.iter())\n\n .filter(|(_, used)| **used)\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 8, "score": 77253.97195583544 }, { "content": "type ClosureBox = Box<dyn FnOnce() + Send>;\n\n\n\n#[derive(Clone,Copy)]\n\npub enum Priority {\n\n #[allow(unused)]\n\n VeryHigh,\n\n #[allow(unused)]\n\n High,\n\n #[allow(unused)]\n\n Normal,\n\n #[allow(unused)]\n\n Low\n\n}\n\n\n\npub struct Task {\n\n pub future: Mutex<TaskFutureBox>,\n\n pub execution_queue: crossbeam_channel::Sender<ExecutionOrder>,\n\n pub notify_signal: crossbeam_channel::Sender<RuntimeInfo>,\n\n pub priority: Priority,\n\n}\n", "file_path": "src/sync/task.rs", "rank": 9, "score": 75050.7292362089 }, { "content": "pub trait GenericComponentStore {\n\n fn optimize(&mut self);\n\n\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn Any;\n\n\n\n fn has(&self, index: EntityIndex) -> bool;\n\n\n\n fn rem(&mut self, index: EntityIndex);\n\n\n\n fn len(&self) -> usize;\n\n}\n\n\n", "file_path": "src/entity/component_storage.rs", "rank": 10, "score": 73094.33701105937 }, { "content": "pub trait TupleExtrasT<T> {\n\n type AppendResultType;\n\n type FirstReplaceResultType;\n\n\n\n fn append(self, t: T) -> Self::AppendResultType;\n\n fn replace_first(self, r: T) -> Self::FirstReplaceResultType;\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 11, "score": 72542.98988865975 }, { "content": "pub trait ComponentStoreAccessor: {\n\n fn exec(&self, f: &mut dyn FnMut(&mut dyn GenericComponentStore) -> ());\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn Any;\n\n\n\n fn as_any_ref(&self) -> &dyn Any;\n\n}\n\n\n\nimpl<T: 'static + Default + Clone> ComponentStoreAccessor for Arc<RwLock<LinearStore<T>>> {\n\n fn exec(&self, f: &mut dyn FnMut(&mut dyn GenericComponentStore) -> ()) {\n\n let mut guard = spin_on!(self.try_write());\n\n\n\n let 
generic_self: &mut dyn GenericComponentStore = &mut *guard;\n\n\n\n f(generic_self);\n\n }\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn Any {\n\n self\n\n }\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 12, "score": 69732.99951737786 }, { "content": "struct UnblockSignal{}\n\n\n", "file_path": "src/sync/block_on.rs", "rank": 13, "score": 65696.50703417588 }, { "content": "struct YieldFuture {\n\n yielded_once: bool,\n\n}\n\n\n\nimpl Future for YieldFuture {\n\n type Output = ();\n\n fn poll(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.yielded_once {\n\n return Poll::Ready(());\n\n } \n\n self.yielded_once = true;\n\n\n\n YIELD_INFO.with(|d|{\n\n d.borrow_mut().did_yield = true;\n\n });\n\n\n\n Poll::Pending\n\n }\n\n}\n\n\n\n#[allow(unused)]\n\npub async fn yield_now() {\n\n YieldFuture{yielded_once:false}.await;\n\n}", "file_path": "src/sync/yielding.rs", "rank": 14, "score": 65696.50703417588 }, { "content": "type TaskFutureBox = Pin<Box<dyn Future<Output = ()> + Send + Sync>>;\n", "file_path": "src/sync/task.rs", "rank": 15, "score": 64123.57263789163 }, { "content": "struct BlockedThreadWaker{\n\n snd: crossbeam_channel::Sender<UnblockSignal>,\n\n}\n\n\n\nimpl ArcWake for BlockedThreadWaker {\n\n fn wake(self: Arc<Self>) {\n\n self.snd.send(UnblockSignal{}).unwrap();\n\n }\n\n\n\n fn wake_by_ref(arc_self: &Arc<Self>) {\n\n arc_self.snd.send(UnblockSignal{}).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/sync/block_on.rs", "rank": 16, "score": 63489.71388374958 }, { "content": "struct LocalBlockData {\n\n send: crossbeam_channel::Sender<UnblockSignal>,\n\n recv: crossbeam_channel::Receiver<UnblockSignal>,\n\n waker: Arc<BlockedThreadWaker>,\n\n}\n\n\n\nimpl LocalBlockData {\n\n fn new() -> Self {\n\n let (send, recv) = crossbeam_channel::bounded(1);\n\n Self{\n\n send: send.clone(),\n\n recv,\n\n waker: Arc::new(BlockedThreadWaker{snd:send})\n\n }\n\n }\n\n}\n\n\n\nthread_local! 
{\n\n static LOCAL_BLOCK_ON_DATA: RefCell<LocalBlockData> = RefCell::new(LocalBlockData::new());\n\n}\n\n\n", "file_path": "src/sync/block_on.rs", "rank": 17, "score": 63489.71388374958 }, { "content": "pub trait ComponentStore<T: Default + Clone> {\n\n type ComponentType : Component;\n\n\n\n fn new() -> Self;\n\n\n\n fn get(&self, index: EntityIndex) -> Option<&T>;\n\n\n\n fn get_mut(&mut self, index: EntityIndex) -> Option<&mut T>;\n\n\n\n fn set(&mut self, index: EntityIndex, value: T);\n\n\n\n fn add(&mut self, index: EntityIndex, value: T);\n\n}\n\n\n", "file_path": "src/entity/component_storage.rs", "rank": 18, "score": 63447.60395080972 }, { "content": "#[allow(unused)]\n\nfn process_task(meta: &Arc<RuntimeMeta>, execution_sender: &crossbeam_channel::Sender<ExecutionOrder>, task: Arc<Task>) {\n\n let task_finished = {\n\n let waker = waker_ref(&task);\n\n let context = &mut Context::from_waker(&*waker);\n\n \n\n let finished = Poll::Pending != task.future.lock()\n\n .unwrap()\n\n .as_mut()\n\n .poll(context);\n\n \n\n YIELD_INFO.with(|info| {\n\n if info.borrow_mut().did_yield {\n\n info.borrow_mut().did_yield = false;\n\n info.borrow_mut().yield_task = Some(task);\n\n }\n\n });\n\n\n\n if finished {\n\n meta.open_tasks.fetch_sub(1, Ordering::AcqRel);\n\n }\n\n \n\n finished\n\n };\n\n}\n\n\n", "file_path": "src/sync/runtime.rs", "rank": 19, "score": 38618.93588313801 }, { "content": "#![macro_use]\n\n\n\npub mod task;\n\nmod runtime;\n\nmod atomic_waiter;\n\nmod block_on;\n\n//mod sleep;\n\nmod yielding;\n\n\n\npub use runtime::*;\n\npub use atomic_waiter::*;\n\npub use block_on::*;\n\n//pub use sleep::*;\n\npub use yielding::*;\n\n\n\n#[allow(unused)]\n\nmacro_rules! 
spin_on {\n\n ($expression:expr) => {\n\n loop {\n\n if let Some(guard) = $expression {\n\n break guard\n\n }\n\n }\n\n };\n\n}", "file_path": "src/sync.rs", "rank": 38, "score": 27781.30693750943 }, { "content": "impl Future for SleepFuture {\n\n type Output = ();\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.called_once && std::time::Instant::now() + SLEEP_SELF_SPIN_TIME > self.wake_up_time {\n\n profiling::scope!(\"SleepFuture: spinning wait\",\"inner spinning for the last few microseconds\");\n\n while std::time::Instant::now() < self.wake_up_time {}\n\n return Poll::Ready(());\n\n }\n\n self.called_once = true;\n\n while self.sender.try_send(SleepCheckerOrder::Task(SleepingTask{waker: cx.waker().clone(), wake_up_time: self.wake_up_time})).is_err() {}\n\n Poll::Pending\n\n }\n\n}\n\n\n\nuse std::sync::atomic::Ordering;\n\n\n\npub(crate) enum SleepCheckerOrder {\n\n Task(SleepingTask),\n\n WakeUp,\n\n}\n", "file_path": "src/sync/sleep.rs", "rank": 39, "score": 26344.582934704693 }, { "content": " * Executes a future on a threadpool.\n\n * Submitted future should not block.\n\n * Submitted future should have a short runtime (<200mics) or yield periodicly.\n\n */\n\n #[allow(unused)]\n\n pub fn spawn(&self, future: impl Future<Output = ()> + Send + Sync + 'static) {\n\n self.spawn_prioritised(future, Priority::Normal);\n\n }\n\n\n\n /**\n\n * Executes a prioritised closure on a threadpool.\n\n * Submitted Closures should not block.\n\n * Submitted Closures should have a short runtime (<200mics).\n\n * If the task needs to sync, please spawn a sync task via the spawn function.\n\n */\n\n #[allow(unused)]\n\n pub fn exec_prioritised(&self, closure: impl FnOnce() + Send + 'static, priority: Priority) {\n\n match priority {\n\n Priority::Low => self.meta.execution_sender_low.send(ExecutionOrder::ExecuteClosure(Box::new(closure))).unwrap(),\n\n Priority::Normal => 
self.meta.execution_sender_normal.send(ExecutionOrder::ExecuteClosure(Box::new(closure))).unwrap(),\n", "file_path": "src/sync/runtime.rs", "rank": 40, "score": 26344.116447828168 }, { "content": " .unwrap()\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let ret = Self {\n\n meta: meta.clone(),\n\n worker_joins: Mutex::new(Some(worker_join_handles)),\n\n };\n\n\n\n ret\n\n }\n\n\n\n /**\n\n * Executes a prioritised future on a threadpool.\n\n * Submitted future should not block.\n\n * Submitted future should have a short runtime (<200mics) or yield periodicly.\n\n */\n\n #[allow(unused)]\n\n pub fn spawn_prioritised(&self, future: impl Future<Output = ()> + Send + Sync + 'static, priority: Priority) {\n\n let sender = match priority {\n", "file_path": "src/sync/runtime.rs", "rank": 41, "score": 26343.463884143264 }, { "content": "\n\nimpl Runtime {\n\n #[allow(unused)]\n\n pub fn new() -> Self {\n\n let (s_low, r_low) = crossbeam_channel::unbounded();\n\n let (s_normal, r_normal) = crossbeam_channel::unbounded();\n\n let (s_high, r_high) = crossbeam_channel::unbounded();\n\n let (s_very_high, r_very_high) = crossbeam_channel::unbounded();\n\n let (signal_snd, signal_rcv) = crossbeam_channel::unbounded();\n\n\n\n let worker_thread_count = usize::max(1,num_cpus::get_physical()-1);\n\n println!(\"INFO: Runtime started with pool of {} threads.\", worker_thread_count);\n\n\n\n let mut meta = Arc::new(RuntimeMeta{ \n\n worker_count: AtomicU64::from(worker_thread_count as u64),\n\n execution_sender_low: s_low,\n\n execution_sender_normal: s_normal,\n\n execution_sender_high: s_high,\n\n execution_sender_very_high: s_very_high,\n\n execution_reciever_low: r_low,\n", "file_path": "src/sync/runtime.rs", "rank": 42, "score": 26341.9927277178 }, { "content": "use std::{pin::Pin, task::{Context, Poll, Waker}};\n\n\n\nuse super::yielding::*;\n\nuse futures::Future;\n\n\n\nuse super::Runtime;\n\n\n\nconst SLEEP_SELF_SPIN_TIME: std::time::Duration = 
std::time::Duration::from_micros(100);\n\n\n\npub(crate) struct SleepFuture {\n\n called_once: bool,\n\n wake_up_time: std::time::Instant,\n\n sender: async_std::channel::Sender<SleepCheckerOrder>,\n\n}\n\n\n\npub(crate) struct SleepingTask {\n\n waker: Waker,\n\n wake_up_time: std::time::Instant,\n\n}\n\n\n", "file_path": "src/sync/sleep.rs", "rank": 43, "score": 26341.774165180603 }, { "content": "use std::{cell::RefCell, pin::Pin, sync::Arc, task::{Context, Poll}};\n\n\n\nuse futures::Future;\n\n\n\nuse super::task::Task;\n\n\n\npub(crate) struct YieldInfo {\n\n pub(crate) did_yield: bool,\n\n pub(crate) yield_task: Option<Arc<Task>>,\n\n}\n\n\n\nthread_local! {\n\n pub(crate) static YIELD_INFO: RefCell<YieldInfo> = RefCell::new(YieldInfo{did_yield:false, yield_task: None});\n\n}\n\n\n", "file_path": "src/sync/yielding.rs", "rank": 44, "score": 26341.60029760212 }, { "content": "pub use std::{cell::RefCell, pin::Pin, task::Waker};\n\nuse crossbeam_channel::RecvTimeoutError;\n\npub use futures::{Future, FutureExt, task::{waker_ref}};\n\npub use smallbox::SmallBox;\n\nuse std::{sync::atomic::{AtomicU64, Ordering}};\n\n\n\nuse super::yielding::*;\n\n\n\npub use {\n\n futures::{\n\n task::{ArcWake},\n\n },\n\n std::{\n\n sync::{Arc, Mutex},\n\n task::{Context, Poll},\n\n },\n\n};\n\n\n\nuse super::task::*;\n\n\n", "file_path": "src/sync/runtime.rs", "rank": 45, "score": 26341.370437605543 }, { "content": " Priority::High => self.meta.execution_sender_high.send(ExecutionOrder::ExecuteClosure(Box::new(closure))).unwrap(),\n\n Priority::VeryHigh => self.meta.execution_sender_very_high.send(ExecutionOrder::ExecuteClosure(Box::new(closure))).unwrap(),\n\n }\n\n self.meta.signal_sender.send(RuntimeInfo::WakeUp);\n\n }\n\n \n\n /**\n\n * Executes a closure on a threadpool.\n\n * Submitted Closures should not block.\n\n * Submitted Closures should have a short runtime (<200mics).\n\n * If the task needs to sync, please spawn a sync task via the spawn function.\n\n */\n\n 
#[allow(unused)]\n\n pub fn exec(&self, closure: impl FnOnce() + Send + 'static) {\n\n self.exec_prioritised(closure, Priority::Normal);\n\n }\n\n\n\n /**\n\n * Kills threadpool.\n\n * All worker threads will terminate AFTER all open tasks are completed.\n", "file_path": "src/sync/runtime.rs", "rank": 46, "score": 26341.059103559524 }, { "content": "\n\npub enum ExecutionOrder {\n\n ExecuteTask(Arc<Task>),\n\n ExecuteClosure(ClosureBox),\n\n}\n\n\n\nimpl ArcWake for Task {\n\n fn wake(self: Arc<Self>) {\n\n if self.execution_queue.send(ExecutionOrder::ExecuteTask(self.clone())).is_err() {\n\n println!(\"WARNING: tried to wake up future on dead runtime!\");\n\n }\n\n self.notify_signal.send(RuntimeInfo::WakeUp);\n\n }\n\n\n\n fn wake_by_ref(arc_self: &Arc<Self>) {\n\n if arc_self.execution_queue.send(ExecutionOrder::ExecuteTask(arc_self.clone())).is_err() {\n\n println!(\"WARNING: tried to wake up future on dead runtime!\");\n\n }\n\n arc_self.notify_signal.send(RuntimeInfo::WakeUp);\n\n }\n\n}", "file_path": "src/sync/task.rs", "rank": 47, "score": 26339.626518859583 }, { "content": " * Looping tasks MUST be notified/terminated before calling this function!\n\n */\n\n #[allow(unused)]\n\n pub fn stop(&self) {\n\n if let Some(mut worker_joins) = self.worker_joins.lock().unwrap().take() {\n\n self.meta.end_runtime.store(true, Ordering::Release);\n\n\n\n let worker_count = worker_joins.len();\n\n\n\n for _ in 0..self.meta.worker_count.load(Ordering::Relaxed) {\n\n self.meta.signal_sender.send(RuntimeInfo::WakeUp);\n\n }\n\n\n\n println!(\"threads: {}\", worker_joins.len());\n\n \n\n while let Some(join_handle) = worker_joins.pop() {\n\n join_handle.join().unwrap();\n\n }\n\n \n\n println!(\"INFO: Runtime shut down.\");\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Runtime {\n\n fn drop(&mut self) {\n\n self.stop();\n\n }\n\n}", "file_path": "src/sync/runtime.rs", "rank": 48, "score": 26339.42615074937 }, { "content": "pub use std::{cell::RefCell, pin::Pin, 
task::Waker};\n\npub use futures::{Future, FutureExt, task::{waker_ref}};\n\npub use smallbox::SmallBox;\n\n\n\npub use {\n\n futures::{\n\n task::{ArcWake},\n\n },\n\n std::{\n\n sync::{Arc, Mutex},\n\n task::{Context, Poll},\n\n },\n\n};\n\n\n\npub enum RuntimeInfo {\n\n WakeUp\n\n}\n\n\n", "file_path": "src/sync/task.rs", "rank": 49, "score": 26339.113564247 }, { "content": " process_task(&meta, sender.unwrap(), task);\n\n },\n\n ExecutionOrder::ExecuteClosure(mut closure) => {\n\n closure();\n\n },\n\n };\n\n };\n\n }\n\n }\n\n meta.worker_count.fetch_sub(1, Ordering::Relaxed);\n\n for _ in 0..meta.worker_count.load(Ordering::Relaxed) {\n\n meta.signal_sender.send(RuntimeInfo::WakeUp);\n\n }\n\n println!(\"INFO: Runtime worker ended.\");\n\n}\n\n\n\npub struct Runtime {\n\n pub(crate) meta: Arc<RuntimeMeta>,\n\n worker_joins: Mutex<Option<Vec<std::thread::JoinHandle<()>>>>,\n\n}\n", "file_path": "src/sync/runtime.rs", "rank": 50, "score": 26338.672596402415 }, { "content": " \n\npub(crate) async fn sleep_sheduler(meta: std::sync::Arc<super::runtime::RuntimeMeta>) {\n\n let mut sleepers = Vec::new();\n\n while !meta.end_runtime.load(Ordering::Relaxed) || meta.open_tasks.load(Ordering::Relaxed) > 1 /* this is also an open task! 
*/ {\n\n if sleepers.is_empty() {\n\n match meta.new_sleepers_rcv.recv().await.unwrap() {\n\n SleepCheckerOrder::Task(sleeper) => {\n\n sleepers.push(sleeper);\n\n },\n\n SleepCheckerOrder::WakeUp => continue,\n\n }\n\n }\n\n {\n\n profiling::scope!(\"sleep_sheduler_check\");\n\n {\n\n while let Ok(order) = meta.new_sleepers_rcv.try_recv() {\n\n if let SleepCheckerOrder::Task(new_sleeper) = order {\n\n let insertion_index = sleepers.partition_point(|other_st: &SleepingTask| other_st.wake_up_time > new_sleeper.wake_up_time);\n\n sleepers.insert(insertion_index, new_sleeper);\n\n }\n", "file_path": "src/sync/sleep.rs", "rank": 51, "score": 26338.500063979325 }, { "content": " execution_reciever_normal: r_normal,\n\n execution_reciever_high: r_high,\n\n execution_reciever_very_high: r_very_high,\n\n signal_sender: signal_snd.clone(),\n\n signal_reciever: signal_rcv,\n\n end_runtime: \t std::sync::atomic::AtomicBool::from(false),\n\n open_tasks: std::sync::atomic::AtomicU64::from(0),\n\n });\n\n\n\n let worker_join_handles = (0..worker_thread_count)\n\n .into_iter()\n\n .map(|index|{\n\n let meta = meta.clone();\n\n std::thread::Builder::new()\n\n .name(std::format!(\"worker thread {}\", index))\n\n .spawn(move || { \n\n // register thread/core to profiling\n\n profiling::register_thread!(std::format!(\"worker thread {}\", index).as_str());\n\n worker(meta, index); \n\n })\n", "file_path": "src/sync/runtime.rs", "rank": 52, "score": 26338.444351708407 }, { "content": "pub async fn sleep_for(runtime: &Runtime, min_dura: std::time::Duration) {\n\n let wake_up_time = std::time::Instant::now() + min_dura;\n\n sleep_until(runtime, wake_up_time).await\n\n}\n\n\n\n#[allow(unused)]\n\npub async fn sleep_until(runtime: &Runtime, wake_up_time: std::time::Instant) {\n\n let sleeper = SleepFuture{\n\n called_once: false,\n\n wake_up_time: wake_up_time,\n\n sender: runtime.meta.new_sleepers_snd.clone(),\n\n };\n\n\n\n sleeper.await;\n\n}", "file_path": "src/sync/sleep.rs", "rank": 
53, "score": 26337.885082213368 }, { "content": "pub(crate) struct RuntimeMeta {\n\n worker_count: AtomicU64,\n\n execution_sender_low: crossbeam_channel::Sender<ExecutionOrder>,\n\n execution_sender_normal: crossbeam_channel::Sender<ExecutionOrder>,\n\n execution_sender_high: crossbeam_channel::Sender<ExecutionOrder>,\n\n execution_sender_very_high: crossbeam_channel::Sender<ExecutionOrder>,\n\n execution_reciever_low: crossbeam_channel::Receiver<ExecutionOrder>,\n\n execution_reciever_normal: crossbeam_channel::Receiver<ExecutionOrder>,\n\n execution_reciever_high: crossbeam_channel::Receiver<ExecutionOrder>,\n\n execution_reciever_very_high: crossbeam_channel::Receiver<ExecutionOrder>,\n\n signal_sender: crossbeam_channel::Sender<RuntimeInfo>,\n\n signal_reciever: crossbeam_channel::Receiver<RuntimeInfo>,\n\n pub(crate) end_runtime: std::sync::atomic::AtomicBool,\n\n pub(crate) open_tasks: std::sync::atomic::AtomicU64,\n\n}\n\n\n\n#[allow(unused)]\n", "file_path": "src/sync/runtime.rs", "rank": 54, "score": 26337.55673046204 }, { "content": "use std::{cell::RefCell, pin::Pin, sync::Arc, task::{Context, Poll}};\n\n\n\nuse futures::{future::*, task::{ArcWake, waker_ref}};\n\n\n", "file_path": "src/sync/block_on.rs", "rank": 55, "score": 26336.135951220098 }, { "content": " }\n\n \n\n 'search: loop {\n\n if sleepers.is_empty() { break 'search; }\n\n \n\n if (std::time::Instant::now() + SLEEP_SELF_SPIN_TIME) > sleepers.last().unwrap().wake_up_time {\n\n let sleeper = sleepers.pop().unwrap();\n\n sleeper.waker.wake_by_ref();\n\n } else {\n\n break 'search;\n\n }\n\n }\n\n }\n\n }\n\n yield_now().await;\n\n }\n\n println!(\"INFO: Sleep sheduler ended.\");\n\n}\n\n\n\n#[allow(unused)]\n", "file_path": "src/sync/sleep.rs", "rank": 56, "score": 26334.46700137666 }, { "content": " sender = Some(& meta.execution_sender_very_high);\n\n } else if let Ok(o) = meta.execution_reciever_high.try_recv() {\n\n order = Some(o);\n\n sender = Some(& meta.execution_sender_high);\n\n } 
else if let Ok(o) = meta.execution_reciever_normal.try_recv() {\n\n order = Some(o);\n\n sender = Some(& meta.execution_sender_normal);\n\n } else if let Ok(o) = meta.execution_reciever_low.try_recv() {\n\n order = Some(o);\n\n sender = Some(& meta.execution_sender_low);\n\n }\n\n \n\n if order.is_none() {\n\n continue 'outer;\n\n }\n\n\n\n {\n\n profiling::scope!(\"worker does work\");\n\n match order.take().unwrap() {\n\n ExecutionOrder::ExecuteTask(task) => {\n", "file_path": "src/sync/runtime.rs", "rank": 57, "score": 26333.785177438713 }, { "content": " Priority::Low => &self.meta.execution_sender_low,\n\n Priority::Normal => &self.meta.execution_sender_normal,\n\n Priority::High => &self.meta.execution_sender_high,\n\n Priority::VeryHigh => &self.meta.execution_sender_very_high,\n\n };\n\n\n\n let task_arc = Arc::new(Task{\n\n future: Mutex::new(Box::pin(future)),\n\n execution_queue: sender.clone(),\n\n notify_signal: self.meta.signal_sender.clone(),\n\n priority: priority,\n\n });\n\n\n\n self.meta.open_tasks.fetch_add(1, Ordering::AcqRel);\n\n\n\n sender.send(ExecutionOrder::ExecuteTask(task_arc)).unwrap();\n\n self.meta.signal_sender.send(RuntimeInfo::WakeUp);\n\n }\n\n\n\n /**\n", "file_path": "src/sync/runtime.rs", "rank": 58, "score": 26332.66380101242 }, { "content": " let count = self.data.count.fetch_sub(1, std::sync::atomic::Ordering::Acquire);\n\n if count == 1 {\n\n if let Some(waker) = &*self.data.waker.lock().unwrap() {\n\n waker.wake_by_ref();\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct AtomicWaiter {\n\n data: Arc<SharedData>,\n\n}\n\n\n\nimpl AtomicWaiter {\n\n #[allow(unused)]\n\n pub fn new() -> Self {\n\n Self {\n\n data: Arc::new(SharedData{count: AtomicUsize::new(0), waker: Mutex::new(None)}),\n\n }\n\n }\n", "file_path": "src/sync/atomic_waiter.rs", "rank": 59, "score": 25047.861583105438 }, { "content": "\n\n #[allow(unused)]\n\n pub fn make_dependency(&self) -> AtomicDependency {\n\n self.data.count.fetch_add(1, 
std::sync::atomic::Ordering::Acquire);\n\n AtomicDependency{\n\n data: self.data.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl Future for AtomicWaiter {\n\n type Output = ();\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n {\n\n let mut waker = self.data.waker.lock().unwrap();\n\n *waker = Some(cx.waker().clone());\n\n }\n\n if self.data.count.load(std::sync::atomic::Ordering::Relaxed) == 0 {\n\n Poll::Ready(())\n\n } else {\n", "file_path": "src/sync/atomic_waiter.rs", "rank": 60, "score": 25042.867362622674 }, { "content": " Poll::Pending\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for AtomicWaiter {\n\n fn drop(&mut self) {\n\n if self.data.count.fetch_add(0, std::sync::atomic::Ordering::Acquire) != 0 {\n\n }\n\n }\n\n}", "file_path": "src/sync/atomic_waiter.rs", "rank": 61, "score": 25039.675632926224 }, { "content": "\n\nuse std::{pin::Pin, sync::{Arc, Mutex, atomic::AtomicUsize}, task::{Context, Poll, Waker}};\n\n\n\nuse futures::{Future};\n\n\n", "file_path": "src/sync/atomic_waiter.rs", "rank": 62, "score": 25037.884961952754 }, { "content": "}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct RectRenderable {\n\n pub size: cgmath::Vector2<f32>,\n\n pub color: cgmath::Vector4<f32>,\n\n}\n\n\n\nimpl Default for RectRenderable {\n\n fn default() -> Self {\n\n Self{\n\n size: Vf32x2::new(1.0, 1.0),\n\n color: cgmath::Vector4::<f32>::new(1.0,1.0,1.0,1.0),\n\n }\n\n }\n\n}\n\n\n\nimpl Component for RectRenderable {\n\n type Storage = DenseStore<Self>;\n\n}", "file_path": "src/entity/default_components.rs", "rank": 63, "score": 23.56831825976228 }, { "content": "use crate::Vf32x2;\n\n\n\nuse super::{Component, DenseStore, LinearStore};\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Transform {\n\n pub position: cgmath::Vector2<f32>,\n\n pub orientation: cgmath::Vector2<f32>,\n\n}\n\n\n\nimpl Default for Transform {\n\n fn default() -> Self {\n\n Self{\n\n position: Vf32x2::new(0.0, 0.0),\n\n orientation: Vf32x2::new(0.0, 0.0),\n\n }\n\n 
}\n\n}\n\n\n\nimpl Component for Transform {\n", "file_path": "src/entity/default_components.rs", "rank": 64, "score": 22.231975136433718 }, { "content": " let n = self.dense_indices.len();\n\n \n\n (0..n).step_by(batch_size).into_iter().map(move |i| {\n\n let forgotten_self = unsafe{std::mem::transmute::<&mut Self, &mut Self>(self)};\n\n forgotten_self.dense_indices[i..]\n\n .iter()\n\n .take(batch_size)\n\n .map(|i| *i)\n\n .zip(forgotten_self.dense_values[i..]\n\n .iter_mut()\n\n .take(batch_size))\n\n })\n\n }\n\n\n\n fn assure_index(&mut self, index: EntityIndex) {\n\n if index as usize >= self.sparse_indices.len() {\n\n self.sparse_indices.resize(index as usize + 1, !0);\n\n }\n\n }\n\n #[allow(unused)]\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 65, "score": 20.40072810258954 }, { "content": " let n = self.pages.len();\n\n\n\n (0..n).into_iter()\n\n .map(|page_index|{\n\n self.pages[page_index].iter_entity(page_index).take(PAGE_SIZE)\n\n })\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn iter_entity_mut_batch(&mut self, batch_size: usize) -> impl Iterator<Item = impl Iterator<Item = (EntityIndex, &mut T)>> {\n\n let n = self.pages.len();\n\n\n\n (0..n).into_iter()\n\n .map(|page_index|{\n\n let forgotten_self = unsafe{std::mem::transmute::<&mut Self, &mut Self>(self)};\n\n forgotten_self.pages[page_index].iter_entity_mut(page_index).take(PAGE_SIZE)\n\n })\n\n }\n\n\n\n fn assure_page(&mut self, page_index: usize) {\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 66, "score": 20.020899204481147 }, { "content": " type Storage = LinearStore<Self>;\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct OldTransform {\n\n pub position: cgmath::Vector2<f32>,\n\n pub orientation: cgmath::Vector2<f32>,\n\n}\n\n\n\nimpl Default for OldTransform {\n\n fn default() -> Self {\n\n Self{\n\n position: Vf32x2::new(0.0, 0.0),\n\n orientation: Vf32x2::new(0.0, 0.0),\n\n }\n\n }\n\n}\n\n\n\nimpl Component for OldTransform {\n\n type 
Storage = LinearStore<Self>;\n", "file_path": "src/entity/default_components.rs", "rank": 67, "score": 19.956218455883363 }, { "content": " pub(crate) fixed_delta_time: AtomicU64,\n\n}\n\n\n\npub struct VariableStepData {\n\n pub(crate) input_state_backbuffer: Mutex<InputState>,\n\n pub(crate) input_state_frontbuffer: Mutex<InputState>,\n\n pub renderer: Renderer,\n\n pub(crate) vary_delta_time: AtomicU64,\n\n}\n\n\n\nimpl SharedAppData {\n\n pub fn end(&self) {\n\n self.end_program.store(true, Ordering::Relaxed);\n\n }\n\n}\n\n\n\nimpl FixedStepData {\n\n pub async fn key_pressed(&self, key: VirtualKeyCode) -> bool {\n\n let input_state = &mut*self.input_state.lock().await;\n\n input_state.key_states[key as usize]\n", "file_path": "src/app.rs", "rank": 68, "score": 19.8617230635754 }, { "content": " self.fixed_step_signal_thread = Some(\n\n std::thread::Builder::new()\n\n .name(\"fixed time step notify thread\".into())\n\n .spawn(||{fixed_time_step_notify(meta_clone, fixed_step_data_clone, signal_snd)})\n\n .unwrap()\n\n ); \n\n\n\n event_loop.run(move |event, _, control_flow| {\n\n *control_flow = ControlFlow::Poll;\n\n match event {\n\n Event::MainEventsCleared => self.on_main_events_cleared(control_flow),\n\n Event::RedrawRequested(_) => { },\n\n Event::WindowEvent{ ref event, window_id, } if (window_id == self.shared_data.window.id()) => {\n\n match event {\n\n WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,\n\n WindowEvent::Resized(physical_size) => block_on(self.variable_step_data.renderer.resize(*physical_size)),\n\n WindowEvent::ScaleFactorChanged { new_inner_size, .. 
} => block_on(self.variable_step_data.renderer.resize(**new_inner_size)),\n\n WindowEvent::KeyboardInput{device_id,input,is_synthetic} => {\n\n let index = input.virtual_keycode.unwrap() as usize;\n\n let in_state = &mut*spin_on!(self.variable_step_data.input_state_backbuffer.try_lock());\n", "file_path": "src/app.rs", "rank": 69, "score": 18.75444917689864 }, { "content": "\n\n #[allow(unused)]\n\n pub fn iter_entity_batch(&self, batch_size: usize) -> impl Iterator<Item = impl Iterator<Item = (EntityIndex, &T)>> {\n\n \n\n let n = self.dense_indices.len();\n\n\n\n (0..n).step_by(batch_size).into_iter().map(move |i| {\n\n self.dense_indices[i..]\n\n .iter()\n\n .take(batch_size)\n\n .map(|i| *i)\n\n .zip(self.dense_values[i..]\n\n .iter()\n\n .take(batch_size))\n\n })\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn iter_entity_mut_batch(&mut self, batch_size: usize) -> impl Iterator<Item = impl Iterator<Item = (EntityIndex, &mut T)>> {\n\n\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 70, "score": 18.576729564281603 }, { "content": " #[allow(unused)]\n\n fn iter_entity_mut(&mut self, page_index: usize) -> impl Iterator<Item = (EntityIndex, &mut T)> {\n\n self.slots.iter_mut()\n\n .zip(self.slot_used.iter())\n\n .enumerate()\n\n .filter(|(_, (_, used))| **used)\n\n .map(move |(index, (slot, _))| ((index + (page_index << PAGE_EXPONENT)) as EntityIndex, slot))\n\n }\n\n}\n\n\n\npub struct LinearStore<T: Default + Clone> {\n\n pages: Vec<Page<T, PAGE_SIZE>>,\n\n}\n\n\n\nimpl<T: 'static + Default + Clone> GenericComponentStore for LinearStore<T> {\n\n\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 71, "score": 17.916135002239663 }, { "content": " self.dense_values[dense_index] = value;\n\n }\n\n\n\n fn add(&mut self, index: EntityIndex, value: T) {\n\n assert!(!self.has(index));\n\n self.assure_index(index);\n\n self.dense_values.push(value);\n\n 
self.dense_indices.push(index);\n\n self.sparse_indices[index as usize] = self.dense_indices.len() as EntityIndex - 1;\n\n }\n\n}\n\n\n\nuse async_std::sync::RwLock;\n\n\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 72, "score": 17.131231475755527 }, { "content": "mod ticks;\n\nuse std::thread::JoinHandle;\n\nuse async_std::sync::Mutex;\n\nuse ticks::*;\n\nuse winit::event::VirtualKeyCode;\n\n\n\nuse std::time::{Duration, Instant};\n\nuse std::{pin::Pin, sync::Arc};\n\nuse std::sync::atomic::*;\n\n\n\nuse futures::{Future};\n\nuse winit::{event::{Event, WindowEvent}, event_loop::{ControlFlow, EventLoop}, platform::windows::EventLoopExtWindows, window::{Window, WindowBuilder}};\n\n\n\nuse crate::rendering::Renderer;\n\nuse crate::sync::AtomicWaiter;\n\nuse crate::{entity::EntityComponentManager, sync::{Runtime, block_on}};\n\n\n\n//o------------ User Trait ---------------o\n\n\n", "file_path": "src/app.rs", "rank": 73, "score": 17.040614897054876 }, { "content": "use super::*;\n\n\n\npub struct DenseStore<T: Default + Clone> {\n\n sparse_indices: Vec<EntityIndex>,\n\n dense_indices: Vec<EntityIndex>,\n\n dense_values: Vec<T>,\n\n}\n\n\n\nimpl<T: 'static + Default + Clone> GenericComponentStore for DenseStore<T> {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn Any {\n\n self\n\n }\n\n\n\n fn optimize(&mut self) {}\n\n\n\n fn has(&self, index: EntityIndex) -> bool {\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 74, "score": 16.889067408033117 }, { "content": " event_loop: Option<EventLoop<()>>,\n\n user: Arc<T>,\n\n fixed_step_signal_thread: Option<JoinHandle<()>>,\n\n fixed_step_signal: (async_std::channel::Sender<FixedStepUpdateSignal>, async_std::channel::Receiver<FixedStepUpdateSignal>),\n\n last_frame_end: Instant,\n\n}\n\n\n\nimpl<T: User + 'static> Application<T> \n\n{\n\n\n\n pub fn new() -> Self \n\n {\n\n env_logger::init();\n\n let event_loop = 
EventLoop::new_any_thread();\n\n let window = WindowBuilder::new().build(&event_loop).unwrap();\n\n let renderer = block_on(Renderer::new(&window));\n\n Self{\n\n shared_data: Arc::new(SharedAppData{\n\n end_program: AtomicBool::new(false),\n\n runtime: Runtime::new(),\n", "file_path": "src/app.rs", "rank": 75, "score": 16.562155246235825 }, { "content": " else {\n\n None\n\n }\n\n }\n\n \n\n fn get_mut(&mut self, index: EntityIndex) -> Option<&mut T> {\n\n if self.has(index) {\n\n let index = index as usize;\n\n let dense_index = self.sparse_indices[index] as usize;\n\n Some(&mut self.dense_values[dense_index])\n\n }\n\n else {\n\n None\n\n }\n\n }\n\n\n\n fn set(&mut self, index: EntityIndex, value: T) {\n\n assert!(self.has(index));\n\n let index = index as usize;\n\n let dense_index = self.sparse_indices[index] as usize;\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 76, "score": 16.399866648115694 }, { "content": " }\n\n}\n\n\n\nimpl VariableStepData {\n\n pub async fn key_pressed(&self, key: VirtualKeyCode) -> bool {\n\n let input_state = &mut*self.input_state_backbuffer.lock().await;\n\n input_state.key_states[key as usize]\n\n }\n\n\n\n pub async fn key_released(&self, key: VirtualKeyCode) -> bool {\n\n let input_state = &mut*self.input_state_backbuffer.lock().await;\n\n !input_state.key_states[key as usize]\n\n }\n\n\n\n pub async fn key_just_pressed(&self, key: VirtualKeyCode) -> bool {\n\n let input_state = &mut*self.input_state_backbuffer.lock().await;\n\n input_state.key_states[key as usize] && !input_state.key_states_old[key as usize]\n\n }\n\n\n\n pub async fn key_just_released(&self, key: VirtualKeyCode) -> bool {\n", "file_path": "src/app.rs", "rank": 77, "score": 15.772286794838003 }, { "content": " }\n\n}\n\n\n\nimpl<T: 'static + Default + Clone + Component> ComponentStore<T> for DenseStore<T> {\n\n type ComponentType = T;\n\n\n\n fn new() -> Self {\n\n Self{\n\n sparse_indices: Vec::new(),\n\n dense_indices: 
Vec::new(),\n\n dense_values: Vec::new(),\n\n }\n\n }\n\n\n\n fn get(&self, index: EntityIndex) -> Option<&T> {\n\n if self.has(index) {\n\n let index = index as usize;\n\n let dense_index = self.sparse_indices[index] as usize;\n\n Some(&self.dense_values[dense_index])\n\n }\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 78, "score": 15.745495072018745 }, { "content": " }\n\n\n\n pub async fn key_released(&self, key: VirtualKeyCode) -> bool {\n\n let input_state = &mut*self.input_state.lock().await;\n\n !input_state.key_states[key as usize]\n\n }\n\n\n\n pub async fn key_just_pressed(&self, key: VirtualKeyCode) -> bool {\n\n let input_state = &mut*self.input_state.lock().await;\n\n input_state.key_states[key as usize] && !input_state.key_states_old[key as usize]\n\n }\n\n\n\n pub async fn key_just_released(&self, key: VirtualKeyCode) -> bool {\n\n let input_state = &mut*self.input_state.lock().await;\n\n !input_state.key_states[key as usize] && input_state.key_states_old[key as usize]\n\n }\n\n\n\n pub fn get_delta_time(&self) -> std::time::Duration {\n\n let dt = self.fixed_delta_time.load(std::sync::atomic::Ordering::Relaxed);\n\n std::time::Duration::from_nanos(dt)\n", "file_path": "src/app.rs", "rank": 79, "score": 15.109856352753564 }, { "content": "\n\n in_state.key_states[index] = input.state == winit::event::ElementState::Pressed;\n\n },\n\n _ => { }\n\n }\n\n },\n\n _ => {}\n\n }\n\n });\n\n }\n\n\n\n fn on_main_events_cleared(&mut self, control_flow: &mut ControlFlow) \n\n {\n\n profiling::scope!(\"MainEventsCleared\");\n\n self.shared_data.window.request_redraw();\n\n if self.shared_data.end_program.load(Ordering::Relaxed) {\n\n *control_flow = ControlFlow::Exit\n\n } else {\n\n let waiter = AtomicWaiter::new();\n\n let dep = waiter.make_dependency();\n", "file_path": "src/app.rs", "rank": 80, "score": 15.00915842616007 }, { "content": "use crate::entity::handle::*; \n\nuse super::component_storage::*;\n\n\n\npub struct 
EntityManager {\n\n pub(crate) entity_slots: Vec<EntitySlot>,\n\n pub entity_free_list: Vec<EntityIndex>,\n\n pub entity_destruct_queue: Vec<EntityIndex>,\n\n}\n\n\n\nimpl EntityManager {\n\n pub fn new() -> Self {\n\n Self{\n\n entity_slots: Vec::new(),\n\n entity_free_list: Vec::new(),\n\n entity_destruct_queue: Vec::new(),\n\n }\n\n }\n\n\n\n pub(crate) fn exists_index(&self, index: EntityIndex) -> bool {\n\n (index as usize) < self.entity_slots.len() && self.entity_slots[index as usize].alive\n", "file_path": "src/entity/entity_manager.rs", "rank": 81, "score": 14.918871517824904 }, { "content": " if self.pages.len() <= page_index {\n\n self.pages.resize(page_index + 1, Page::new());\n\n }\n\n }\n\n\n\n fn has_split(&self, page_index: usize, page_offset: usize) -> bool {\n\n page_index < self.pages.len() && self.pages[page_index].slot_used[page_offset]\n\n }\n\n}\n\n\n\nimpl<T: Default + Clone + Component> ComponentStore<T> for LinearStore<T> {\n\n type ComponentType = T;\n\n\n\n fn new() -> Self {\n\n Self{\n\n pages: Vec::new(),\n\n }\n\n }\n\n\n\n fn get(&self, index: EntityIndex) -> Option<&T> {\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 82, "score": 14.690043554019184 }, { "content": "#![macro_use]\n\n\n\nuse async_std::sync::{RwLock};\n\nuse std::any::*;\n\nuse std::sync::Arc;\n\n\n\nuse crate::entity::entity_manager::*;\n\nuse crate::entity::component_storage::*;\n\n\n\npub struct EntityComponentManager {\n\n pub entities: Arc<RwLock<EntityManager>>,\n\n stores: RwLock<rustc_hash::FxHashMap<TypeId, Box<dyn ComponentStoreAccessor + Sync + Send>>>,\n\n}\n\n\n\nimpl Default for EntityComponentManager {\n\n fn default() -> Self {\n\n Self{\n\n entities: Arc::new(RwLock::new(EntityManager::new())),\n\n stores: RwLock::new(rustc_hash::FxHashMap::default()),\n\n }\n", "file_path": "src/entity/component_manager.rs", "rank": 83, "score": 14.444558244090624 }, { "content": "pub mod sync;\n\npub mod entity;\n\npub mod util;\n\npub mod 
app;\n\npub mod rendering;\n\n\n\npub type Vf32x2 = cgmath::Vector2<f32>;\n\npub type Vf32x3 = cgmath::Vector3<f32>;\n\npub type Vf32x4 = cgmath::Vector4<f32>;\n\n\n\n#[allow(unused)]\n\nuse sync::*;\n\n#[allow(unused)]\n\nuse entity::{DenseStore,EntityHandle, GenericComponentStore, ComponentStore};\n\n#[allow(unused)]\n\nuse util::*;\n\n#[allow(unused)]\n\nuse app::*;\n\n\n\n#[cfg(test)]\n", "file_path": "src/lib.rs", "rank": 84, "score": 13.994228770750304 }, { "content": " .map(|(slot, _)| slot)\n\n }\n\n\n\n #[allow(unused)]\n\n fn iter_mut(&mut self) -> impl Iterator<Item = &mut T> {\n\n self.slots.iter_mut()\n\n .zip(self.slot_used.iter())\n\n .filter(|(_, used)| **used)\n\n .map(|(slot, _)| slot)\n\n }\n\n\n\n #[allow(unused)]\n\n fn iter_entity(&self, page_index: usize) -> impl Iterator<Item = (EntityIndex, &T)> {\n\n self.slots.iter()\n\n .zip(self.slot_used.iter())\n\n .enumerate()\n\n .filter(|(_, (_, used))| **used)\n\n .map(move |(index, (slot, _))| ((index + (page_index << PAGE_EXPONENT)) as EntityIndex, slot))\n\n }\n\n\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 85, "score": 13.887473349004052 }, { "content": "pub type EntityIndex = u32;\n\npub type EntityVersion = u32;\n\n\n\n#[derive(Clone, Copy)]\n\npub struct EntityHandle {\n\n pub index: EntityIndex,\n\n pub version: EntityVersion,\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub(crate) struct EntitySlot {\n\n pub(crate) version: EntityVersion,\n\n pub(crate) alive: bool,\n\n}\n\n\n\nimpl EntitySlot {\n\n pub(crate) fn new() -> Self {\n\n Self{\n\n version: 0,\n\n alive: false,\n\n }\n\n }\n\n}", "file_path": "src/entity/handle.rs", "rank": 86, "score": 13.737504045730216 }, { "content": "impl<T: Default + Clone> DenseStore<T> {\n\n #[allow(unused)]\n\n pub fn iter(&self) -> impl Iterator<Item = &T> {\n\n self.dense_values.iter()\n\n }\n\n \n\n #[allow(unused)]\n\n pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut T> {\n\n self.dense_values.iter_mut()\n\n }\n\n\n\n 
#[allow(unused)]\n\n pub fn iter_entity(&self) -> impl Iterator<Item = (EntityIndex, &T)> {\n\n self.dense_indices.iter().map(|i|*i).zip(self.dense_values.iter())\n\n }\n\n \n\n #[allow(unused)]\n\n pub fn iter_entity_mut(&mut self) -> impl Iterator<Item = (EntityIndex, &mut T)> {\n\n self.dense_indices.iter().map(|i|*i).zip(self.dense_values.iter_mut())\n\n }\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 87, "score": 13.702262606598683 }, { "content": " self.pages\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, page)| page.len > 0)\n\n .map(|(entity, page)| (entity, page))\n\n .flat_map(|(page_index, page)| page.iter_entity(page_index))\n\n }\n\n \n\n #[allow(unused)]\n\n pub fn iter_entity_mut(&mut self) -> impl Iterator<Item = (EntityIndex, &mut T)> {\n\n self.pages\n\n .iter_mut()\n\n .enumerate()\n\n .filter(|(_, page)| page.len > 0)\n\n .map(|(entity, page)| (entity, page))\n\n .flat_map(|(page_index, page)| page.iter_entity_mut(page_index))\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn iter_entity_batch(&self, batch_size: usize) -> impl Iterator<Item = impl Iterator<Item = (EntityIndex, &T)>> {\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 88, "score": 13.670910660888326 }, { "content": " let page_index = get_page_index(index, PAGE_EXPONENT);\n\n let page_offset = get_page_offset(index, PAGE_MASK);\n\n if self.has_split(page_index,page_offset) {\n\n Some(&self.pages[page_index].slots[page_offset])\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn get_mut(&mut self, index: EntityIndex) -> Option<&mut T> {\n\n let page_index = get_page_index(index, PAGE_EXPONENT);\n\n let page_offset = get_page_offset(index, PAGE_MASK);\n\n if self.has_split(page_index,page_offset) {\n\n Some(&mut self.pages[page_index].slots[page_offset])\n\n }else {\n\n None\n\n }\n\n }\n\n\n\n fn set(&mut self, index: EntityIndex, value: T) {\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 89, "score": 13.00730228288527 }, { 
"content": "\n\n pub fn sort(&mut self) {\n\n let mut new_dense_values = Vec::<T>::with_capacity(self.dense_values.len());\n\n let mut new_dense_indices = Vec::<EntityIndex>::with_capacity(self.dense_values.len());\n\n\n\n for dense_index in self.sparse_indices.iter_mut() {\n\n *dense_index = \n\n if *dense_index != !(0 as EntityIndex) {\n\n new_dense_indices.push(*dense_index);\n\n let mut el = T::default();\n\n std::mem::swap(&mut self.dense_values[*dense_index as usize], &mut el);\n\n new_dense_values.push(el);\n\n (new_dense_indices.len() - 1) as EntityIndex\n\n } else {\n\n !(0 as EntityIndex)\n\n }\n\n }\n\n\n\n self.dense_indices = new_dense_indices;\n\n self.dense_values = new_dense_values;\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 90, "score": 12.927526191386905 }, { "content": " window,\n\n min_vary_delta_time: AtomicU64::from(1_000_000),\n\n }),\n\n fixed_step_data: Arc::new(FixedStepData{\n\n ecm: EntityComponentManager::new(),\n\n input_state: Mutex::new(InputState::default()),\n\n fixed_delta_time: AtomicU64::from(33_000_000),\n\n }),\n\n event_loop: Some(event_loop),\n\n user: Arc::new(T::default()),\n\n fixed_step_signal_thread: None,\n\n fixed_step_signal: async_std::channel::bounded(2),\n\n last_frame_end: Instant::now(),\n\n variable_step_data: Arc::new(VariableStepData{\n\n input_state_backbuffer: Mutex::new(InputState::default()),\n\n input_state_frontbuffer: Mutex::new(InputState::default()),\n\n renderer,\n\n vary_delta_time: AtomicU64::from(0),\n\n }),\n\n }\n", "file_path": "src/app.rs", "rank": 91, "score": 12.851400940743499 }, { "content": " }\n\n\n\n #[allow(unused)]\n\n pub fn version_of(&self, index: EntityIndex) -> Option<EntityVersion> {\n\n if self.exists_index(index) {\n\n Some(self.entity_slots[index as usize].version)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn exists(&self, entity: EntityHandle) -> bool {\n\n (entity.index as usize) < self.entity_slots.len() && self.entity_slots[entity.index 
as usize].alive && self.entity_slots[entity.index as usize].version == entity.version\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn create(&mut self) -> EntityHandle {\n\n let index = self.entity_free_list.pop().unwrap_or(\n\n {\n\n let index = self.entity_slots.len();\n", "file_path": "src/entity/entity_manager.rs", "rank": 92, "score": 12.742259024735048 }, { "content": " let index = index as usize;\n\n index < self.sparse_indices.len() && self.sparse_indices[index] != !(0 as EntityIndex)\n\n }\n\n\n\n fn rem(&mut self, index: EntityIndex) {\n\n assert!(self.has(index), \"index was {}, val was {}\", index, self.sparse_indices[index as usize]);\n\n self.assure_index(index);\n\n let dense_index = self.sparse_indices[index as usize] as usize;\n\n let last_value = self.dense_values.pop().unwrap();\n\n let last_index = self.dense_indices.pop().unwrap();\n\n self.dense_values[dense_index] = last_value;\n\n self.dense_indices[dense_index] = last_index;\n\n self.sparse_indices[index as usize] = !0;\n\n }\n\n\n\n fn len(&self) -> usize {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 93, "score": 12.699322585124897 }, { "content": "use super::*;\n\n\n\nconst fn get_page_index(index: EntityIndex, page_exponent: usize) -> usize {\n\n index as usize >> page_exponent\n\n}\n\n\n\nconst fn get_page_offset(index: EntityIndex, page_mask: usize) -> usize {\n\n index as usize & page_mask\n\n}\n\n\n\nconst fn get_page_mask(page_exponent: usize) -> usize {\n\n !(usize::MAX << page_exponent)\n\n}\n\n\n\nconst PAGE_EXPONENT: usize = 7;\n\nconst PAGE_SIZE: usize = 1 << PAGE_EXPONENT;\n\nconst PAGE_MASK: usize = get_page_mask(PAGE_EXPONENT);\n\n\n\n#[derive(Clone)]\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 94, "score": 12.667816867359335 }, { "content": "\n\nimpl Default for InputState {\n\n fn default() -> Self {\n\n Self{\n\n key_states_old: Box::new([false; 512]),\n\n key_states: Box::new([false; 512]),\n\n 
button_states_old: Box::new([false; 16]),\n\n button_states: Box::new([false; 16]),\n\n cursor_pos_old: [0; 2],\n\n cursor_pos: [0; 2],\n\n }\n\n }\n\n}\n\n\n\n#[allow(unused)]\n\npub struct Application<T : User> \n\n{\n\n pub(crate) shared_data: Arc<SharedAppData>,\n\n pub(crate) fixed_step_data: Arc<FixedStepData>,\n\n pub(crate) variable_step_data: Arc<VariableStepData>,\n", "file_path": "src/app.rs", "rank": 95, "score": 12.51716334377899 }, { "content": "use std::{sync::Arc};\n\nuse std::sync::atomic::*;\n\n\n\nuse super::*;\n\n\n\npub(crate) struct FixedStepUpdateSignal;\n\n\n\npub(crate) fn fixed_time_step_notify(\n\n shared_data: Arc<SharedAppData>, \n\n fixed_step_data: Arc<FixedStepData>,\n\n signal_snd: async_std::channel::Sender<FixedStepUpdateSignal>\n\n) {\n\n profiling::register_thread!(\"fixed time step notify thread\".into());\n\n while !shared_data.end_program.load(Ordering::Relaxed) {\n\n spin_sleep::sleep(fixed_step_data.get_delta_time());\n\n profiling::scope!(\"fixed step notify\");\n\n if signal_snd.len() < 2 {\n\n let _ = signal_snd.try_send(FixedStepUpdateSignal{});\n\n }\n\n }\n", "file_path": "src/app/ticks.rs", "rank": 96, "score": 12.112744317445149 }, { "content": "\n\n fn as_any_ref(&self) -> &dyn Any {\n\n self\n\n }\n\n}\n\n\n\nimpl<T: 'static + Default + Clone> ComponentStoreAccessor for Arc<RwLock<DenseStore<T>>> {\n\n fn exec(&self, f: &mut dyn FnMut(&mut dyn GenericComponentStore) -> ()) {\n\n let mut guard = spin_on!(self.try_write());\n\n\n\n let generic_self: &mut dyn GenericComponentStore = &mut *guard;\n\n\n\n f(generic_self);\n\n }\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn Any {\n\n self\n\n }\n\n\n\n fn as_any_ref(&self) -> &dyn Any {\n\n self\n\n }\n\n}", "file_path": "src/entity/component_storage/dense_store.rs", "rank": 97, "score": 12.06343083750369 }, { "content": " }\n\n\n\n fn len(&self) -> usize {\n\n 0\n\n }\n\n}\n\n\n\nimpl<T: Default + Clone> LinearStore<T> {\n\n #[allow(unused)]\n\n pub fn iter(&self) -> 
impl Iterator<Item = &T> {\n\n self.pages.iter().filter(|page| page.len > 0).flat_map(|page| page.iter())\n\n }\n\n \n\n #[allow(unused)]\n\n pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut T> {\n\n self.pages.iter_mut().filter(| page| page.len > 0).flat_map(|page| page.iter_mut())\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn iter_entity(&self) -> impl Iterator<Item = (EntityIndex, &T)> {\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 98, "score": 11.982954001090917 }, { "content": " fn as_any_mut(&mut self) -> &mut dyn Any {\n\n self\n\n }\n\n\n\n fn optimize(&mut self) {}\n\n\n\n fn has(&self, index: EntityIndex) -> bool {\n\n let page_index = get_page_index(index, PAGE_EXPONENT);\n\n let page_offset = get_page_offset(index, PAGE_MASK);\n\n self.has_split(page_index, page_offset)\n\n }\n\n\n\n fn rem(&mut self, index: EntityIndex) {\n\n let page_index = get_page_index(index, PAGE_EXPONENT);\n\n let page_offset = get_page_offset(index, PAGE_MASK);\n\n assert!(self.has_split(page_index, page_offset), \"tried to remove non existing component of an entity\");\n\n let page = &mut self.pages[page_index];\n\n page.slots[page_offset] = T::default();\n\n page.slot_used[page_offset] = false;\n\n page.len -= 1;\n", "file_path": "src/entity/component_storage/linear_store.rs", "rank": 99, "score": 11.97023889946696 } ]
Rust
src/state.rs
peterschwarz/sawtooth-pbft
f9b5372fc028d5c47ad4f2a7c6947cbc77272a50
/* * Copyright 2018 Bitwise IO, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ----------------------------------------------------------------------------- */ use std::fmt; use hex; use sawtooth_sdk::consensus::engine::{BlockId, PeerId}; use crate::config::PbftConfig; use crate::message_type::PbftMessageType; use crate::protos::pbft_message::PbftBlock; use crate::timing::Timeout; #[derive(Debug, PartialEq, Serialize, Deserialize)] enum PbftNodeRole { Primary, Secondary, } #[derive(Debug, PartialEq, PartialOrd, Clone, Serialize, Deserialize)] pub enum PbftPhase { NotStarted, PrePreparing, Preparing, Checking, Committing, Finished, } #[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] pub enum PbftMode { Normal, ViewChanging, Checkpointing, } impl fmt::Display for PbftState { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let ast = if self.is_primary() { "*" } else { " " }; let mode = match self.mode { PbftMode::Normal => "N", PbftMode::Checkpointing => "C", PbftMode::ViewChanging => "V", }; let phase = match self.phase { PbftPhase::NotStarted => "NS", PbftPhase::PrePreparing => "PP", PbftPhase::Preparing => "Pr", PbftPhase::Checking => "Ch", PbftPhase::Committing => "Co", PbftPhase::Finished => "Fi", }; let wb = match self.working_block { WorkingBlockOption::WorkingBlock(ref block) => format!( "{}/{}", block.block_num, &hex::encode(block.get_block_id())[..6] ), WorkingBlockOption::TentativeWorkingBlock(ref block_id) => { 
String::from(&hex::encode(block_id)[..5]) + "~" } _ => String::from("~none~"), }; write!( f, "({} {} {}, seq {}, wb {}), Node {}{}", phase, mode, self.view, self.seq_num, wb, ast, &hex::encode(self.id.clone())[..6], ) } } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub enum WorkingBlockOption { NoWorkingBlock, TentativeWorkingBlock(BlockId), WorkingBlock(PbftBlock), } impl WorkingBlockOption { pub fn is_none(&self) -> bool { self == &WorkingBlockOption::NoWorkingBlock } } #[derive(Debug, Serialize, Deserialize)] pub struct PbftState { pub id: PeerId, pub seq_num: u64, pub view: u64, pub phase: PbftPhase, role: PbftNodeRole, pub mode: PbftMode, pub pre_checkpoint_mode: PbftMode, pub peer_ids: Vec<PeerId>, pub f: u64, pub commit_timeout: Timeout, pub idle_timeout: Timeout, pub forced_view_change_period: u64, pub working_block: WorkingBlockOption, } impl PbftState { #[allow(clippy::needless_pass_by_value)] pub fn new(id: PeerId, head_block_num: u64, config: &PbftConfig) -> Self { let f = ((config.peers.len() - 1) / 3) as u64; if f == 0 { panic!("This network does not contain enough nodes to be fault tolerant"); } PbftState { id: id.clone(), seq_num: head_block_num + 1, view: 0, phase: PbftPhase::NotStarted, role: if config.peers[0] == id { PbftNodeRole::Primary } else { PbftNodeRole::Secondary }, mode: PbftMode::Normal, pre_checkpoint_mode: PbftMode::Normal, f, peer_ids: config.peers.clone(), commit_timeout: Timeout::new(config.commit_timeout), idle_timeout: Timeout::new(config.idle_timeout), forced_view_change_period: config.forced_view_change_period, working_block: WorkingBlockOption::NoWorkingBlock, } } pub fn peers(&self) -> &Vec<PeerId> { &self.peer_ids } pub fn check_msg_type(&self) -> PbftMessageType { match self.phase { PbftPhase::PrePreparing => PbftMessageType::PrePrepare, PbftPhase::Preparing => PbftMessageType::Prepare, PbftPhase::Checking => PbftMessageType::Prepare, PbftPhase::Committing => PbftMessageType::Commit, _ => 
PbftMessageType::Unset, } } pub fn get_primary_id(&self) -> PeerId { let primary_index = (self.view % (self.peer_ids.len() as u64)) as usize; self.peer_ids[primary_index].clone() } pub fn is_primary(&self) -> bool { self.role == PbftNodeRole::Primary } pub fn upgrade_role(&mut self) { self.role = PbftNodeRole::Primary; } pub fn downgrade_role(&mut self) { self.role = PbftNodeRole::Secondary; } pub fn switch_phase(&mut self, desired_phase: PbftPhase) -> Option<PbftPhase> { let next = match self.phase { PbftPhase::NotStarted => PbftPhase::PrePreparing, PbftPhase::PrePreparing => PbftPhase::Preparing, PbftPhase::Preparing => PbftPhase::Checking, PbftPhase::Checking => PbftPhase::Committing, PbftPhase::Committing => PbftPhase::Finished, PbftPhase::Finished => PbftPhase::NotStarted, }; if desired_phase == next { debug!("{}: Changing to {:?}", self, desired_phase); self.phase = desired_phase.clone(); Some(desired_phase) } else { debug!("{}: Didn't change to {:?}", self, desired_phase); None } } pub fn at_forced_view_change(&self) -> bool { self.seq_num > 0 && self.seq_num % self.forced_view_change_period == 0 } pub fn discard_current_block(&mut self) { warn!("PbftState::reset: {}", self); self.working_block = WorkingBlockOption::NoWorkingBlock; self.phase = PbftPhase::NotStarted; self.mode = PbftMode::Normal; self.commit_timeout.stop(); self.idle_timeout.start(); } } #[cfg(test)] mod tests { use super::*; use crate::config::mock_config; #[test] fn no_fault_tolerance() { let config = mock_config(1); let caught = ::std::panic::catch_unwind(|| { PbftState::new(vec![0], 0, &config); }) .is_err(); assert!(caught); } #[test] fn initial_config() { let config = mock_config(4); let state0 = PbftState::new(vec![0], 0, &config); let state1 = PbftState::new(vec![], 0, &config); assert!(state0.is_primary()); assert!(!state1.is_primary()); assert_eq!(state0.f, 1); assert_eq!(state1.f, 1); assert_eq!(state0.check_msg_type(), PbftMessageType::Unset); assert_eq!(state1.check_msg_type(), 
PbftMessageType::Unset); assert_eq!(state0.get_primary_id(), state0.peer_ids[0]); assert_eq!(state1.get_primary_id(), state1.peer_ids[0]); } #[test] fn role_changes() { let config = mock_config(4); let mut state = PbftState::new(vec![0], 0, &config); state.downgrade_role(); assert!(!state.is_primary()); state.upgrade_role(); assert!(state.is_primary()); } #[test] fn phase_changes() { let config = mock_config(4); let mut state = PbftState::new(vec![0], 0, &config); assert!(state.switch_phase(PbftPhase::PrePreparing).is_some()); assert!(state.switch_phase(PbftPhase::Preparing).is_some()); assert!(state.switch_phase(PbftPhase::Checking).is_some()); assert!(state.switch_phase(PbftPhase::Committing).is_some()); assert!(state.switch_phase(PbftPhase::Finished).is_some()); assert!(state.switch_phase(PbftPhase::NotStarted).is_some()); assert!(state.switch_phase(PbftPhase::Finished).is_none()); assert!(state.switch_phase(PbftPhase::Preparing).is_none()); } }
/* * Copyright 2018 Bitwise IO, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ----------------------------------------------------------------------------- */ use std::fmt; use hex; use sawtooth_sdk::consensus::engine::{BlockId, PeerId}; use crate::config::PbftConfig; use crate::message_type::PbftMessageType; use crate::protos::pbft_message::PbftBlock; use crate::timing::Timeout; #[derive(Debug, PartialEq, Serialize, Deserialize)] enum PbftNodeRole { Primary, Secondary, } #[derive(Debug, PartialEq, PartialOrd, Clone, Serialize, Deserialize)] pub enum PbftPhase { NotStarted, PrePreparing, Preparing, Checking, Committing, Finished, } #[derive(Debug, PartialEq, Copy, Clone, Serialize, Deserialize)] pub enum PbftMode { Normal, ViewChanging, Checkpointing, } impl fmt::Display for PbftState { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let ast = if self.is_primary()
mary(&self) -> bool { self.role == PbftNodeRole::Primary } pub fn upgrade_role(&mut self) { self.role = PbftNodeRole::Primary; } pub fn downgrade_role(&mut self) { self.role = PbftNodeRole::Secondary; } pub fn switch_phase(&mut self, desired_phase: PbftPhase) -> Option<PbftPhase> { let next = match self.phase { PbftPhase::NotStarted => PbftPhase::PrePreparing, PbftPhase::PrePreparing => PbftPhase::Preparing, PbftPhase::Preparing => PbftPhase::Checking, PbftPhase::Checking => PbftPhase::Committing, PbftPhase::Committing => PbftPhase::Finished, PbftPhase::Finished => PbftPhase::NotStarted, }; if desired_phase == next { debug!("{}: Changing to {:?}", self, desired_phase); self.phase = desired_phase.clone(); Some(desired_phase) } else { debug!("{}: Didn't change to {:?}", self, desired_phase); None } } pub fn at_forced_view_change(&self) -> bool { self.seq_num > 0 && self.seq_num % self.forced_view_change_period == 0 } pub fn discard_current_block(&mut self) { warn!("PbftState::reset: {}", self); self.working_block = WorkingBlockOption::NoWorkingBlock; self.phase = PbftPhase::NotStarted; self.mode = PbftMode::Normal; self.commit_timeout.stop(); self.idle_timeout.start(); } } #[cfg(test)] mod tests { use super::*; use crate::config::mock_config; #[test] fn no_fault_tolerance() { let config = mock_config(1); let caught = ::std::panic::catch_unwind(|| { PbftState::new(vec![0], 0, &config); }) .is_err(); assert!(caught); } #[test] fn initial_config() { let config = mock_config(4); let state0 = PbftState::new(vec![0], 0, &config); let state1 = PbftState::new(vec![], 0, &config); assert!(state0.is_primary()); assert!(!state1.is_primary()); assert_eq!(state0.f, 1); assert_eq!(state1.f, 1); assert_eq!(state0.check_msg_type(), PbftMessageType::Unset); assert_eq!(state1.check_msg_type(), PbftMessageType::Unset); assert_eq!(state0.get_primary_id(), state0.peer_ids[0]); assert_eq!(state1.get_primary_id(), state1.peer_ids[0]); } #[test] fn role_changes() { let config = 
mock_config(4); let mut state = PbftState::new(vec![0], 0, &config); state.downgrade_role(); assert!(!state.is_primary()); state.upgrade_role(); assert!(state.is_primary()); } #[test] fn phase_changes() { let config = mock_config(4); let mut state = PbftState::new(vec![0], 0, &config); assert!(state.switch_phase(PbftPhase::PrePreparing).is_some()); assert!(state.switch_phase(PbftPhase::Preparing).is_some()); assert!(state.switch_phase(PbftPhase::Checking).is_some()); assert!(state.switch_phase(PbftPhase::Committing).is_some()); assert!(state.switch_phase(PbftPhase::Finished).is_some()); assert!(state.switch_phase(PbftPhase::NotStarted).is_some()); assert!(state.switch_phase(PbftPhase::Finished).is_none()); assert!(state.switch_phase(PbftPhase::Preparing).is_none()); } }
{ "*" } else { " " }; let mode = match self.mode { PbftMode::Normal => "N", PbftMode::Checkpointing => "C", PbftMode::ViewChanging => "V", }; let phase = match self.phase { PbftPhase::NotStarted => "NS", PbftPhase::PrePreparing => "PP", PbftPhase::Preparing => "Pr", PbftPhase::Checking => "Ch", PbftPhase::Committing => "Co", PbftPhase::Finished => "Fi", }; let wb = match self.working_block { WorkingBlockOption::WorkingBlock(ref block) => format!( "{}/{}", block.block_num, &hex::encode(block.get_block_id())[..6] ), WorkingBlockOption::TentativeWorkingBlock(ref block_id) => { String::from(&hex::encode(block_id)[..5]) + "~" } _ => String::from("~none~"), }; write!( f, "({} {} {}, seq {}, wb {}), Node {}{}", phase, mode, self.view, self.seq_num, wb, ast, &hex::encode(self.id.clone())[..6], ) } } #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub enum WorkingBlockOption { NoWorkingBlock, TentativeWorkingBlock(BlockId), WorkingBlock(PbftBlock), } impl WorkingBlockOption { pub fn is_none(&self) -> bool { self == &WorkingBlockOption::NoWorkingBlock } } #[derive(Debug, Serialize, Deserialize)] pub struct PbftState { pub id: PeerId, pub seq_num: u64, pub view: u64, pub phase: PbftPhase, role: PbftNodeRole, pub mode: PbftMode, pub pre_checkpoint_mode: PbftMode, pub peer_ids: Vec<PeerId>, pub f: u64, pub commit_timeout: Timeout, pub idle_timeout: Timeout, pub forced_view_change_period: u64, pub working_block: WorkingBlockOption, } impl PbftState { #[allow(clippy::needless_pass_by_value)] pub fn new(id: PeerId, head_block_num: u64, config: &PbftConfig) -> Self { let f = ((config.peers.len() - 1) / 3) as u64; if f == 0 { panic!("This network does not contain enough nodes to be fault tolerant"); } PbftState { id: id.clone(), seq_num: head_block_num + 1, view: 0, phase: PbftPhase::NotStarted, role: if config.peers[0] == id { PbftNodeRole::Primary } else { PbftNodeRole::Secondary }, mode: PbftMode::Normal, pre_checkpoint_mode: PbftMode::Normal, f, peer_ids: 
config.peers.clone(), commit_timeout: Timeout::new(config.commit_timeout), idle_timeout: Timeout::new(config.idle_timeout), forced_view_change_period: config.forced_view_change_period, working_block: WorkingBlockOption::NoWorkingBlock, } } pub fn peers(&self) -> &Vec<PeerId> { &self.peer_ids } pub fn check_msg_type(&self) -> PbftMessageType { match self.phase { PbftPhase::PrePreparing => PbftMessageType::PrePrepare, PbftPhase::Preparing => PbftMessageType::Prepare, PbftPhase::Checking => PbftMessageType::Prepare, PbftPhase::Committing => PbftMessageType::Commit, _ => PbftMessageType::Unset, } } pub fn get_primary_id(&self) -> PeerId { let primary_index = (self.view % (self.peer_ids.len() as u64)) as usize; self.peer_ids[primary_index].clone() } pub fn is_pri
random
[ { "content": "#[allow(clippy::ptr_arg)]\n\npub fn commit(\n\n state: &mut PbftState,\n\n service: &mut Service,\n\n message: &ParsedMessage,\n\n) -> Result<(), PbftError> {\n\n info!(\n\n \"{}: Committing block {:?}\",\n\n state,\n\n message.get_block().block_id.clone()\n\n );\n\n\n\n service\n\n .commit_block(message.get_block().block_id.clone())\n\n .map_err(|e| PbftError::InternalError(format!(\"Failed to commit block: {:?}\", e)))?;\n\n\n\n state.switch_phase(PbftPhase::Finished);\n\n state.working_block = WorkingBlockOption::NoWorkingBlock;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 0, "score": 148445.69544432705 }, { "content": "/// Given a location string, returns the appropriate storage\n\n///\n\n/// Accepts `\"memory\"` or `\"disk+/path/to/file\"` as location values\n\npub fn get_storage<'a, T: Sized + Serialize + DeserializeOwned + 'a, F: Fn() -> T>(\n\n location: &str,\n\n default: F,\n\n) -> Result<Box<dyn Storage<S = T> + 'a>, String> {\n\n if location == \"memory\" {\n\n Ok(Box::new(MemStorage::new(default)) as Box<Storage<S = T>>)\n\n } else if location.starts_with(\"disk\") {\n\n let split = location.splitn(2, '+').collect::<Vec<_>>();\n\n\n\n if split.len() != 2 {\n\n Err(format!(\"Invalid location: {}\", location))?\n\n }\n\n\n\n Ok(Box::new(DiskStorage::from_path(split[1], default).unwrap()))\n\n } else {\n\n Err(format!(\"Unknown storage location type: {}\", location))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/storage/mod.rs", "rank": 1, "score": 147603.4696916675 }, { "content": "pub fn force_view_change(state: &mut PbftState, service: &mut Service) {\n\n let next_view = state.view + 1;\n\n set_current_view(state, next_view);\n\n\n\n // Upgrade this node to primary, if its ID is correct\n\n if check_is_primary(state) {\n\n become_primary(state, service)\n\n } else {\n\n become_secondary(state)\n\n }\n\n\n\n state.discard_current_block();\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 2, "score": 
129378.7403278586 }, { "content": "fn become_secondary(state: &mut PbftState) {\n\n warn!(\"{}: I'm now a secondary\", state);\n\n state.downgrade_role();\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 3, "score": 125343.16615071395 }, { "content": "fn become_primary(state: &mut PbftState, service: &mut Service) {\n\n state.upgrade_role();\n\n warn!(\"{}: I'm now a primary\", state);\n\n\n\n // If we're the new primary, need to clean up the block mess from the view change and\n\n // initialize a new block.\n\n if let WorkingBlockOption::WorkingBlock(ref working_block) = state.working_block {\n\n info!(\n\n \"{}: Ignoring block {}\",\n\n state,\n\n &hex::encode(working_block.get_block_id())\n\n );\n\n service\n\n .ignore_block(working_block.get_block_id().to_vec())\n\n .unwrap_or_else(|e| error!(\"Couldn't ignore block: {}\", e));\n\n } else if let WorkingBlockOption::TentativeWorkingBlock(ref block_id) = state.working_block {\n\n info!(\"{}: Ignoring block {}\", state, &hex::encode(block_id));\n\n service\n\n .ignore_block(block_id.clone())\n\n .unwrap_or_else(|e| error!(\"Couldn't ignore block: {}\", e));\n\n }\n\n info!(\"{}: Initializing block\", state);\n\n service\n\n .initialize_block(None)\n\n .unwrap_or_else(|err| error!(\"Couldn't initialize block: {}\", err));\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 4, "score": 123884.93050639029 }, { "content": "/// Handle a `PrePrepare` message\n\n///\n\n/// A `PrePrepare` message is accepted and added to the log if the following are true:\n\n/// - The message signature is valid (already verified by validator)\n\n/// - The message is from the primary\n\n/// - There is a matching BlockNew message\n\n/// - A `PrePrepare` message does not already exist at this view and sequence number with a\n\n/// different block\n\n/// - The message's view matches the node's current view (handled by message log)\n\n/// - The sequence number is between the low and high watermarks (handled by message log)\n\n///\n\n/// 
If a `PrePrepare` message is accepted, we update the phase and working block\n\npub fn pre_prepare(\n\n state: &mut PbftState,\n\n msg_log: &mut PbftLog,\n\n message: &ParsedMessage,\n\n) -> Result<(), PbftError> {\n\n // Check that message is from the current primary\n\n if PeerId::from(message.info().get_signer_id()) != state.get_primary_id() {\n\n error!(\n\n \"Got PrePrepare from a secondary node {:?}; ignoring message\",\n\n message.info().get_signer_id()\n\n );\n\n return Err(PbftError::NotFromPrimary);\n\n }\n\n\n\n // Check that there is a matching BlockNew message\n\n let block_new_exists = msg_log\n\n .get_messages_of_type_seq(&PbftMessageType::BlockNew, message.info().get_seq_num())\n\n .iter()\n\n .any(|block_new_msg| block_new_msg.get_block() == message.get_block());\n\n if !block_new_exists {\n", "file_path": "src/handlers.rs", "rank": 5, "score": 122276.73407504421 }, { "content": "fn check_is_primary(state: &PbftState) -> bool {\n\n state.id == state.get_primary_id()\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 6, "score": 107313.45140629442 }, { "content": "fn check_if_secondary(state: &PbftState) -> bool {\n\n !state.is_primary() && state.mode != PbftMode::Checkpointing\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 7, "score": 107313.45140629442 }, { "content": "/// Load configuration from on-chain Sawtooth settings.\n\n///\n\n/// Configuration loads the following settings:\n\n/// + `sawtooth.consensus.pbft.peers` (required)\n\n/// + `sawtooth.consensus.pbft.block_duration` (optional, default 200 ms)\n\n/// + `sawtooth.consensus.pbft.checkpoint_period` (optional, default 10 ms)\n\n/// + `sawtooth.consensus.pbft.commit_timeout` (optional, default 4s)\n\n/// + `sawtooth.consensus.pbft.idle_timeout` (optional, default 30s)\n\n/// + `sawtooth.consensus.pbft.forced_view_change_period` (optional, default 30 blocks)\n\n/// + `sawtooth.consensus.pbft.message_timeout` (optional, default 100 blocks)\n\n/// + `sawtooth.consensus.pbft.max_log_size` 
(optional, default 1000 messages)\n\n/// + `sawtooth.consensus.pbft.storage` (optional, default `\"memory\"`)\n\n///\n\n/// # Panics\n\n/// + If the `sawtooth.consensus.pbft.peers` setting is not provided\n\n/// + If settings loading fails entirely\n\n/// + If block duration is greater than the view change timeout\n\npub fn load_pbft_config(block_id: BlockId, service: &mut Service) -> PbftConfig {\n\n let mut config = PbftConfig::default();\n\n\n\n let settings: HashMap<String, String> = service\n\n .get_settings(\n\n block_id,\n\n vec![\n\n String::from(\"sawtooth.consensus.pbft.peers\"),\n\n String::from(\"sawtooth.consensus.pbft.block_duration\"),\n\n String::from(\"sawtooth.consensus.pbft.checkpoint_period\"),\n\n String::from(\"sawtooth.consensus.pbft.commit_timeout\"),\n\n String::from(\"sawtooth.consensus.pbft.idle_timeout\"),\n\n String::from(\"sawtooth.consensus.pbft.forced_view_change_period\"),\n\n String::from(\"sawtooth.consensus.pbft.message_timeout\"),\n\n String::from(\"sawtooth.consensus.pbft.max_log_size\"),\n\n ],\n\n )\n\n .expect(\"Failed to get on-chain settings\");\n\n\n\n // Get the peers associated with this node (including ourselves). Panic if it is not provided;\n", "file_path": "src/config.rs", "rank": 8, "score": 95841.98397612193 }, { "content": "/// Handle a `ViewChange` message\n\n/// Once a node receives `2f + 1` `ViewChange` messages, the node enters view `v + 1` and changes\n\n/// itself into the appropriate role for that view (i.e. 
if `v = 1` and this is node 1, then this\n\n/// node is now the primary).\n\npub fn view_change(\n\n state: &mut PbftState,\n\n msg_log: &mut PbftLog,\n\n service: &mut Service,\n\n vc_message: &ParsedMessage,\n\n) -> Result<(), PbftError> {\n\n if !check_received_enough_view_changes(state, msg_log, vc_message) {\n\n return Ok(());\n\n }\n\n\n\n set_current_view_from_msg(state, vc_message);\n\n\n\n // Upgrade this node to primary, if its ID is correct\n\n if check_is_primary(state) {\n\n become_primary(state, service)\n\n } else {\n\n become_secondary(state)\n\n }\n\n\n\n state.discard_current_block();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 9, "score": 93181.35524117338 }, { "content": "/// Create a PbftMessageInfo struct with the desired type, view, sequence number, and signer ID\n\npub fn make_msg_info(\n\n msg_type: &PbftMessageType,\n\n view: u64,\n\n seq_num: u64,\n\n signer_id: PeerId,\n\n) -> PbftMessageInfo {\n\n let mut info = PbftMessageInfo::new();\n\n info.set_msg_type(String::from(msg_type));\n\n info.set_view(view);\n\n info.set_seq_num(seq_num);\n\n info.set_signer_id(signer_id);\n\n info\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 10, "score": 90624.2315145192 }, { "content": "fn set_current_view(state: &mut PbftState, view: u64) {\n\n state.view = view;\n\n warn!(\"{}: Updating to view {}\", state, state.view);\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 11, "score": 87721.93920121732 }, { "content": "fn set_current_view_from_msg(state: &mut PbftState, vc_message: &ParsedMessage) {\n\n set_current_view(state, vc_message.info().get_view())\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 12, "score": 83295.25063069361 }, { "content": "/// Decide if this message is a future message, past message, or current message.\n\n/// This function defers action on future and past messages to the individual message handlers,\n\n/// which in turn call `action_from_hint()`, and either push to backlog for future 
messages, or add\n\n/// to message log for past messages. This usually only makes sense for regular multicast messages\n\n/// (`PrePrepare`, `Prepare`, and `Commit`)\n\npub fn multicast_hint(state: &PbftState, message: &ParsedMessage) -> PbftHint {\n\n let msg_info = message.info();\n\n let msg_type = PbftMessageType::from(msg_info.get_msg_type());\n\n\n\n if msg_info.get_seq_num() > state.seq_num {\n\n debug!(\n\n \"{}: seq {} > {}, accept all.\",\n\n state,\n\n msg_info.get_seq_num(),\n\n state.seq_num\n\n );\n\n return PbftHint::FutureMessage;\n\n } else if msg_info.get_seq_num() == state.seq_num {\n\n if state.working_block.is_none() {\n\n debug!(\n\n \"{}: seq {} == {}, in limbo\",\n\n state,\n\n msg_info.get_seq_num(),\n\n state.seq_num,\n\n );\n", "file_path": "src/handlers.rs", "rank": 13, "score": 80778.2037633923 }, { "content": "fn merge_setting_if_set_and_map<U, F, T>(\n\n settings_map: &HashMap<String, String>,\n\n setting_field: &mut U,\n\n setting_key: &str,\n\n map: F,\n\n) where\n\n F: Fn(T) -> U,\n\n T: ::std::str::FromStr,\n\n{\n\n if let Some(setting) = settings_map.get(setting_key) {\n\n if let Ok(setting_value) = setting.parse() {\n\n *setting_field = map(setting_value);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 14, "score": 80637.18168277742 }, { "content": "/// RAII structure used to allow write access to state object\n\n///\n\n/// This guard will ensure that any changes to an object are persisted to\n\n/// a backing store when this is Dropped.\n\npub trait StorageWriteGuard<'a, T: Sized>: DerefMut<Target = T> {}\n\n\n", "file_path": "src/storage/mod.rs", "rank": 15, "score": 76054.53423153651 }, { "content": "/// Make a PbftBlock out of a consensus Block (PBFT doesn't need to use all the information about\n\n/// the block - this keeps blocks lighter weight)\n\npub fn pbft_block_from_block(block: Block) -> PbftBlock {\n\n let mut pbft_block = PbftBlock::new();\n\n pbft_block.set_block_id(block.block_id);\n\n 
pbft_block.set_signer_id(block.signer_id);\n\n pbft_block.set_block_num(block.block_num);\n\n pbft_block.set_summary(block.summary);\n\n pbft_block\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::config;\n\n use crate::hash::hash_sha256;\n\n use crate::protos::pbft_message::PbftMessage;\n\n use sawtooth_sdk::consensus::engine::BlockId;\n\n\n\n fn mock_block_id(num: u64) -> BlockId {\n\n BlockId::from(hash_sha256(\n\n format!(\"I'm a block with block num {}\", num).as_bytes(),\n", "file_path": "src/handlers.rs", "rank": 16, "score": 74651.13298362255 }, { "content": "#[cfg(test)]\n\npub fn mock_config(num_nodes: usize) -> PbftConfig {\n\n let mut config = PbftConfig::default();\n\n config.peers = (0..num_nodes).map(|id| vec![id as u8]).collect();\n\n config\n\n}\n", "file_path": "src/config.rs", "rank": 17, "score": 74647.8026563718 }, { "content": "/// Hashes the given bytes with SHA-256\n\npub fn hash_sha256(bytes: &[u8]) -> Vec<u8> {\n\n let mut sha = Sha256::new();\n\n sha.update(bytes);\n\n let mut bytes = Vec::new();\n\n bytes.extend(sha.finish().iter());\n\n bytes\n\n}\n\n\n", "file_path": "src/hash.rs", "rank": 18, "score": 74593.33619492671 }, { "content": "/// Hashes the given bytes with SHA-512\n\npub fn hash_sha512(bytes: &[u8]) -> Vec<u8> {\n\n let mut sha = Sha512::new();\n\n sha.update(bytes);\n\n let mut bytes = Vec::new();\n\n bytes.extend(sha.finish().iter());\n\n bytes\n\n}\n\n\n", "file_path": "src/hash.rs", "rank": 19, "score": 74593.33619492671 }, { "content": "/// Get the peers as a Vec<PeerId> from settings\n\npub fn get_peers_from_settings<S: std::hash::BuildHasher>(\n\n settings: &HashMap<String, String, S>,\n\n) -> Vec<PeerId> {\n\n let peers_setting_value = settings\n\n .get(\"sawtooth.consensus.pbft.peers\")\n\n .expect(\"'sawtooth.consensus.pbft.peers' must be set to use PBFT\");\n\n\n\n warn!(\"Peers setting: {:?}\", peers_setting_value);\n\n\n\n let peers: Vec<String> = 
serde_json::from_str(peers_setting_value)\n\n .expect(\"Invalid value at 'sawtooth.consensus.pbft.peers'\");\n\n\n\n peers\n\n .into_iter()\n\n .map(|s| hex::decode(s).expect(\"PeerId is not valid hex\"))\n\n .collect()\n\n}\n\n\n\n/// Create a mock configuration, given a number of nodes. PeerIds are generated using a Sha256\n\n/// hash.\n", "file_path": "src/config.rs", "rank": 20, "score": 71073.87274943243 }, { "content": "fn check_received_enough_view_changes(\n\n state: &PbftState,\n\n msg_log: &PbftLog,\n\n vc_message: &ParsedMessage,\n\n) -> bool {\n\n msg_log.log_has_required_msgs(\n\n &PbftMessageType::ViewChange,\n\n vc_message,\n\n false,\n\n 2 * state.f + 1,\n\n )\n\n}\n\n\n", "file_path": "src/handlers.rs", "rank": 21, "score": 70350.90395847098 }, { "content": "fn ignore_hint_pre_prepare(state: &PbftState, pbft_message: &ParsedMessage) -> bool {\n\n if let WorkingBlockOption::TentativeWorkingBlock(ref block_id) = state.working_block {\n\n if block_id == &pbft_message.get_block().get_block_id()\n\n && pbft_message.info().get_seq_num() == state.seq_num + 1\n\n {\n\n debug!(\"{}: Ignoring not ready and starting multicast\", state);\n\n true\n\n } else {\n\n debug!(\n\n \"{}: Not starting multicast; ({} != {} or {} != {} + 1)\",\n\n state,\n\n &hex::encode(block_id.clone())[..6],\n\n &hex::encode(pbft_message.get_block().get_block_id())[..6],\n\n pbft_message.info().get_seq_num(),\n\n state.seq_num,\n\n );\n\n false\n\n }\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 22, "score": 67737.8093264427 }, { "content": "/// Verifies that the SHA-512 hash of the given content matches the given hash\n\npub fn verify_sha512(content: &[u8], content_hash: &[u8]) -> Result<(), PbftError> {\n\n let computed_sha512 = hash_sha512(&content);\n\n\n\n if computed_sha512 != content_hash {\n\n Err(PbftError::InternalError(format!(\n\n \"Hash verification failed! 
Content: `{:?}`, Hash: `{:?}`\",\n\n content, content_hash\n\n )))\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/hash.rs", "rank": 23, "score": 64934.997869833416 }, { "content": "#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\nenum TimeoutState {\n\n Active,\n\n Inactive,\n\n Expired,\n\n}\n\n\n\n/// A timer that expires after a given duration\n\n/// Check back on this timer every so often to see if it's expired\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct Timeout {\n\n state: TimeoutState,\n\n duration: Duration,\n\n #[serde(with = \"serde_millis\")]\n\n start: Instant,\n\n}\n\n\n\nimpl Timeout {\n\n pub fn new(duration: Duration) -> Self {\n\n Timeout {\n\n state: TimeoutState::Inactive,\n", "file_path": "src/timing.rs", "rank": 24, "score": 54500.57277372488 }, { "content": "fn main() {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"protos\");\n\n let proto_path = Path::new(\"./protos\");\n\n fs::create_dir_all(&dest_path).unwrap();\n\n\n\n // Run protoc\n\n protoc_rust::run(protoc_rust::Args {\n\n out_dir: &dest_path.to_str().unwrap(),\n\n input: &[proto_path.join(\"pbft_message.proto\").to_str().unwrap()],\n\n includes: &[proto_path.to_str().unwrap()],\n\n customize: Customize {\n\n serde_derive: Some(true),\n\n ..Default::default()\n\n },\n\n })\n\n .expect(\"Protoc Error\");\n\n\n\n // Create mod.rs accordingly\n\n let mut mod_file = File::create(dest_path.join(\"mod.rs\")).unwrap();\n\n mod_file.write_all(b\"pub mod pbft_message;\\n\").unwrap();\n\n}\n", "file_path": "build.rs", "rank": 26, "score": 50898.4114870386 }, { "content": "fn main() {\n\n let args = parse_args();\n\n\n\n let config = match args.log_config {\n\n Some(path) => {\n\n // Register deserializer for syslog so we can load syslog appender(s)\n\n let mut deserializers = log4rs::file::Deserializers::new();\n\n log4rs_syslog::register(&mut deserializers);\n\n\n\n match log4rs::load_config_file(path, 
deserializers) {\n\n Ok(mut config) => {\n\n {\n\n let root = config.root_mut();\n\n root.set_level(args.log_level);\n\n }\n\n config\n\n }\n\n Err(err) => {\n\n eprintln!(\n\n \"Error loading logging configuration file: {:?}\\\n", "file_path": "src/main.rs", "rank": 27, "score": 49194.012088410374 }, { "content": "/// Storage wrapper that ensures that changes to an object are persisted to a backing store\n\n///\n\n/// Achieves this by handing out RAII-guarded references to the underlying data, that ensure\n\n/// persistence when they get Dropped.\n\npub trait Storage {\n\n type S;\n\n\n\n fn read<'a>(&'a self) -> Box<StorageReadGuard<'a, Self::S, Target = Self::S> + 'a>;\n\n fn write<'a>(&'a mut self) -> Box<StorageWriteGuard<'a, Self::S, Target = Self::S> + 'a>;\n\n}\n\n\n", "file_path": "src/storage/mod.rs", "rank": 28, "score": 48532.8322435522 }, { "content": "fn handle_update(\n\n node: &mut PbftNode,\n\n incoming_message: Result<Update, RecvTimeoutError>,\n\n state: &mut PbftState,\n\n) -> Result<bool, PbftError> {\n\n match incoming_message {\n\n Ok(Update::BlockNew(block)) => node.on_block_new(block, state)?,\n\n Ok(Update::BlockValid(block_id)) => node.on_block_valid(&block_id, state)?,\n\n Ok(Update::BlockInvalid(_)) => {\n\n warn!(\"{}: BlockInvalid received, starting view change\", state);\n\n node.propose_view_change(state)?\n\n }\n\n Ok(Update::BlockCommit(block_id)) => node.on_block_commit(block_id, state)?,\n\n Ok(Update::PeerMessage(message, sender_id)) => {\n\n let parsed_message = ParsedMessage::from_peer_message(message, false)?;\n\n let signer_id = parsed_message.info().get_signer_id().to_vec();\n\n\n\n if signer_id != sender_id {\n\n return Err(PbftError::InternalError(format!(\n\n \"Mismatch between sender ID ({:?}) and signer ID ({:?})!\",\n", "file_path": "src/engine.rs", "rank": 29, "score": 47656.518315666915 }, { "content": "fn merge_millis_setting_if_set(\n\n settings_map: &HashMap<String, String>,\n\n setting_field: &mut Duration,\n\n 
setting_key: &str,\n\n) {\n\n merge_setting_if_set_and_map(\n\n settings_map,\n\n setting_field,\n\n setting_key,\n\n Duration::from_millis,\n\n )\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 30, "score": 44992.927667648 }, { "content": "fn parse_args() -> PbftCliArgs {\n\n let matches = clap_app!(sawtooth_pbft =>\n\n (version: crate_version!())\n\n (about: \"PBFT consensus for Sawtooth\")\n\n (@arg connect: -C --connect +takes_value\n\n \"connection endpoint for validator\")\n\n (@arg verbose: -v --verbose +multiple\n\n \"increase output verbosity\")\n\n (@arg logconfig: -L --log_config +takes_value\n\n \"path to logging config file\"))\n\n .get_matches();\n\n\n\n let log_config = matches.value_of(\"logconfig\").map(|s| s.into());\n\n\n\n let log_level = match matches.occurrences_of(\"verbose\") {\n\n 0 => log::LevelFilter::Warn,\n\n 1 => log::LevelFilter::Info,\n\n 2 => log::LevelFilter::Debug,\n\n 3 | _ => log::LevelFilter::Trace,\n\n };\n", "file_path": "src/main.rs", "rank": 31, "score": 42214.4859470414 }, { "content": "// Make a PbftBlock out of a consensus Block (PBFT doesn't need to use all the information about\n\n// the block - this keeps blocks lighter weight)\n\nfn pbft_block_from_block(block: Block) -> PbftBlock {\n\n let mut pbft_block = PbftBlock::new();\n\n pbft_block.set_block_id(block.block_id);\n\n pbft_block.set_signer_id(block.signer_id);\n\n pbft_block.set_block_num(block.block_num);\n\n pbft_block.set_summary(block.summary);\n\n pbft_block\n\n}\n\n\n\n/// NOTE: Testing the PbftNode is a bit strange. Due to missing functionality in the Service,\n\n/// a node calling `broadcast()` doesn't include sending a message to itself. In order to get around\n\n/// this, `on_peer_message()` is called, which sometimes causes unintended side effects when\n\n/// testing. 
Self-sending has been disabled (see `broadcast()` method) for testing purposes.\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::config::mock_config;\n\n use crate::handlers::make_msg_info;\n\n use crate::hash::{hash_sha256, hash_sha512};\n\n use sawtooth_sdk::consensus::engine::{Error, PeerId};\n", "file_path": "src/node.rs", "rank": 32, "score": 37681.2647253662 }, { "content": "fn handle_pbft_result(res: Result<(), PbftError>) {\n\n if let Err(e) = res {\n\n match e {\n\n PbftError::Timeout => (),\n\n PbftError::WrongNumMessages(_, _, _) | PbftError::NotReadyForMessage => trace!(\"{}\", e),\n\n _ => error!(\"{}\", e),\n\n }\n\n }\n\n}\n", "file_path": "src/engine.rs", "rank": 33, "score": 37677.93439811545 }, { "content": "fn merge_setting_if_set<T: ::std::str::FromStr>(\n\n settings_map: &HashMap<String, String>,\n\n setting_field: &mut T,\n\n setting_key: &str,\n\n) {\n\n merge_setting_if_set_and_map(settings_map, setting_field, setting_key, |setting| setting)\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 34, "score": 36700.402516135415 }, { "content": "fn get_console_config(log_level: log::LevelFilter) -> Config {\n\n let stdout = ConsoleAppender::builder()\n\n .encoder(Box::new(PatternEncoder::new(\n\n \"{h({l:5.5})} | {({M}:{L}):20.20} | {m}{n}\",\n\n )))\n\n .build();\n\n\n\n Config::builder()\n\n .appender(Appender::builder().build(\"stdout\", Box::new(stdout)))\n\n .build(Root::builder().appender(\"stdout\").build(log_level))\n\n .unwrap_or_else(|err| {\n\n eprintln!(\"Error building logging configuration: {:?}\", err);\n\n process::exit(1)\n\n })\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 35, "score": 34950.7858296647 }, { "content": "/// RAII structure used to allow read access to state object\n\n///\n\n/// This guard allows avoiding unnecessary syncing if you just need read\n\n/// access to the state object.\n\npub trait StorageReadGuard<'a, T: Sized>: Deref<Target = T> {}\n\n\n", "file_path": "src/storage/mod.rs", "rank": 
36, "score": 34479.53797891789 }, { "content": "/// Create a Protobuf binary representation of a PbftMessage from its info and corresponding Block\n\nfn make_msg_bytes(info: PbftMessageInfo, block: PbftBlock) -> Result<Vec<u8>, ProtobufError> {\n\n let mut msg = PbftMessage::new();\n\n msg.set_info(info);\n\n msg.set_block(block);\n\n msg.write_to_bytes()\n\n}\n\n\n", "file_path": "src/node.rs", "rank": 37, "score": 28134.145962185976 }, { "content": "def check_tolerance(heights):\n\n if heights:\n\n return (max(heights) - min(heights)) <= SYNC_TOLERANCE\n\n else:\n", "file_path": "tests/test_liveness.py", "rank": 38, "score": 24433.005017780793 }, { "content": "def check_consensus(chains, block_num):\n\n blocks = []\n\n for chain in chains:\n\n if chain is not None:\n\n block = chain[-(block_num + 1)]\n\n blocks.append(block)\n\n else:\n\n LOGGER.error(\"Got None chain\")\n\n return False\n\n b0 = blocks[0]\n\n for b in blocks[1:]:\n\n if b0[\"header_signature\"] != b[\"header_signature\"]:\n\n LOGGER.error(\"Validators not in consensus on block %s\", block_num)\n\n LOGGER.error(\"BLOCK DUMP: %s\", blocks)\n\n return False\n", "file_path": "tests/test_liveness.py", "rank": 39, "score": 24433.005017780793 }, { "content": "def check_min_batches(chain, min_batches):\n\n n = sum([len(block[\"header\"][\"batch_ids\"]) for block in chain])\n", "file_path": "tests/test_liveness.py", "rank": 40, "score": 23508.33586572754 }, { "content": "def check_block_batch_count(block, batch_range):\n\n batch_count = len(block[\"header\"][\"batch_ids\"])\n\n\n\n valid = batch_range[0] <= batch_count <= batch_range[1]\n\n\n\n if not valid:\n\n LOGGER.error(\n\n \"Block (%s, %s) had %s batches in it\",\n\n block[\"header\"][\"block_num\"],\n\n block[\"header_signature\"],\n\n batch_count)\n\n\n", "file_path": "tests/test_liveness.py", "rank": 41, "score": 22651.102948956144 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 
2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! Memory-backed persistence wrapper\n\n//!\n\n//! Useful when a Storage impl is required, but you don't actually need to\n", "file_path": "src/storage/memory.rs", "rank": 42, "score": 77.49854585126582 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! 
Handlers for individual message types\n\n\n\nuse std::convert::From;\n", "file_path": "src/handlers.rs", "rank": 43, "score": 76.64889554801354 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! PBFT-specific error messages\n\n\n\nuse std::error::Error;\n", "file_path": "src/error.rs", "rank": 44, "score": 76.4391935652331 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! 
Disk-backed persistence wrapper\n\n\n\nuse std::fmt;\n", "file_path": "src/storage/disk.rs", "rank": 46, "score": 76.07241977409235 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! The core PBFT algorithm\n\n\n\nuse std::collections::HashSet;\n", "file_path": "src/node.rs", "rank": 47, "score": 75.50466325560078 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * ------------------------------------------------------------------------------\n\n */\n\n\n\n//! 
Timing-related structures\n\n\n\nuse std::time::{Duration, Instant};\n", "file_path": "src/timing.rs", "rank": 48, "score": 75.50466325560076 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! Initial configuration for a PBFT node\n\n\n\nuse std::collections::HashMap;\n", "file_path": "src/config.rs", "rank": 49, "score": 74.94542772644733 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! 
The message log used by PBFT nodes to save messages\n\n\n\n#![allow(unknown_lints)]\n", "file_path": "src/message_log.rs", "rank": 50, "score": 74.39452092978787 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! Storage trait for syncing writes to an object to a backing store\n\n//!\n\n//! Hands out {read, write} RAII-guarded references to an object, and ensures\n", "file_path": "src/storage/mod.rs", "rank": 51, "score": 73.2965454472626 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! 
Message types for PeerMessages\n\n\n\n#![allow(unknown_lints, clippy::derive_hash_xor_eq)]\n", "file_path": "src/message_type.rs", "rank": 52, "score": 72.94658769466794 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n// Includes the autogenerated protobuf messages\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/protos/mod.rs\"));\n", "file_path": "src/protos.rs", "rank": 53, "score": 72.94658769466794 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\nextern crate protoc_rust;\n\n\n\nuse protoc_rust::Customize;\n\n\n\nuse std::env;\n\nuse std::fs;\n\nuse std::fs::File;\n\nuse 
std::io::Write;\n\nuse std::path::Path;\n\n\n", "file_path": "build.rs", "rank": 54, "score": 71.77592793985943 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! Entry point for the consensus algorithm, including the main event loop\n\n\n\nuse std::sync::mpsc::{Receiver, RecvTimeoutError};\n", "file_path": "src/engine.rs", "rank": 55, "score": 71.25391916419383 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! 
Extensions for the Protobuf-defined message types\n\n\n\n// We know that the property `k1 == k2 ==> hash(k1) == hash(k2)` holds, since protobuf just compares\n", "file_path": "src/message_extensions.rs", "rank": 56, "score": 69.85385665219138 }, { "content": "/*\n\n * Copyright 2018 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! Implementation of the [PBFT consensus\n\n//! algorithm](https://www.usenix.org/legacy/events/osdi99/full_papers/castro/castro_html/castro.html),\n\n//! 
modified for use with Hyperledger Sawtooth.\n", "file_path": "src/main.rs", "rank": 57, "score": 68.37058586797443 }, { "content": "}\n\n\n\n/// Memory-backed RAII-guarded Storage implementation\n\n///\n\n/// Can be used when actual persistence isn't required\n\n#[derive(Debug)]\n\npub struct MemStorage<T: Serialize + DeserializeOwned> {\n\n data: T,\n\n}\n\n\n\nimpl<T: Serialize + DeserializeOwned> MemStorage<T> {\n\n pub fn new<F: Fn() -> T>(default: F) -> Self {\n\n Self { data: default() }\n\n }\n\n}\n\n\n\nimpl<T: Serialize + DeserializeOwned + fmt::Display> fmt::Display for MemStorage<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n (*self).data.fmt(f)\n\n }\n", "file_path": "src/storage/memory.rs", "rank": 58, "score": 24.952722673215938 }, { "content": "use std::fs::File;\n\nuse std::io::{Read, Write};\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse atomicwrites::{AllowOverwrite, AtomicFile};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Serialize;\n\nuse serde_json::{from_str, to_string};\n\n\n\nuse super::{Storage, StorageReadGuard, StorageWriteGuard};\n\n\n\n/// A disk-based read guard\n\npub struct DiskStorageReadGuard<'a, T: Serialize + DeserializeOwned + 'a> {\n\n storage: &'a DiskStorage<T>,\n\n}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned> DiskStorageReadGuard<'a, T> {\n\n fn new(storage: &'a DiskStorage<T>) -> Self {\n\n Self { storage }\n\n }\n", "file_path": "src/storage/disk.rs", "rank": 59, "score": 24.257167173425014 }, { "content": " }\n\n}\n\n\n\nimpl<'a, T: 'a + Serialize + DeserializeOwned + fmt::Display> fmt::Display\n\n for DiskStorageWriteGuard<'a, T>\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n (**self).fmt(f)\n\n }\n\n}\n\n\n\nimpl<'a, T: 'a + Serialize + DeserializeOwned> StorageWriteGuard<'a, T>\n\n for DiskStorageWriteGuard<'a, T>\n\n{\n\n}\n\n\n\n/// A disk-based RAII-guarded Storage implementation\n\n///\n\n/// File writes are atomic\n\npub struct DiskStorage<T: Serialize + 
DeserializeOwned> {\n", "file_path": "src/storage/disk.rs", "rank": 61, "score": 21.705463733752488 }, { "content": " use sawtooth_sdk::messages::consensus::ConsensusPeerMessageHeader;\n\n use serde_json;\n\n use std::collections::HashMap;\n\n use std::default::Default;\n\n use std::fs::{remove_file, File};\n\n use std::io::prelude::*;\n\n\n\n const BLOCK_FILE: &str = \"target/blocks.txt\";\n\n\n\n /// Mock service to roughly keep track of the blockchain\n\n pub struct MockService {\n\n pub chain: Vec<BlockId>,\n\n }\n\n\n\n impl MockService {\n\n /// Serialize the chain into JSON, and write to a file\n\n fn write_chain(&self) {\n\n let mut block_file = File::create(BLOCK_FILE).unwrap();\n\n let block_bytes: Vec<Vec<u8>> = self\n\n .chain\n", "file_path": "src/node.rs", "rank": 62, "score": 21.674721124879504 }, { "content": " data: T,\n\n file: AtomicFile,\n\n}\n\n\n\nimpl<T: Serialize + DeserializeOwned> DiskStorage<T> {\n\n pub fn from_path<P: Into<String>, F: Fn() -> T>(path: P, default: F) -> Result<Self, String> {\n\n let path = path.into();\n\n\n\n let file = AtomicFile::new(path, AllowOverwrite);\n\n\n\n // Read the file first, to see if there's any existing data\n\n let data = match File::open(file.path()) {\n\n Ok(mut f) => {\n\n let mut contents = String::new();\n\n\n\n f.read_to_string(&mut contents)\n\n .map_err(|err| format!(\"Couldn't read file: {}\", err))?;\n\n\n\n from_str(&contents).map_err(|err| format!(\"Couldn't read file: {}\", err))?\n\n }\n", "file_path": "src/storage/disk.rs", "rank": 63, "score": 21.213982415150532 }, { "content": "{\n\n}\n\n\n\n/// A disk-based write guard\n\npub struct DiskStorageWriteGuard<'a, T: Serialize + DeserializeOwned + 'a> {\n\n storage: &'a mut DiskStorage<T>,\n\n}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned> DiskStorageWriteGuard<'a, T> {\n\n fn new(storage: &'a mut DiskStorage<T>) -> Self {\n\n Self { storage }\n\n }\n\n}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned> Drop for DiskStorageWriteGuard<'a, 
T> {\n\n fn drop(&mut self) {\n\n self.storage\n\n .file\n\n .write(|f| {\n\n f.write_all(\n", "file_path": "src/storage/disk.rs", "rank": 64, "score": 20.636928121855803 }, { "content": " to_string(&self.storage.data)\n\n .expect(\"Couldn't convert value to string!\")\n\n .as_bytes(),\n\n )\n\n })\n\n .expect(\"File write failed while dropping DiskStorageWriteGuard!\");\n\n }\n\n}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned + 'a> Deref for DiskStorageWriteGuard<'a, T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &T {\n\n &self.storage.data\n\n }\n\n}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned + 'a> DerefMut for DiskStorageWriteGuard<'a, T> {\n\n fn deref_mut(&mut self) -> &mut T {\n\n &mut self.storage.data\n", "file_path": "src/storage/disk.rs", "rank": 65, "score": 19.756569924101417 }, { "content": "{\n\n}\n\n\n\n/// Memory-backed write guard\n\n#[derive(Debug)]\n\npub struct MemStorageWriteGuard<'a, T: Serialize + DeserializeOwned + 'a> {\n\n storage: &'a mut MemStorage<T>,\n\n}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned> MemStorageWriteGuard<'a, T> {\n\n fn new(storage: &'a mut MemStorage<T>) -> Self {\n\n Self { storage }\n\n }\n\n}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned + 'a> Deref for MemStorageWriteGuard<'a, T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &T {\n\n &self.storage.data\n", "file_path": "src/storage/memory.rs", "rank": 67, "score": 19.63189854036783 }, { "content": "//! 
persist the wrapped object.\n\n\n\nuse std::fmt;\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Serialize;\n\n\n\nuse super::{Storage, StorageReadGuard, StorageWriteGuard};\n\n\n\n/// Memory-backed read guard\n\n#[derive(Debug)]\n\npub struct MemStorageReadGuard<'a, T: Serialize + DeserializeOwned + 'a> {\n\n storage: &'a MemStorage<T>,\n\n}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned> MemStorageReadGuard<'a, T> {\n\n fn new(storage: &'a MemStorage<T>) -> Self {\n\n Self { storage }\n\n }\n", "file_path": "src/storage/memory.rs", "rank": 68, "score": 19.342961180923062 }, { "content": " }\n\n}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned + 'a> DerefMut for MemStorageWriteGuard<'a, T> {\n\n fn deref_mut(&mut self) -> &mut T {\n\n &mut self.storage.data\n\n }\n\n}\n\n\n\nimpl<'a, T: 'a + Serialize + DeserializeOwned + fmt::Display> fmt::Display\n\n for MemStorageWriteGuard<'a, T>\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n (**self).fmt(f)\n\n }\n\n}\n\n\n\nimpl<'a, T: 'a + Serialize + DeserializeOwned> StorageWriteGuard<'a, T>\n\n for MemStorageWriteGuard<'a, T>\n\n{\n", "file_path": "src/storage/memory.rs", "rank": 69, "score": 19.06900232029296 }, { "content": " let s = state.seq_num;\n\n self._broadcast_pbft_message(s, &PbftMessageType::PrePrepare, pbft_block, state)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Handle a `BlockCommit` update from the Validator\n\n /// Since the block was successfully committed, the primary is not faulty and the view change\n\n /// timer can be stopped. If this node is a primary, then initialize a new block. Both node\n\n /// roles transition back to the `NotStarted` phase. 
If this node is at a checkpoint after the\n\n /// previously committed block (`checkpoint_period` blocks have been committed since the last\n\n /// checkpoint), then start a checkpoint.\n\n pub fn on_block_commit(\n\n &mut self,\n\n block_id: BlockId,\n\n state: &mut PbftState,\n\n ) -> Result<(), PbftError> {\n\n debug!(\"{}: <<<<<< BlockCommit: {:?}\", state, block_id);\n\n\n\n if state.phase == PbftPhase::Finished {\n", "file_path": "src/node.rs", "rank": 73, "score": 18.52795792643104 }, { "content": "\n\nuse serde_millis;\n\n\n\n/// Encapsulates calling a function every so often\n\npub struct Ticker {\n\n last: Instant,\n\n timeout: Duration,\n\n}\n\n\n\nimpl Ticker {\n\n pub fn new(period: Duration) -> Self {\n\n Ticker {\n\n last: Instant::now(),\n\n timeout: period,\n\n }\n\n }\n\n\n\n // Do some work if the timeout has expired\n\n pub fn tick<T: FnMut()>(&mut self, mut callback: T) {\n\n let elapsed = Instant::now() - self.last;\n\n if elapsed >= self.timeout {\n\n callback();\n\n self.last = Instant::now();\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\n", "file_path": "src/timing.rs", "rank": 74, "score": 18.42549508038001 }, { "content": " Err(_) => {\n\n let data = default();\n\n file.write(|f| f.write_all(to_string(&data)?.as_bytes()))\n\n .map_err(|err| format!(\"File write failed: {}\", err))?;\n\n\n\n data\n\n }\n\n };\n\n\n\n // Then open the file again and truncate, preparing it to be written to\n\n Ok(Self { data, file })\n\n }\n\n}\n\n\n\nimpl<T: fmt::Display + Serialize + DeserializeOwned> fmt::Display for DiskStorage<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n (*self).data.fmt(f)\n\n }\n\n}\n\n\n", "file_path": "src/storage/disk.rs", "rank": 75, "score": 18.269438606647356 }, { "content": " block_new_msg.get_block().clone(),\n\n preprep_msg.get_block().clone(),\n\n ));\n\n }\n\n\n\n // Check if we have 2f + 1 matching Prepares\n\n Ok(self.log_has_required_msgs(&PbftMessageType::Prepare, 
&preprep_msg, true, 2 * f + 1))\n\n }\n\n\n\n /// Checks if the node is ready to enter the `Committing` phase based on the `PbftMessage` received\n\n ///\n\n /// `check_committable` is true if for this node:\n\n /// + `check_prepared` is true\n\n /// + This node has accepted `2f + 1` `Commit` messages, including its own, that match the\n\n /// corresponding `PrePrepare` message\n\n pub fn check_committable(&self, info: &PbftMessageInfo, f: u64) -> Result<bool, PbftError> {\n\n // Check if Prepared predicate is true\n\n if !self.check_prepared(info, f)? {\n\n return Ok(false);\n\n }\n", "file_path": "src/message_log.rs", "rank": 76, "score": 18.07644773962728 }, { "content": " }\n\n Err(EngineError::BlockNotReady) => {\n\n debug!(\"{}: Block not ready\", state);\n\n Ok(())\n\n }\n\n Err(err) => {\n\n error!(\"Couldn't finalize block: {}\", err);\n\n Err(PbftError::InternalError(\"Couldn't finalize block!\".into()))\n\n }\n\n }\n\n }\n\n\n\n /// Check to see if the view change timeout has expired\n\n pub fn check_commit_timeout_expired(&mut self, state: &mut PbftState) -> bool {\n\n state.commit_timeout.check_expired()\n\n }\n\n\n\n /// Check to see if the idle timeout has expired\n\n pub fn check_idle_timeout_expired(&mut self, state: &mut PbftState) -> bool {\n\n state.idle_timeout.check_expired()\n", "file_path": "src/node.rs", "rank": 77, "score": 16.527448719398194 }, { "content": " if self.msg_log.check_committable(&msg.info(), state.f)? {\n\n self.commit_block_if_committing(&msg, state)?;\n\n }\n\n }\n\n\n\n PbftMessageType::Checkpoint => {\n\n if self.check_if_stale_checkpoint(&msg, state)? 
{\n\n return Ok(());\n\n }\n\n\n\n if !self.check_if_checkpoint_started(&msg, state) {\n\n return Ok(());\n\n }\n\n\n\n // Add message to the log\n\n self.msg_log.add_message(msg.clone(), state)?;\n\n\n\n if check_if_secondary(state) {\n\n self.start_checkpointing_and_forward(&msg, state)?;\n\n }\n", "file_path": "src/node.rs", "rank": 78, "score": 16.455978010755125 }, { "content": " if is_primary {\n\n n.service\n\n .initialize_block(None)\n\n .unwrap_or_else(|err| error!(\"Couldn't initialize block: {}\", err));\n\n }\n\n n\n\n }\n\n\n\n // ---------- Methods for handling Updates from the validator ----------\n\n\n\n /// Handle a peer message from another PbftNode\n\n /// This method handles all messages from other nodes. Such messages may include `PrePrepare`,\n\n /// `Prepare`, `Commit`, `Checkpoint`, or `ViewChange`. If a node receives a type of message\n\n /// before it is ready to do so, the message is pushed into a backlog queue.\n\n #[allow(clippy::needless_pass_by_value)]\n\n pub fn on_peer_message(\n\n &mut self,\n\n msg: ParsedMessage,\n\n state: &mut PbftState,\n\n ) -> Result<(), PbftError> {\n", "file_path": "src/node.rs", "rank": 79, "score": 16.341685863870104 }, { "content": " self.start_checkpoint(state)?;\n\n }\n\n } else {\n\n debug!(\"{}: Not doing anything with BlockCommit\", state);\n\n }\n\n\n\n // The primary processessed this block in a timely manner, so stop the timeout.\n\n state.commit_timeout.stop();\n\n state.idle_timeout.start();\n\n\n\n Ok(())\n\n }\n\n\n\n /// Handle a `BlockValid` update\n\n /// This message arrives after `check_blocks` is called, signifying that the validator has\n\n /// successfully checked a block with this `BlockId`.\n\n /// Once a `BlockValid` is received, transition to committing blocks.\n\n #[allow(clippy::ptr_arg)]\n\n pub fn on_block_valid(\n\n &mut self,\n", "file_path": "src/node.rs", "rank": 81, "score": 15.649682705275694 }, { "content": "//! 
that when the reference drops, any changes to the object are persisted to\n\n//! the selected storage.\n\n\n\npub mod disk;\n\npub mod memory;\n\n\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Serialize;\n\n\n\npub use self::disk::DiskStorage;\n\npub use self::memory::MemStorage;\n\n\n\n/// RAII structure used to allow read access to state object\n\n///\n\n/// This guard allows avoiding unnecessary syncing if you just need read\n\n/// access to the state object.\n", "file_path": "src/storage/mod.rs", "rank": 83, "score": 15.037121463195927 }, { "content": "\n\nuse std::collections::{HashSet, VecDeque};\n\nuse std::fmt;\n\n\n\nuse hex;\n\nuse itertools::Itertools;\n\nuse sawtooth_sdk::consensus::engine::Block;\n\n\n\nuse crate::config::PbftConfig;\n\nuse crate::error::PbftError;\n\nuse crate::message_type::{ParsedMessage, PbftHint, PbftMessageType};\n\nuse crate::protos::pbft_message::{PbftMessage, PbftMessageInfo};\n\nuse crate::state::PbftState;\n\n\n\n/// The log keeps track of the last stable checkpoint\n\n#[derive(Clone)]\n\npub struct PbftStableCheckpoint {\n\n pub seq_num: u64,\n\n pub checkpoint_messages: Vec<PbftMessage>,\n\n}\n", "file_path": "src/message_log.rs", "rank": 85, "score": 14.846840887382506 }, { "content": " messages\n\n .iter()\n\n .map(|m| {\n\n let mut vote = PbftSignedCommitVote::new();\n\n\n\n vote.set_header_bytes(m.header_bytes.clone());\n\n vote.set_header_signature(m.header_signature.clone());\n\n vote.set_message_bytes(m.message_bytes.clone());\n\n\n\n vote\n\n })\n\n .collect::<Vec<_>>(),\n\n ));\n\n\n\n seal.write_to_bytes().map_err(PbftError::SerializationError)\n\n }\n\n\n\n /// The primary tries to finalize a block every so often\n\n /// # Panics\n\n /// Panics if `finalize_block` fails. 
This is necessary because it means the validator wasn't\n", "file_path": "src/node.rs", "rank": 86, "score": 14.424967985579748 }, { "content": "\n\n // Check if we have 2f + 1 matching Commits\n\n let preprep_msg = self\n\n .get_one_msg(info, &PbftMessageType::PrePrepare)\n\n .unwrap();\n\n Ok(self.log_has_required_msgs(&PbftMessageType::Commit, &preprep_msg, true, 2 * f + 1))\n\n }\n\n\n\n /// Get one message matching the type, view number, and sequence number\n\n pub fn get_one_msg(\n\n &self,\n\n info: &PbftMessageInfo,\n\n msg_type: &PbftMessageType,\n\n ) -> Option<&ParsedMessage> {\n\n let msgs =\n\n self.get_messages_of_type_seq_view(msg_type, info.get_seq_num(), info.get_view());\n\n msgs.first().cloned()\n\n }\n\n\n\n /// Check if the log contains `required` number of messages with type `msg_type` that match the\n", "file_path": "src/message_log.rs", "rank": 87, "score": 14.192750647707413 }, { "content": " assert!(node1.on_block_commit(mock_block_id(1), &mut state1).is_ok());\n\n assert_eq!(state1.phase, PbftPhase::NotStarted);\n\n\n\n // Make sure the block was actually committed\n\n let mut f = File::open(BLOCK_FILE).unwrap();\n\n let mut buffer = String::new();\n\n f.read_to_string(&mut buffer).unwrap();\n\n let deser: Vec<Vec<u8>> = serde_json::from_str(&buffer).unwrap();\n\n let blocks: Vec<BlockId> = deser\n\n .iter()\n\n .filter(|&block| !block.is_empty())\n\n .map(|ref block| BlockId::from(block.clone().clone()))\n\n .collect();\n\n assert_eq!(blocks.len(), 2);\n\n assert_eq!(blocks[1], mock_block_id(1));\n\n\n\n remove_file(BLOCK_FILE).unwrap();\n\n }\n\n\n\n /// Make sure that checkpointing works as expected:\n", "file_path": "src/node.rs", "rank": 89, "score": 14.132193340557127 }, { "content": " ViewChange,\n\n\n\n Unset,\n\n}\n\n\n\nimpl fmt::Display for PbftMessageType {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let txt = match self {\n\n PbftMessageType::PrePrepare => \"PP\",\n\n PbftMessageType::Prepare => \"Pr\",\n\n 
PbftMessageType::Commit => \"Co\",\n\n PbftMessageType::BlockNew => \"BN\",\n\n PbftMessageType::Checkpoint => \"CP\",\n\n PbftMessageType::ViewChange => \"VC\",\n\n PbftMessageType::Unset => \"Un\",\n\n };\n\n write!(f, \"{}\", txt)\n\n }\n\n}\n\n\n", "file_path": "src/message_type.rs", "rank": 90, "score": 14.084321373500352 }, { "content": " Timeout,\n\n\n\n /// There is no working block; no operations can be performed\n\n NoWorkingBlock,\n\n\n\n /// Not ready for this message type\n\n NotReadyForMessage,\n\n\n\n /// The message should only come from the primary, but was sent by a secondary node\n\n NotFromPrimary,\n\n}\n\n\n\nimpl Error for PbftError {\n\n fn description(&self) -> &str {\n\n use self::PbftError::*;\n\n match self {\n\n SerializationError(_) => \"SerializationError\",\n\n WrongNumMessages(_, _, _) => \"WrongNumMessages\",\n\n BlockMismatch(_, _) => \"BlockMismatch\",\n\n MessageMismatch(_) => \"MessageMismatch\",\n", "file_path": "src/error.rs", "rank": 91, "score": 14.059301467915017 }, { "content": "mod tests {\n\n extern crate rand;\n\n\n\n use self::rand::distributions::Alphanumeric;\n\n use self::rand::{thread_rng, Rng};\n\n use super::*;\n\n use super::{DiskStorage, MemStorage};\n\n use std::fs::remove_file;\n\n\n\n // The common use case, of passing in a guarded reference\n\n fn add_refs(foo: &mut u32, bar: &u32) {\n\n *foo += bar;\n\n }\n\n\n\n // You can also pass in the storages themselves\n\n fn add_storages(foo: &mut (Storage<S = u32>), bar: &mut (Storage<S = u32>)) {\n\n **foo.write() += **bar.read();\n\n }\n\n\n\n #[test]\n", "file_path": "src/storage/mod.rs", "rank": 92, "score": 13.938461684980414 }, { "content": "}\n\n\n\nimpl<T: Serialize + DeserializeOwned> Storage for MemStorage<T> {\n\n type S = T;\n\n\n\n fn read<'a>(&'a self) -> Box<StorageReadGuard<'a, T, Target = T> + 'a> {\n\n Box::new(MemStorageReadGuard::new(self))\n\n }\n\n\n\n fn write<'a>(&'a mut self) -> Box<StorageWriteGuard<'a, T, Target = T> + 'a> {\n\n 
Box::new(MemStorageWriteGuard::new(self))\n\n }\n\n}\n", "file_path": "src/storage/memory.rs", "rank": 93, "score": 13.880672906650936 }, { "content": "impl<T: Serialize + DeserializeOwned> Storage for DiskStorage<T> {\n\n type S = T;\n\n\n\n fn read<'a>(&'a self) -> Box<StorageReadGuard<'a, T, Target = T> + 'a> {\n\n Box::new(DiskStorageReadGuard::new(self))\n\n }\n\n\n\n fn write<'a>(&'a mut self) -> Box<StorageWriteGuard<'a, T, Target = T> + 'a> {\n\n Box::new(DiskStorageWriteGuard::new(self))\n\n }\n\n}\n", "file_path": "src/storage/disk.rs", "rank": 94, "score": 13.880672906650936 }, { "content": " }\n\n\n\n pub fn start_idle_timeout(&self, state: &mut PbftState) {\n\n state.idle_timeout.start();\n\n }\n\n\n\n /// Start the checkpoint process\n\n /// Primaries start the checkpoint to ensure sequence number correctness\n\n pub fn start_checkpoint(&mut self, state: &mut PbftState) -> Result<(), PbftError> {\n\n if !state.is_primary() {\n\n return Ok(());\n\n }\n\n if state.mode == PbftMode::Checkpointing {\n\n return Ok(());\n\n }\n\n\n\n state.pre_checkpoint_mode = state.mode;\n\n state.mode = PbftMode::Checkpointing;\n\n info!(\"{}: Starting checkpoint\", state);\n\n let s = state.seq_num;\n", "file_path": "src/node.rs", "rank": 95, "score": 13.81696744401263 }, { "content": "\n\n/// Contains all of the components for operating a PBFT node.\n\npub struct PbftNode {\n\n /// Used for interactions with the validator\n\n pub service: Box<Service>,\n\n\n\n /// Messages this node has received\n\n pub msg_log: PbftLog,\n\n}\n\n\n\nimpl PbftNode {\n\n /// Construct a new PBFT node.\n\n /// After the node is created, if the node is primary, it initializes a new block on the chain.\n\n pub fn new(config: &PbftConfig, service: Box<Service>, is_primary: bool) -> Self {\n\n let mut n = PbftNode {\n\n service,\n\n msg_log: PbftLog::new(config),\n\n };\n\n\n\n // Primary initializes a block\n", "file_path": "src/node.rs", "rank": 96, "score": 13.634024630495308 }, { 
"content": "impl PbftLog {\n\n pub fn new(config: &PbftConfig) -> Self {\n\n PbftLog {\n\n messages: HashSet::new(),\n\n low_water_mark: 0,\n\n cycles: 0,\n\n checkpoint_period: config.checkpoint_period,\n\n high_water_mark: config.max_log_size,\n\n max_log_size: config.max_log_size,\n\n backlog: VecDeque::new(),\n\n block_backlog: VecDeque::new(),\n\n latest_stable_checkpoint: None,\n\n }\n\n }\n\n\n\n /// `check_prepared` predicate\n\n /// `check_prepared` is true for this node if the following messages are present in its log:\n\n /// + The original `BlockNew` message\n\n /// + A `PrePrepare` message matching the original message (in the current view)\n\n /// + `2f + 1` matching `Prepare` messages from different nodes that match\n", "file_path": "src/message_log.rs", "rank": 97, "score": 13.501547774560136 }, { "content": " node1\n\n .on_peer_message(msg, &mut state1)\n\n .unwrap_or_else(handle_pbft_err);\n\n }\n\n assert_eq!(state1.phase, PbftPhase::Checking);\n\n\n\n // Spoof the `check_blocks()` call\n\n assert!(node1.on_block_valid(&mock_block_id(1), &mut state1).is_ok());\n\n\n\n // Receive 3 `Commit` messages\n\n for peer in 0..3 {\n\n assert_eq!(state1.phase, PbftPhase::Committing);\n\n let msg = mock_msg(&PbftMessageType::Commit, 0, 1, block.clone(), vec![peer]);\n\n node1\n\n .on_peer_message(msg, &mut state1)\n\n .unwrap_or_else(handle_pbft_err);\n\n }\n\n assert_eq!(state1.phase, PbftPhase::Finished);\n\n\n\n // Spoof the `commit_blocks()` call\n", "file_path": "src/node.rs", "rank": 98, "score": 13.476219461155544 }, { "content": "}\n\n\n\nimpl<'a, T: Serialize + DeserializeOwned + 'a> Deref for DiskStorageReadGuard<'a, T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &T {\n\n &self.storage.data\n\n }\n\n}\n\n\n\nimpl<'a, T: 'a + Serialize + DeserializeOwned + fmt::Display> fmt::Display\n\n for DiskStorageReadGuard<'a, T>\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n (**self).fmt(f)\n\n }\n\n}\n\n\n\nimpl<'a, T: 'a + 
Serialize + DeserializeOwned> StorageReadGuard<'a, T>\n\n for DiskStorageReadGuard<'a, T>\n", "file_path": "src/storage/disk.rs", "rank": 99, "score": 13.416465684949582 } ]
Rust
src/dclic/src/main.rs
alexcpsec/dcli
076a7e21a0bccc453722c9fb69b960d433666736
/* * Copyright 2021 Mike Chambers * https://github.com/mikechambers/dcli * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ use dcli::apiinterface::ApiInterface; use dcli::character::Characters; use dcli::enums::platform::Platform; use dcli::error::Error; use dcli::output::Output; use dcli::utils::EXIT_FAILURE; use dcli::utils::{print_error, print_verbose, repeat_str, TSV_DELIM, TSV_EOL}; use structopt::StructOpt; async fn retrieve_characters( member_id: String, platform: Platform, verbose: bool, ) -> Result<Option<Characters>, Error> { let interface = ApiInterface::new(verbose)?; let characters = interface.retrieve_characters(&member_id, &platform).await?; Ok(characters) } #[derive(StructOpt, Debug)] #[structopt(verbatim_doc_comment)] struct Opt { #[structopt(short = "m", long = "member-id", required = true)] member_id: String, #[structopt(short = "p", long = "platform", required = true)] platform: Platform, #[structopt(short = "v", long = "verbose")] verbose: bool, #[structopt( short = "O", long = "output-format", default_value = "default" )] output: Output, } #[tokio::main] async fn main() { let opt = Opt::from_args(); print_verbose(&format!("{:#?}", opt), opt.verbose); let chars: Characters = match retrieve_characters(opt.member_id, opt.platform, opt.verbose) .await { Ok(e) => match e { Some(e) => e, None => { println!("No Characters found for member."); return; } }, Err(e) => { print_error("Error retrieving characters from API.", e); std::process::exit(EXIT_FAILURE); } }; match opt.output { Output::Default => { print_default(&chars); } Output::Tsv => { print_tsv(&chars); } } } fn print_default(characters: &Characters) { let col_w = 12; let col_id = 24; println!( "{:<0col_w$}{:<0col_id$}{:<0col_w$}", "CLASS", "ID", "STATUS", col_w = col_w, col_id = col_id, ); println!("{}", repeat_str("-", col_w * 2 + col_id)); for p in characters.characters.iter() { let label = if p == characters.get_last_active_ref().unwrap() { "LAST ACTIVE" } else { "" }; println!( "{:<0col_w$}{:<0col_id$}{:<0col_w$}", p.class_type, p.id, label, col_w = col_w, col_id = col_id, ); } } fn print_tsv(characters: 
&Characters) { for p in characters.characters.iter() { let label = if p == characters.get_last_active_ref().unwrap() { "LAST ACTIVE" } else { "" }; print!( "{c}{delim}{i}{delim}{s}{eol}", c = p.class_type, i = p.id, s = label, delim = TSV_DELIM, eol = TSV_EOL ); } }
/* * Copyright 2021 Mike Chambers * https://github.com/mikechambers/dcli * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ use dcli::apiinterface::ApiInterface; use dcli::character::Characters; use dcli::enums::platform::Platform; use dcli::error::Error; use dcli::output::Output; use dcli::utils::EXIT_FAILURE; use dcli::utils::{print_error, print_verbose, repeat_str, TSV_DELIM, TSV_EOL}; use structopt::StructOpt; async fn retrieve_characters( member_id: String, platform: Platform, verbose: bool, ) -> Result<Option<Characters>, Error> { let interface = ApiInterface::new(verbose)?; let characters = interface.retrieve_characters(&member_id, &platform).await?; Ok(characters) } #[derive(StructOpt, Debug)] #[structopt(verbatim_doc_comment)] struct Opt { #[structopt(short = "m", long = "member-id", required = true)] member_id: String, #[structopt(short = "p", long = "platform", required = true)] platform: Platform, #[structopt(short = "v", long = "verbose")] verbose: bool, #[structopt( short = "O", long = "output-format", default_value = "default" )] output: Output, } #[tokio::main]
fn print_default(characters: &Characters) { let col_w = 12; let col_id = 24; println!( "{:<0col_w$}{:<0col_id$}{:<0col_w$}", "CLASS", "ID", "STATUS", col_w = col_w, col_id = col_id, ); println!("{}", repeat_str("-", col_w * 2 + col_id)); for p in characters.characters.iter() { let label = if p == characters.get_last_active_ref().unwrap() { "LAST ACTIVE" } else { "" }; println!( "{:<0col_w$}{:<0col_id$}{:<0col_w$}", p.class_type, p.id, label, col_w = col_w, col_id = col_id, ); } } fn print_tsv(characters: &Characters) { for p in characters.characters.iter() { let label = if p == characters.get_last_active_ref().unwrap() { "LAST ACTIVE" } else { "" }; print!( "{c}{delim}{i}{delim}{s}{eol}", c = p.class_type, i = p.id, s = label, delim = TSV_DELIM, eol = TSV_EOL ); } }
async fn main() { let opt = Opt::from_args(); print_verbose(&format!("{:#?}", opt), opt.verbose); let chars: Characters = match retrieve_characters(opt.member_id, opt.platform, opt.verbose) .await { Ok(e) => match e { Some(e) => e, None => { println!("No Characters found for member."); return; } }, Err(e) => { print_error("Error retrieving characters from API.", e); std::process::exit(EXIT_FAILURE); } }; match opt.output { Output::Default => { print_default(&chars); } Output::Tsv => { print_tsv(&chars); } } }
function_block-full_function
[ { "content": "/// Command line tool for retrieving and managing the Destiny 2 manifest database.\n\n///\n\n/// Manifest will be stored in the specified local directory with the file name:\n\n/// manifest.sqlite3, along with meta-data with information about the downloaded\n\n/// version. This is used to to determine whether the remote version has been updated.\n\n///\n\n/// Created by Mike Chambers.\n\n/// https://www.mikechambers.com\n\n///\n\n/// Get support, request features or just chat on the dcli Discord server:\n\n/// https://discord.gg/2Y8bV2Mq3p\n\n///\n\n/// Get the latest version, download the source and log issues at:\n\n/// https://github.com/mikechambers/dcli\n\n///\n\n/// Released under an MIT License.\n\nstruct Opt {\n\n /// Directory where manifest will be stored. (optional)\n\n ///\n\n /// By default data will be loaded from and stored in the appropriate system\n\n /// local storage directory. Manifest will be stored in a sqlite3 database file\n\n /// named manifest.sqlite3\n\n #[structopt(short = \"D\", long = \"data-dir\", parse(from_os_str))]\n\n data_dir: Option<PathBuf>,\n\n\n\n ///Print out additional information\n\n ///\n\n ///Output is printed to stderr.\n\n #[structopt(short = \"v\", long = \"verbose\")]\n\n verbose: bool,\n\n\n\n ///Force a download of manifest regardless of whether it has been updated.\n\n #[structopt(short = \"F\", long = \"force\", conflicts_with = \"check\")]\n\n force: bool,\n\n\n\n ///Check whether a new manifest version is available, but do not download.\n", "file_path": "src/dclim/src/main.rs", "rank": 0, "score": 131764.65583997537 }, { "content": "/// Command line tool for retrieving primary Destiny 2 member ids.\n\n///\n\n/// Retrieves the primary Destiny 2 membershipId and platform for specified\n\n/// username or steam 64 id and platform. Returned data may be a membershipId\n\n/// on a platform different that the one specified, depending on the cross\n\n/// save status of the account. 
It will return the primary membershipId that\n\n/// all data will be associate with.\n\n///\n\n/// Created by Mike Chambers. \n\n/// https://www.mikechambers.com\n\n///\n\n/// Get support, request features or just chat on the dcli Discord server: \n\n/// https://discord.gg/2Y8bV2Mq3p\n\n///\n\n/// Get the latest version, download the source and log issues at: \n\n/// https://github.com/mikechambers/dcli\n\n///\n\n/// Released under an MIT License.\n\nstruct Opt {\n\n /// Platform for specified id\n\n ///\n\n /// Valid values are: xbox, playstation, stadia or steam\n\n #[structopt(short = \"p\", long = \"platform\", required = true)]\n\n platform: Platform,\n\n\n\n /// User name or steam 64 id\n\n ///\n\n /// User name (for Xbox, Playstation or Stadia) or steam 64 id for Steam / pc :\n\n /// 00000000000000000 (17 digit ID) for steam.\n\n #[structopt(short = \"n\", long = \"name\", required = true)]\n\n name: String,\n\n\n\n ///Print out additional information for the API call\n\n #[structopt(short = \"v\", long = \"verbose\")]\n\n verbose: bool,\n\n\n\n /// Format for command output\n\n ///\n", "file_path": "src/dclis/src/main.rs", "rank": 1, "score": 131762.55711768643 }, { "content": "/// Command line tool for retrieving date / time stamps for Destiny 2 weekly event\n\n/// moments\n\n///\n\n/// Created by Mike Chambers.\n\n/// https://www.mikechambers.com\n\n///\n\n/// Get support, request features or just chat on the dcli Discord server:\n\n/// https://discord.gg/2Y8bV2Mq3p\n\n///\n\n/// Get the latest version, download the source and log issues at:\n\n/// https://github.com/mikechambers/dcli\n\n///\n\n/// Released under an MIT License.\n\nstruct Opt {\n\n /// The weekly Destiny 2 moment to retrieve the date / time stamp for\n\n ///\n\n /// Valid values are now, current_weekly (previous Tuesday weekly reset),\n\n /// next_weekly (upcoming Tuesday weekly reset), current_daily, next_daily,\n\n /// current_xur (previous Friday Xur reset), next_xur (upcoming Friday 
Xur reset),\n\n /// current_trials (previous Friday Trials reset), next_trials (upcoming Friday Trials reset)\n\n #[structopt(short = \"T\", parse(try_from_str = parse_and_validate_moment), long = \"moment\", default_value = \"now\")]\n\n moment: Moment,\n\n\n\n /// Date / time format to output moment\n\n ///\n\n /// Valid values are rfc3339 (default), rfc2822 and unix (unix timestamp,\n\n /// number of non-leap seconds since January 1, 1970 0:00:00 UTC).\n\n #[structopt(short = \"f\", long = \"time-format\", default_value = \"rfc3339\")]\n\n time_format: DateTimeFormat,\n\n\n\n /// Print out additional information\n\n ///\n\n /// Output is printed to stderr.\n", "file_path": "src/dclitime/src/main.rs", "rank": 3, "score": 131759.79354121594 }, { "content": "/// Command line tool for downloading and syncing Destiny 2 Crucible activity\n\n/// history to a sqlite3 database file.\n\n///\n\n/// Created by Mike Chambers.\n\n/// https://www.mikechambers.com\n\n///\n\n/// Get support, request features or just chat on the dcli Discord server:\n\n/// https://discord.gg/2Y8bV2Mq3p\n\n///\n\n/// Get the latest version, download the source and log issues at:\n\n/// https://github.com/mikechambers/dcli\n\n///\n\n/// Released under an MIT License.\n\nstruct Opt {\n\n /// Print out additional information\n\n ///\n\n /// Output is printed to stderr.\n\n #[structopt(short = \"v\", long = \"verbose\")]\n\n verbose: bool,\n\n\n\n /// Format for command output\n\n ///\n\n /// Valid values are default (Default) and tsv.\n\n ///\n\n /// tsv outputs in a tab (\\t) seperated format of name / value pairs with lines\n\n /// ending in a new line character (\\n).\n\n #[structopt(\n\n short = \"O\",\n\n long = \"output-format\",\n\n default_value = \"default\"\n\n )]\n\n output: Output,\n\n\n", "file_path": "src/dclias/src/main.rs", "rank": 4, "score": 131759.75453463444 }, { "content": "/// Command line tool for retrieving and viewing Destiny 2 Crucible activity history.\n\n///\n\n/// Enables 
control of which stats are displayed based on game mode, moment range\n\n/// from which to retrieve them and character.\n\n///\n\n/// Created by Mike Chambers.\n\n/// https://www.mikechambers.com\n\n///\n\n/// Get support, request features or just chat on the dcli Discord server:\n\n/// https://discord.gg/2Y8bV2Mq3p\n\n///\n\n/// Get the latest version, download the source and log issues at:\n\n/// https://github.com/mikechambers/dcli\n\n///\n\n/// Released under an MIT License.\n\nstruct Opt {\n\n /// Destiny 2 API member id\n\n ///\n\n /// This is not the user name, but the member id retrieved from the Destiny API.\n\n #[structopt(short = \"m\", long = \"member-id\", required = true)]\n\n member_id: String,\n\n\n\n /// Platform for specified id\n\n ///\n\n /// Valid values are: xbox, playstation, stadia or steam.\n\n #[structopt(short = \"p\", long = \"platform\", required = true)]\n\n platform: Platform,\n\n\n\n /// Custom start time in RFC 3339 date / time format\n\n ///\n\n /// Must be a valid date in the past.\n\n ///\n\n /// Example RFC 3339 format: 2020-12-08T17:00:00.774187+00:00\n\n ///\n\n /// Required when --moment is set to custom, but otherwise not applicable.\n", "file_path": "src/dcliah/src/main.rs", "rank": 5, "score": 131759.14490319992 }, { "content": "/// Command line tool for retrieving historic Destiny 2 Crucible activity stats.\n\n///\n\n/// Retrieves stats based on the moment specified, up to, but excluding the current day.\n\n/// Enables control of which stats are retrieved via game mode, past time moment and\n\n/// character.\n\n///\n\n/// Created by Mike Chambers.\n\n/// https://www.mikechambers.com\n\n///\n\n/// Get support, request features or just chat on the dcli Discord server:\n\n/// https://discord.gg/2Y8bV2Mq3p\n\n///\n\n/// Get the latest version, download the source and log issues at:\n\n/// https://github.com/mikechambers/dcli\n\n///\n\n/// Released under an MIT License.\n\nstruct Opt {\n\n /// Destiny 2 API member id\n\n 
///\n\n /// This is not the user name, but the member id\n\n /// retrieved from the Destiny API.\n\n #[structopt(short = \"m\", long = \"member-id\", required = true)]\n\n member_id: String,\n\n\n\n /// Platform for specified id\n\n ///\n\n /// Valid values are: xbox, playstation, stadia or steam.\n\n #[structopt(short = \"p\", long = \"platform\", required = true)]\n\n platform: Platform,\n\n\n\n /// Time range to pull stats from\n\n ///\n\n /// Valid values include day (last day), daily (since last daily reset),\n\n /// week (last week), weekly (since last weekly reset on Tuesday), month\n\n /// (last month), weekend (since last Friday reset) and all_time.\n\n ///\n", "file_path": "src/dclics/src/main.rs", "rank": 6, "score": 131758.71963098 }, { "content": "/// Command line tool for retrieving and viewing Destiny 2 Crucible activity details.\n\n///\n\n/// By default the details on the last activity will be displayed, and you can\n\n/// specify the specific activity via the --activity-index argument. 
The index\n\n/// can be retrieved from dcliah, as well as directly from the sqlite datastore\n\n/// (activity.id)\n\n///\n\n/// Created by Mike Chambers.\n\n/// https://www.mikechambers.com\n\n///\n\n/// Get support, request features or just chat on the dcli Discord server:\n\n/// https://discord.gg/2Y8bV2Mq3p\n\n///\n\n/// Get the latest version, download the source and log issues at:\n\n/// https://github.com/mikechambers/dcli\n\n///\n\n/// Released under an MIT License.\n\nstruct Opt {\n\n /// Destiny 2 API member id\n\n ///\n\n /// This is not the user name, but the member id retrieved from the Destiny API.\n\n #[structopt(short = \"m\", long = \"member-id\", required = true)]\n\n member_id: String,\n\n\n\n /// Platform for specified id\n\n ///\n\n /// Valid values are: xbox, playstation, stadia or steam.\n\n #[structopt(short = \"p\", long = \"platform\", required = true)]\n\n platform: Platform,\n\n\n\n /// Activity mode from which to return last activity\n\n ///\n\n /// Supported values are all_pvp (default), control, clash, elimination,\n\n /// mayhem, iron_banner, all_private, rumble, pvp_competitive,\n\n /// quickplay and trials_of_osiris.\n\n ///\n\n /// Addition values available are crimsom_doubles, supremacy, survival,\n", "file_path": "src/dcliad/src/main.rs", "rank": 7, "score": 131758.57403296474 }, { "content": "/// Command line tool for retrieving current Destiny 2 activity status for player.\n\n///\n\n/// Created by Mike Chambers.\n\n/// https://www.mikechambers.com\n\n///\n\n/// Get support,request features or just chat on the dcli Discord server:\n\n/// https://discord.gg/2Y8bV2Mq3p\n\n///\n\n/// Get the latest version, download the source and log issues at:\n\n/// https://github.com/mikechambers/dcli\n\n///\n\n/// Released under an MIT License.\n\nstruct Opt {\n\n /// Platform for specified id\n\n ///\n\n /// Valid values are: xbox, playstation, stadia or steam.\n\n #[structopt(short = \"p\", long = \"platform\", required = true)]\n\n 
platform: Platform,\n\n\n\n /// Destiny 2 API member id\n\n ///\n\n /// This is not the user name, but the member id retrieved from the Destiny API.\n\n #[structopt(short = \"m\", long = \"member-id\", required = true)]\n\n member_id: String,\n\n\n\n ///Print out additional information\n\n ///\n\n ///Output is printed to stderr.\n\n #[structopt(short = \"v\", long = \"verbose\")]\n\n verbose: bool,\n\n\n\n /// Directory where Destiny 2 manifest database file is stored. (optional)\n", "file_path": "src/dclia/src/main.rs", "rank": 8, "score": 131756.5191233147 }, { "content": "/// Command line tool for searching the Destiny 2 manifest by hash ids.\n\n///\n\n/// Takes a hash / id from the Destiny 2 API, and returns data from the\n\n/// item from the manifest. May return more than one result.\n\n///\n\n/// Created by Mike Chambers.\n\n/// https://www.mikechambers.com\n\n///\n\n/// Get support, request features or just chat on the dcli Discord server:\n\n/// https://discord.gg/2Y8bV2Mq3p\n\n///\n\n/// Get the latest version, download the source and log issues at:\n\n/// https://github.com/mikechambers/dcli\n\n///\n\n/// Released under an MIT License.\n\nstruct Opt {\n\n /// Directory where Destiny 2 manifest database file is stored. 
(optional)\n\n ///\n\n /// This will normally be downloaded using the dclim tool, and stored in a file\n\n /// named manifest.sqlite3 (in the manifest directory specified when running\n\n /// dclim).\n\n #[structopt(short = \"D\", long = \"data-dir\", parse(from_os_str))]\n\n data_dir: Option<PathBuf>,\n\n\n\n ///The hash id from the Destiny 2 API for the item to be searched for.\n\n ///\n\n ///Example : 326060471\n\n #[structopt(long = \"hash\", short = \"h\", required = true)]\n\n hash: u32,\n\n\n\n /// Format for command output\n\n ///\n\n /// Valid values are default (Default) and tsv.\n\n ///\n\n /// tsv outputs in a tab (\\t) seperated format of columns with lines\n", "file_path": "src/dclims/src/main.rs", "rank": 9, "score": 131756.04039749282 }, { "content": "pub fn print_error(msg: &str, error: Error) {\n\n let app_name = env::current_exe()\n\n .ok()\n\n .as_ref()\n\n .map(Path::new)\n\n .and_then(Path::file_name)\n\n .and_then(OsStr::to_str)\n\n .map(String::from)\n\n .unwrap_or_else(|| \"\".to_string());\n\n\n\n eprintln!(\"{} : v{}\", app_name, VERSION);\n\n\n\n eprintln!(\"{}\", msg);\n\n eprintln!(\"{}\", error);\n\n\n\n match error {\n\n Error::InvalidParameters => {\n\n eprintln!(\"This can occur if --platform is set incorrectly.\");\n\n }\n\n Error::ParameterParseFailure => {\n", "file_path": "src/dcli/src/utils.rs", "rank": 11, "score": 119694.09230113537 }, { "content": "pub fn prepend_base_url<'de, D>(deserializer: D) -> Result<String, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n String::deserialize(deserializer).map(|a| {\n\n let mut s = String::from(RESOURCE_BASE_URL);\n\n s.push_str(&a);\n\n s\n\n })\n\n}\n\n\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 12, "score": 117219.39833122538 }, { "content": "fn get_in_orbit_human() -> String {\n\n \"Currently sitting in Orbit\".to_string()\n\n}\n", "file_path": "src/dclia/src/main.rs", "rank": 13, "score": 114783.14527930741 }, { "content": "pub fn repeat_str(s: 
&str, count: usize) -> String {\n\n std::iter::repeat(s).take(count).collect::<String>()\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 14, "score": 109662.97636184448 }, { "content": "pub fn build_tsv(name_values: Vec<(&str, String)>) -> String {\n\n name_values\n\n .iter()\n\n .map(|x| format!(\"{}{}{}{}\", x.0, TSV_DELIM, x.1, TSV_EOL))\n\n .collect()\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 15, "score": 107956.07682961691 }, { "content": "pub fn string_to_i64<'de, D>(deserializer: D) -> Result<i64, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n let out = match s.parse::<i64>() {\n\n Ok(e) => e,\n\n Err(e) => {\n\n return Err(serde::de::Error::custom(&format!(\n\n \"Could not parse string to i64 : {}\",\n\n e\n\n )))\n\n }\n\n };\n\n\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 16, "score": 104994.44335105433 }, { "content": "fn print_default(results: &SyncResult, store: &ActivityStoreInterface) {\n\n println!();\n\n println!(\"{}\", \"Activity sync complete\".to_string().to_uppercase());\n\n println!(\"------------------------------------------------\");\n\n\n\n let s = if results.total_synced == 1 {\n\n \"y\"\n\n } else {\n\n \"ies\"\n\n };\n\n\n\n println!(\"{} activit{} synced\", results.total_synced, s);\n\n\n\n let total_available = results.total_available;\n\n let queue_str = if total_available == 1 {\n\n \"1 activity in queue. 
Activity will be synced the next time app is run.\"\n\n .to_string()\n\n } else if total_available == 0 {\n\n \"No activities in queue\".to_string()\n\n } else {\n", "file_path": "src/dclias/src/main.rs", "rank": 17, "score": 102483.16855090288 }, { "content": "fn generate_score(data: &CrucibleActivity) -> String {\n\n let mut tokens: Vec<String> = Vec::new();\n\n\n\n for t in data.teams.values() {\n\n tokens.push(t.score.to_string());\n\n tokens.push(\"-\".to_string());\n\n }\n\n\n\n tokens.pop();\n\n\n\n tokens.join(\"\")\n\n}\n\n\n\nasync fn get_combat_ratings(\n\n data: &CrucibleActivity,\n\n verbose: bool,\n\n) -> HashMap<u64, f32> {\n\n let mut players: Vec<&Player> = Vec::new();\n\n\n\n for t in data.teams.values() {\n", "file_path": "src/dcliad/src/main.rs", "rank": 18, "score": 102298.68562882376 }, { "content": "fn is_valid_steam_id(steam_id: &str) -> bool {\n\n //make sure it can be parsed into a u64\n\n let parses = match steam_id.parse::<u64>() {\n\n Ok(_e) => true,\n\n Err(_e) => false,\n\n };\n\n\n\n parses && steam_id.chars().count() == 17\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(verbatim_doc_comment)]\n", "file_path": "src/dclis/src/main.rs", "rank": 19, "score": 100997.71469160021 }, { "content": "//this could use some more work and polish. 
Add \"and\" before the last item.\n\npub fn human_duration(seconds: u32) -> String {\n\n let dt =\n\n Utc.ymd(0, 1, 1).and_hms(0, 0, 0) + Duration::seconds(seconds as i64);\n\n let year = build_time_str(dt.year(), \"year\");\n\n let mon = build_time_str(dt.month() as i32 - 1, \"month\");\n\n let day = build_time_str(dt.day() as i32 - 1, \"day\");\n\n let hour = build_time_str(dt.hour() as i32, \"hour\");\n\n let min = build_time_str(dt.minute() as i32, \"minute\");\n\n let sec = build_time_str(dt.second() as i32, \"second\");\n\n //collect all items into a vector\n\n let t = vec![year, mon, day, hour, min, sec];\n\n\n\n //remove empty items\n\n let mut t = t\n\n .into_iter()\n\n .filter(|i| i.trim().chars().count() > 0)\n\n .collect::<Vec<String>>();\n\n\n\n //add an add before the last item\n\n if t.len() > 1 {\n\n t.insert(t.len() - 1, \"and\".to_string());\n\n }\n\n\n\n t.join(\" \")\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 20, "score": 99100.96237446944 }, { "content": "//https://stackoverflow.com/a/38406885/10232\n\npub fn uppercase_first_char(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),\n\n }\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 21, "score": 99096.5421319324 }, { "content": "pub fn truncate_ascii_string(input: &str, max_len: usize) -> String {\n\n if input.chars().count() <= max_len {\n\n return input.to_string();\n\n }\n\n\n\n format!(\"{:.len$}...\", input, len = max_len - 3)\n\n}\n", "file_path": "src/dcli/src/utils.rs", "rank": 22, "score": 98201.72348086926 }, { "content": "fn print_default(\n\n mode: Mode,\n\n activity_type_name: &str,\n\n activity_name: &str,\n\n place_name: &str,\n\n _destination_name: &str,\n\n description: &str,\n\n) {\n\n let out = build_human_status(\n\n mode,\n\n activity_type_name,\n\n activity_name,\n\n place_name,\n\n _destination_name,\n\n description,\n\n );\n\n\n\n 
println!(\"{}\", out);\n\n}\n\n\n", "file_path": "src/dclia/src/main.rs", "rank": 24, "score": 96309.74714868359 }, { "content": "fn print_default(\n\n data: &CrucibleActivity,\n\n elo_hash: &HashMap<u64, f32>,\n\n member_id: &str,\n\n details: bool,\n\n weapon_count: u32,\n\n verbose: bool,\n\n) {\n\n let col_w = 8;\n\n let name_col_w = 24;\n\n\n\n let mut activity_duration = \"\".to_string();\n\n let mut completion_reason = \"\".to_string();\n\n let mut standing_str = \"\".to_string();\n\n\n\n if let Some(e) = data.get_member_performance(member_id) {\n\n completion_reason =\n\n if e.stats.completion_reason == CompletionReason::Unknown {\n\n \"\".to_string()\n\n } else {\n", "file_path": "src/dcliad/src/main.rs", "rank": 25, "score": 96309.74714868359 }, { "content": "fn print_default(\n\n data: &[CruciblePlayerActivityPerformance],\n\n activity_limit: &u32,\n\n mode: &Mode,\n\n time_period: &DateTimePeriod,\n\n moment: &Moment,\n\n end_moment: &Moment,\n\n weapon_count: &u32,\n\n weapon_sort: &WeaponSort,\n\n) {\n\n //todo: might want to look at buffering output\n\n //https://rust-cli.github.io/book/tutorial/output.html\n\n\n\n let start_time = time_period.get_start();\n\n let end_time = time_period.get_end();\n\n\n\n let performances = data;\n\n\n\n let cpp: Vec<&CruciblePlayerPerformance> =\n\n performances.iter().map(|x| &x.performance).collect();\n", "file_path": "src/dcliah/src/main.rs", "rank": 26, "score": 96309.74714868359 }, { "content": "pub fn print_verbose(msg: &str, verbose: bool) {\n\n if !verbose {\n\n return;\n\n }\n\n\n\n eprintln!(\"{}\", msg);\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 27, "score": 94993.37476452472 }, { "content": "pub fn f32_are_equal(a: f32, b: f32) -> bool {\n\n (a - b).abs() < f32::EPSILON\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 28, "score": 94472.39146253924 }, { "content": "fn parse_and_validate_mode(src: &str) -> Result<Mode, String> {\n\n let mode = Mode::from_str(src)?;\n\n\n\n if 
!mode.is_crucible() {\n\n return Err(format!(\"Unsupported mode specified : {}\", src));\n\n }\n\n\n\n Ok(mode)\n\n}\n\n\n", "file_path": "src/dcliah/src/main.rs", "rank": 29, "score": 92737.35109825984 }, { "content": "//we do a custom parse / validation here so we can reuse Moment enum\n\n//across apps but not have to have all apps support all time ranges.\n\nfn parse_and_validate_moment(src: &str) -> Result<Moment, String> {\n\n let moment = Moment::from_str(src)?;\n\n\n\n //note, we positive capture what we want in case new properties\n\n //are added in the future\n\n match moment {\n\n Moment::Now => {}\n\n Moment::Daily => {}\n\n Moment::NextDaily => {}\n\n Moment::Weekend => {}\n\n Moment::NextWeekend => {}\n\n Moment::Weekly => {}\n\n Moment::NextWeekly => {}\n\n Moment::Day => {}\n\n Moment::NextDay => {}\n\n Moment::Week => {}\n\n Moment::NextWeek => {}\n\n Moment::Month => {}\n\n Moment::NextMonth => {}\n\n Moment::AllTime => {}\n\n _ => {\n\n return Err(format!(\"Unsupported moment specified : {}\", src));\n\n }\n\n };\n\n\n\n Ok(moment)\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(verbatim_doc_comment)]\n", "file_path": "src/dclitime/src/main.rs", "rank": 30, "score": 92737.35109825984 }, { "content": "//TODO: we may not need custom validation here now\n\nfn parse_and_validate_moment(src: &str) -> Result<Moment, String> {\n\n let moment = Moment::from_str(src)?;\n\n\n\n Ok(moment)\n\n}\n\n\n", "file_path": "src/dcliah/src/main.rs", "rank": 31, "score": 92737.35109825984 }, { "content": "fn parse_and_validate_moment(src: &str) -> Result<Moment, String> {\n\n let moment = Moment::from_str(src)?;\n\n\n\n //note, we positive capture what we want in case new properties\n\n //are added in the future\n\n match moment {\n\n Moment::Daily => {}\n\n Moment::Weekend => {}\n\n Moment::Weekly => {}\n\n Moment::Day => {}\n\n Moment::Week => {}\n\n Moment::Month => {}\n\n Moment::AllTime => {}\n\n _ => {\n\n return Err(format!(\"Unsupported moment specified 
: {}\", src));\n\n }\n\n };\n\n\n\n Ok(moment)\n\n}\n\n\n", "file_path": "src/dclics/src/main.rs", "rank": 32, "score": 92737.35109825984 }, { "content": "fn parse_and_validate_mode(src: &str) -> Result<Mode, String> {\n\n let mode = Mode::from_str(src)?;\n\n\n\n if !mode.is_crucible() {\n\n return Err(format!(\"Unsupported mode specified : {}\", src));\n\n }\n\n\n\n Ok(mode)\n\n}\n\n\n", "file_path": "src/dcliad/src/main.rs", "rank": 33, "score": 92737.35109825984 }, { "content": "pub fn build_time_str(t: i32, label: &str) -> String {\n\n let mut out: String = \"\".to_string();\n\n if t > 0 {\n\n out.push_str(&format!(\"{} {}\", t, label));\n\n\n\n if t > 1 {\n\n out.push('s');\n\n }\n\n }\n\n\n\n out\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 34, "score": 90372.69223011783 }, { "content": "pub fn format_f32(val: f32, precision: usize) -> String {\n\n format!(\"{:.p$}\", val, p = precision)\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 35, "score": 90372.69223011783 }, { "content": "fn load_manifest_info(path: &PathBuf) -> Result<ManifestInfo, Error> {\n\n let json = fs::read_to_string(path)?;\n\n let m = ManifestInfo::from_json(&json)?;\n\n\n\n Ok(m)\n\n}\n\n\n\n//should this move to ApiClient?\n\nasync fn download_manifest(\n\n url: &str,\n\n path: &PathBuf,\n\n print_url: bool,\n\n) -> Result<(), Error> {\n\n let client: ApiClient = ApiClient::new(print_url)?;\n\n\n\n //Download the manifest\n\n let mut response = client.call(url).await?;\n\n\n\n //create a Vector to store the bytes for the download\n\n let mut out: Vec<u8> = Vec::new();\n", "file_path": "src/dclim/src/main.rs", "rank": 36, "score": 89722.68371951154 }, { "content": "pub fn human_date_format(start_time: &DateTime<Utc>) -> String {\n\n let local = start_time.with_timezone(&Local);\n\n let format_str = if Utc::now() - *start_time > Duration::days(6) {\n\n \"%B %-d, %Y\"\n\n } else if local.day() == Local::now().day() {\n\n \"Today at %-I:%M %p\"\n\n } else {\n\n \"%A 
at %-I:%M %p\"\n\n };\n\n\n\n format!(\"{}\", local.format(format_str))\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 37, "score": 89517.20980821285 }, { "content": "fn parse_rfc3339(src: &str) -> Result<DateTime<Utc>, String> {\n\n let d =\n\n match DateTime::parse_from_rfc3339(src) {\n\n Ok(e) => e,\n\n Err(_e) => return Err(\n\n \"Invalid RFC 3339 Date / Time String : Example : 2020-12-08T17:00:00.774187+00:00\"\n\n .to_string(),\n\n ),\n\n };\n\n\n\n let d = d.with_timezone(&Utc);\n\n\n\n if d > Utc::now() {\n\n return Err(\"start-date must be in the past.\".to_string());\n\n }\n\n\n\n Ok(d)\n\n}\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(verbatim_doc_comment)]\n", "file_path": "src/dcliah/src/main.rs", "rank": 38, "score": 88716.55890127974 }, { "content": "fn print_default(member: &Membership) {\n\n let default = &\"\".to_string();\n\n let n = member.display_name.as_ref().unwrap_or(default);\n\n\n\n let col_w = 15;\n\n println!(\"{:<0col_w$}{}\", \"Display Name\", n, col_w = col_w);\n\n println!(\"{:<0col_w$}{}\", \"id\", member.id, col_w = col_w);\n\n println!(\"{:<0col_w$}{}\", \"Platform\", member.platform, col_w = col_w);\n\n println!(\n\n \"{:<0col_w$}{}\",\n\n \"Platform Id\",\n\n member.platform.to_id(),\n\n col_w = col_w\n\n );\n\n}\n", "file_path": "src/dclis/src/main.rs", "rank": 39, "score": 86782.13796035758 }, { "content": "pub fn standing_default() -> u32 {\n\n STANDING_UNKNOWN_MAGIC_NUMBER\n\n}\n\n\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 40, "score": 84659.8967883896 }, { "content": "pub fn determine_data_dir(dir: Option<PathBuf>) -> Result<PathBuf, Error> {\n\n let path = match dir {\n\n Some(e) => e,\n\n None => {\n\n let dld = dirs_next::data_local_dir()\n\n .ok_or(Error::SystemDirectoryNotFound)?;\n\n dld.join(\"dcli\")\n\n }\n\n };\n\n\n\n if !path.exists() {\n\n std::fs::create_dir_all(&path)?;\n\n }\n\n\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 41, "score": 82385.4794391635 
}, { "content": "fn print_default(results: Vec<FindResult>) {\n\n if results.is_empty() {\n\n println!(\"No items found.\");\n\n return;\n\n }\n\n\n\n let col_w = 15;\n\n\n\n println!(\n\n \"Found {} item{}\",\n\n results.len(),\n\n if results.len() > 1 { \"s\" } else { \"\" }\n\n );\n\n println!(\"-----------------------------\");\n\n for r in results.iter() {\n\n let default: String = \"\".to_string();\n\n let description = r\n\n .display_properties\n\n .description\n\n .as_ref()\n", "file_path": "src/dclims/src/main.rs", "rank": 42, "score": 80873.63163362067 }, { "content": "pub fn property_to_standing<'de, D>(deserializer: D) -> Result<Standing, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n #[derive(Deserialize)]\n\n struct Outer {\n\n pub basic: Inner,\n\n }\n\n\n\n #[derive(Deserialize)]\n\n struct Inner {\n\n pub value: f32,\n\n }\n\n\n\n let helper = Outer::deserialize(deserializer)?;\n\n Ok(Standing::from_f32(helper.basic.value))\n\n}\n\n*/\n\n\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 43, "score": 78933.1869724931 }, { "content": "pub fn property_to_i32_value<'de, D>(deserializer: D) -> Result<i32, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n #[derive(Deserialize)]\n\n struct Outer {\n\n pub basic: Inner,\n\n }\n\n\n\n #[derive(Deserialize)]\n\n struct Inner {\n\n pub value: f32,\n\n }\n\n\n\n let helper = <Outer>::deserialize(deserializer)?;\n\n Ok(helper.basic.value as i32)\n\n}\n\n\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 44, "score": 77604.23009413667 }, { "content": "pub fn property_to_u32_value<'de, D>(deserializer: D) -> Result<u32, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n #[derive(Deserialize)]\n\n struct Outer {\n\n pub basic: Inner,\n\n }\n\n\n\n #[derive(Deserialize)]\n\n struct Inner {\n\n pub value: f32,\n\n }\n\n\n\n let helper = <Outer>::deserialize(deserializer)?;\n\n Ok(helper.basic.value as u32)\n\n}\n\n\n", "file_path": 
"src/dcli/src/response/utils.rs", "rank": 45, "score": 77604.23009413667 }, { "content": "pub fn str_to_int<'de, T, D>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n T: FromStr,\n\n T::Err: Display,\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n T::from_str(&s).map_err(serde::de::Error::custom)\n\n}\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 46, "score": 76154.45234509566 }, { "content": "fn print_tsv(results: &SyncResult, store: &ActivityStoreInterface) {\n\n let mut name_values: Vec<(&str, String)> = Vec::new();\n\n\n\n name_values.push((\"total_synced\", results.total_synced.to_string()));\n\n name_values.push((\"total_available\", results.total_available.to_string()));\n\n name_values.push((\"path\", store.get_storage_path()));\n\n\n\n print!(\"{}\", build_tsv(name_values));\n\n}\n\n\n", "file_path": "src/dclias/src/main.rs", "rank": 47, "score": 74122.58407070232 }, { "content": "//TODO: should pass in by reference here\n\nfn print_default(data: PvpStatsData, mode: Mode, moment: Moment) {\n\n let p = format_f32;\n\n\n\n let moment_string = match moment {\n\n Moment::Daily => \"since the daily reset\",\n\n Moment::Weekend => \"since last Friday\",\n\n Moment::Weekly => \"since the weekly reset\",\n\n Moment::Day => \"for the last day\",\n\n Moment::Week => \"for the last week\",\n\n Moment::Month => \"for the last month\",\n\n Moment::AllTime => \"for all time\",\n\n _ => \"\",\n\n };\n\n\n\n let title: String =\n\n format!(\"Destiny 2 stats for {:#} {}\", mode, moment_string);\n\n\n\n println!();\n\n println!(\"{}\", title);\n\n println!(\"{}\", repeat_str(\"=\", title.chars().count()));\n", "file_path": "src/dclics/src/main.rs", "rank": 48, "score": 70095.16507231737 }, { "content": "fn check_width(s: &str, f: &mut fmt::Formatter) -> fmt::Result {\n\n if let Some(width) = f.width() {\n\n write!(f, \"{:width$}\", s.to_string(), width = width)\n\n } else {\n\n write!(f, \"{}\", s)\n\n 
}\n\n}\n\nimpl fmt::Display for CharacterClass {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let out = match self {\n\n CharacterClass::Titan => \"Titan\",\n\n CharacterClass::Hunter => \"Hunter\",\n\n CharacterClass::Warlock => \"Warlock\",\n\n CharacterClass::Unknown => \"Unknown\",\n\n };\n\n\n\n check_width(out, f)\n\n }\n\n}\n\n\n", "file_path": "src/dcli/src/enums/character.rs", "rank": 49, "score": 69713.01735621314 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct DestinyResponseSteam {\n\n #[serde(rename = \"Response\")]\n\n response: Option<DestinyResponseMember>,\n\n\n\n #[serde(flatten)]\n\n status: DestinyResponseStatus,\n\n}\n\n\n\nimpl IsDestinyAPIResponse for DestinyResponseSteam {\n\n fn get_status(&self) -> &DestinyResponseStatus {\n\n &self.status\n\n }\n\n}\n\n\n", "file_path": "src/dclis/src/memberidsearch.rs", "rank": 50, "score": 63262.12911748748 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct DestinySearchResponse {\n\n #[serde(rename = \"Response\")]\n\n response: Option<Vec<DestinyResponseMember>>,\n\n\n\n #[serde(flatten)]\n\n status: DestinyResponseStatus,\n\n}\n\n\n\nimpl IsDestinyAPIResponse for DestinySearchResponse {\n\n fn get_status(&self) -> &DestinyResponseStatus {\n\n &self.status\n\n }\n\n}\n\n\n", "file_path": "src/dclis/src/memberidsearch.rs", "rank": 51, "score": 63262.12911748748 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct DestinyResponseMember {\n\n #[serde(rename = \"membershipType\")]\n\n membership_type: u64,\n\n\n\n #[serde(rename = \"membershipId\")]\n\n membership_id: String,\n\n\n\n #[serde(rename = \"displayName\")]\n\n display_name: Option<String>,\n\n}\n\n\n\npub struct Membership {\n\n pub platform: Platform,\n\n pub id: String,\n\n pub display_name: Option<String>,\n\n}\n", "file_path": "src/dclis/src/memberidsearch.rs", "rank": 52, "score": 63262.12911748748 }, { "content": "fn print_tsv(\n\n mode: Mode,\n\n activity_type_name: 
&str,\n\n activity_name: &str,\n\n place_name: &str,\n\n destination_name: &str,\n\n description: &str,\n\n in_activity: bool,\n\n) {\n\n //figure out if they are in orbit since bungie doesnt give us\n\n //a mode for it\n\n let human_status = if mode == Mode::None && in_activity {\n\n get_in_orbit_human()\n\n } else {\n\n build_human_status(\n\n mode,\n\n activity_type_name,\n\n activity_name,\n\n place_name,\n\n destination_name,\n", "file_path": "src/dclia/src/main.rs", "rank": 53, "score": 60847.32498804688 }, { "content": "fn print_tsv(\n\n data: PvpStatsData,\n\n member_id: &str,\n\n character_id: &str,\n\n platform: &Platform,\n\n mode: &Mode,\n\n period: &MomentPeriod,\n\n) {\n\n let mut name_values: Vec<(&str, String)> = Vec::new();\n\n\n\n name_values.push((\"member_id\", member_id.to_string()));\n\n name_values.push((\"platform\", format!(\"{}\", platform)));\n\n name_values.push((\"platform_id\", format!(\"{}\", platform.to_id())));\n\n name_values.push((\"character_id\", character_id.to_string()));\n\n\n\n name_values.push((\"start_moment_dt\", format!(\"{}\", period.start)));\n\n name_values.push((\"end_moment_dt\", format!(\"{}\", period.end)));\n\n\n\n name_values.push((\"moment_human\", format!(\"{}\", period.moment)));\n\n name_values.push((\"mode\", format!(\"{}\", mode)));\n", "file_path": "src/dclics/src/main.rs", "rank": 54, "score": 60847.32498804688 }, { "content": "fn save_manifest_info(\n\n manifest_info: &ManifestInfo,\n\n path: &PathBuf,\n\n) -> Result<(), Error> {\n\n let json = manifest_info.to_json()?;\n\n\n\n //opens a file for writing. 
creates if it doesn't exist, otherwise\n\n //overwrites it\n\n fs::write(path, &json)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/dclim/src/main.rs", "rank": 55, "score": 59640.951371153176 }, { "content": "fn build_human_status(\n\n mode: Mode,\n\n activity_type_name: &str,\n\n activity_name: &str,\n\n place_name: &str,\n\n _destination_name: &str,\n\n description: &str,\n\n) -> String {\n\n if mode == Mode::Patrol {\n\n format!(\"Exploring on {}\", place_name)\n\n } else if mode.is_gambit() || mode.is_crucible() {\n\n format!(\n\n \"Playing {} on {} ({})\",\n\n activity_type_name, activity_name, description\n\n )\n\n } else if mode == Mode::Strike {\n\n format!(\n\n \"Running {} {} on {}\",\n\n activity_name, activity_type_name, place_name\n\n )\n", "file_path": "src/dclia/src/main.rs", "rank": 56, "score": 59640.951371153176 }, { "content": "fn print_tsv_orbit() {\n\n print_tsv(Mode::None, \"\", \"\", \"Orbit\", \"\", \"\", true);\n\n}\n\n\n", "file_path": "src/dclia/src/main.rs", "rank": 57, "score": 59640.951371153176 }, { "content": "fn print_tsv_no_activity() {\n\n print_tsv(Mode::None, \"\", \"\", \"\", \"\", \"\", false);\n\n}\n\n\n", "file_path": "src/dclia/src/main.rs", "rank": 58, "score": 59640.951371153176 }, { "content": "fn find_previous_moment(\n\n past_reset: DateTime<Utc>,\n\n interval: i64,\n\n) -> DateTime<Utc> {\n\n let now: DateTime<Utc> = Utc::now();\n\n\n\n //get total seconds between now and the past reset\n\n //take the mod of that divided by a week in seconds\n\n //subtract that amount from current date / time to find previous reset\n\n now - Duration::seconds((now - past_reset).num_seconds() % interval)\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 59, "score": 59640.951371153176 }, { "content": "/// Clears screen. 
Works across platforms\n\npub fn clear_scr() {\n\n let mut stdout = stdout();\n\n //just silently fail if something goes wrong\n\n //note execute flushes queue immediately\n\n let _ = execute!(stdout, terminal::Clear(terminal::ClearType::All));\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 60, "score": 56983.750321301406 }, { "content": "pub fn clear_terminal() {\n\n print!(\"{}[2J\", 27 as char);\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 61, "score": 56979.25683203024 }, { "content": "pub fn calculate_per_activity_average(\n\n value: u32,\n\n total_activities: u32,\n\n) -> f32 {\n\n if total_activities == 0 {\n\n return 0.0;\n\n }\n\n\n\n value as f32 / total_activities as f32\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 62, "score": 54800.34897485685 }, { "content": "pub fn calculate_kills_deaths_assists(\n\n kills: u32,\n\n deaths: u32,\n\n assists: u32,\n\n) -> f32 {\n\n let kills = kills as f32;\n\n let assists = assists as f32;\n\n\n\n let t = kills + (assists / 2.0);\n\n if deaths > 0 {\n\n t / deaths as f32\n\n } else {\n\n t\n\n }\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 63, "score": 54800.34897485685 }, { "content": "pub fn check_destiny_response_status(\n\n status: &DestinyResponseStatus,\n\n) -> Result<(), Error> {\n\n match status.error_code {\n\n 1 => Ok(()),\n\n 5 => Err(Error::ApiNotAvailableException),\n\n 7 => Err(Error::ParameterParseFailure),\n\n 18 => Err(Error::InvalidParameters),\n\n 1665 => Err(Error::PrivacyException),\n\n 2102 => Err(Error::ApiKeyMissingFromRequest),\n\n _ => Err(Error::ApiStatus {\n\n description: format!(\n\n \"Response Status Error : {}({}) : {}\",\n\n status.error_status, status.error_code, status.message\n\n ),\n\n }),\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n", "file_path": "src/dcli/src/response/drs.rs", "rank": 64, "score": 53813.17869048595 }, { "content": "fn print_tsv(member: &Membership) {\n\n let default = 
&\"\".to_string();\n\n\n\n let n = member.display_name.as_ref().unwrap_or(default);\n\n\n\n print!(\n\n \"{d}{delim}{i}{delim}{p}{delim}{pi}{eol}\",\n\n d = n,\n\n i = member.id,\n\n p = member.platform,\n\n pi = member.platform.to_id(),\n\n delim = TSV_DELIM,\n\n eol = TSV_EOL,\n\n );\n\n}\n\n\n", "file_path": "src/dclis/src/main.rs", "rank": 65, "score": 53687.51545108223 }, { "content": "//str_to_datetime\n\npub fn str_to_datetime<'de, D>(\n\n deserializer: D,\n\n) -> Result<DateTime<Utc>, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n let n = match NaiveDateTime::parse_from_str(&s, API_DATE_TIME_FORMAT) {\n\n Ok(e) => e,\n\n Err(e) => {\n\n return Err(serde::de::Error::custom(&format!(\n\n \"Could not parse date-time : {}\",\n\n e\n\n )))\n\n }\n\n };\n\n\n\n let dt = DateTime::<Utc>::from_utc(n, Utc);\n\n\n\n Ok(dt)\n\n}\n\n\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 66, "score": 49850.4068557365 }, { "content": "fn print_tsv(results: Vec<FindResult>) {\n\n if results.is_empty() {\n\n println!();\n\n return;\n\n }\n\n\n\n for (i, r) in results.iter().enumerate() {\n\n let default: String = \"\".to_string();\n\n let description = r\n\n .display_properties\n\n .description\n\n .as_ref()\n\n .unwrap_or(&default);\n\n let icon_path =\n\n r.display_properties.icon_path.as_ref().unwrap_or(&default);\n\n\n\n print!(\n\n \"{i}{delim}{n}{delim}{d}{delim}{hi}{delim}{ip}{eol}\",\n\n i = i,\n\n n = r.display_properties.name,\n\n d = description,\n\n hi = r.display_properties.has_icon,\n\n ip = icon_path,\n\n delim = TSV_DELIM,\n\n eol = TSV_EOL,\n\n );\n\n }\n\n}\n", "file_path": "src/dclims/src/main.rs", "rank": 67, "score": 49850.4068557365 }, { "content": "//BUG: this doesnt get called if the property is not include in the JSON\n\n//https://github.com/serde-rs/json/issues/734\n\npub fn property_to_option_float<'de, D>(\n\n deserializer: D,\n\n) -> Result<Option<f32>, 
D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n #[derive(Deserialize, Debug)]\n\n struct Outer {\n\n pub basic: Inner,\n\n }\n\n\n\n #[derive(Deserialize, Debug)]\n\n struct Inner {\n\n pub value: f32,\n\n }\n\n\n\n Option::<Outer>::deserialize(deserializer).map(|o: Option<Outer>| match o {\n\n Some(e) => Some(e.basic.value),\n\n None => None,\n\n })\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 68, "score": 48927.38659668627 }, { "content": "pub fn get_last_weekly_reset() -> DateTime<Utc> {\n\n //get a hardcoded past reset date / time (17:00 UTC every tuesday)\n\n let past_reset: DateTime<Utc> = Utc.ymd(2020, 11, 10).and_hms(17, 0, 0);\n\n find_previous_moment(past_reset, WEEK_IN_SECONDS)\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 69, "score": 48050.509309567205 }, { "content": "pub fn get_last_friday_reset() -> DateTime<Utc> {\n\n //get a hardcoded past reset date / time (17:00 UTC every friday)\n\n let past_reset: DateTime<Utc> = Utc.ymd(2020, 12, 4).and_hms(18, 0, 0);\n\n find_previous_moment(past_reset, WEEK_IN_SECONDS)\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 70, "score": 48050.509309567205 }, { "content": "pub fn get_destiny2_launch_date() -> DateTime<Utc> {\n\n Utc.ymd(2017, 9, 6).and_hms(17, 0, 0)\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 71, "score": 48050.509309567205 }, { "content": "pub fn get_last_daily_reset() -> DateTime<Utc> {\n\n //get a hardcoded past daily date / time (17:00 UTC every tuesday)\n\n let past_reset: DateTime<Utc> = Utc.ymd(2020, 11, 10).and_hms(18, 0, 0);\n\n\n\n find_previous_moment(past_reset, DAY_IN_SECONDS)\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 72, "score": 48050.509309567205 }, { "content": "pub fn prepend_base_url_option<'de, D>(\n\n deserializer: D,\n\n) -> Result<Option<String>, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n Option::<String>::deserialize(deserializer).map(|o: Option<String>| match o\n\n 
{\n\n Some(e) => {\n\n let mut s = String::from(RESOURCE_BASE_URL);\n\n s.push_str(&e);\n\n Some(s)\n\n }\n\n None => None,\n\n })\n\n}\n\n\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 73, "score": 48050.509309567205 }, { "content": "/// Takes a Destiny 2 API has and converts it to a Destiny 2 manifest db index value\n\npub fn convert_hash_to_id(hash: u32) -> i64 {\n\n let mut id: i64 = hash as i64;\n\n\n\n if (id & (1 << (32 - 1))) != 0 {\n\n id -= 1 << 32;\n\n }\n\n\n\n id\n\n}\n\n\n\npub struct ManifestInterface {\n\n manifest_db: SqliteConnection,\n\n activity_definition_cache: HashMap<i64, ActivityDefinitionData>,\n\n inventory_item_definition_cache: HashMap<i64, InventoryItemDefinitionData>,\n\n historical_stats_definition_cache:\n\n HashMap<String, HistoricalStatsDefinition>,\n\n}\n\n\n\nimpl ManifestInterface {\n\n pub async fn new(\n", "file_path": "src/dcli/src/manifestinterface.rs", "rank": 74, "score": 47408.430901628635 }, { "content": "pub fn calculate_ratio(a: u32, b: u32) -> f32 {\n\n if b == 0 {\n\n return 0.0;\n\n }\n\n\n\n a as f32 / b as f32\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 75, "score": 45922.934692636634 }, { "content": "pub fn calculate_percent(value: u32, total: u32) -> f32 {\n\n if total == 0 {\n\n return 0.0;\n\n }\n\n\n\n (value as f32 / total as f32) * 100.0\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 76, "score": 44123.03714646734 }, { "content": "pub fn calculate_avg(total: f32, count: u32) -> f32 {\n\n if count == 0 {\n\n return 0.0;\n\n }\n\n\n\n total / count as f32\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 77, "score": 44123.03714646734 }, { "content": "CREATE INDEX character_activity_stats_char_index ON character_activity_stats (character);\n", "file_path": "src/dcli/actitvity_store_schema.sql", "rank": 78, "score": 43786.01075595214 }, { "content": "pub fn calculate_kills_deaths_ratio(kills: u32, deaths: u32) -> f32 {\n\n let kills = kills as f32;\n\n if 
deaths > 0 {\n\n kills / deaths as f32\n\n } else {\n\n kills\n\n }\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 79, "score": 42523.38461710717 }, { "content": "pub fn calculate_efficiency(kills: u32, deaths: u32, assists: u32) -> f32 {\n\n let t = (kills + assists) as f32;\n\n if deaths > 0 {\n\n t / deaths as f32\n\n } else {\n\n t\n\n }\n\n}\n\n\n", "file_path": "src/dcli/src/utils.rs", "rank": 80, "score": 40687.24271827606 }, { "content": "pub fn property_to_value<'de, D, T: serde::de::Deserialize<'de>>(\n\n deserializer: D,\n\n) -> Result<T, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n #[derive(Deserialize)]\n\n struct Outer<T> {\n\n pub basic: Inner<T>,\n\n }\n\n\n\n #[derive(Deserialize)]\n\n struct Inner<T> {\n\n pub value: T,\n\n }\n\n\n\n let helper = <Outer<T>>::deserialize(deserializer)?;\n\n Ok(helper.basic.value)\n\n}\n\n\n\n/*\n", "file_path": "src/dcli/src/response/utils.rs", "rank": 81, "score": 39952.34812549985 }, { "content": "/*\n\n* Copyright 2021 Mike Chambers\n\n* https://github.com/mikechambers/dcli\n\n*\n\n* Permission is hereby granted, free of charge, to any person obtaining a copy of\n\n* this software and associated documentation files (the \"Software\"), to deal in\n\n* the Software without restriction, including without limitation the rights to\n\n* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\n\n* of the Software, and to permit persons to whom the Software is furnished to do\n\n* so, subject to the following conditions:\n\n*\n\n* The above copyright notice and this permission notice shall be included in all\n\n* copies or substantial portions of the Software.\n\n*\n\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\n* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\n\n* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\n* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\n* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/dcli/src/output.rs", "rank": 82, "score": 38422.17103863395 }, { "content": "*/\n\n\n\nuse std::str::FromStr;\n\n\n\n#[derive(PartialEq, Clone, Copy, Debug)]\n\npub enum Output {\n\n Tsv,\n\n Default,\n\n}\n\n\n\nimpl FromStr for Output {\n\n type Err = &'static str;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n //wrap in String so we can convert to lower case\n\n let s = String::from(s).to_lowercase();\n\n\n\n //get a slice to get a &str for the match\n\n match &s[..] {\n\n \"tsv\" => Ok(Output::Tsv),\n\n \"default\" => Ok(Output::Default),\n\n _ => Err(\"Unknown Output type\"),\n\n }\n\n }\n\n}\n", "file_path": "src/dcli/src/output.rs", "rank": 83, "score": 38284.57642690112 }, { "content": "/*\n\n* Copyright 2021 Mike Chambers\n\n* https://github.com/mikechambers/dcli\n\n*\n\n* Permission is hereby granted, free of charge, to any person obtaining a copy of\n\n* this software and associated documentation files (the \"Software\"), to deal in\n\n* the Software without restriction, including without limitation the rights to\n\n* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\n\n* of the Software, and to permit persons to whom the Software is furnished to do\n\n* so, subject to the following conditions:\n\n*\n\n* The above copyright notice and this permission notice shall be included in all\n\n* copies or substantial portions of the Software.\n\n*\n\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\n* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\n\n* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\n* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\n* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/dcli/src/error.rs", "rank": 84, "score": 38243.728734642005 }, { "content": "/*\n\n* Copyright 2021 Mike Chambers\n\n* https://github.com/mikechambers/dcli\n\n*\n\n* Permission is hereby granted, free of charge, to any person obtaining a copy of\n\n* this software and associated documentation files (the \"Software\"), to deal in\n\n* the Software without restriction, including without limitation the rights to\n\n* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\n\n* of the Software, and to permit persons to whom the Software is furnished to do\n\n* so, subject to the following conditions:\n\n*\n\n* The above copyright notice and this permission notice shall be included in all\n\n* copies or substantial portions of the Software.\n\n*\n\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\n* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR\n\n* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\n* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\n* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/dcli/src/character.rs", "rank": 85, "score": 38205.785032500215 }, { "content": " ParameterParseFailure,\n\n //when id & platform are not correct combination\n\n InvalidParameters,\n\n //Api key not set correctly\n\n ApiKeyMissingFromRequest,\n\n ApiNotAvailableException,\n\n RequestTimedOut,\n\n Request,\n\n PrivacyException,\n\n Database { description: String },\n\n ApiParse { description: String },\n\n IoError { description: String },\n\n IoErrorDirIsFile { description: String },\n\n IoFileDoesNotExist { description: String },\n\n ZipError { description: String },\n\n Unknown { description: String },\n\n ManifestNotSet,\n\n ManifestItemNotFound { description: String },\n\n MaxActivitiesRequestCountExceeded,\n\n CharacterDataNotFound,\n", "file_path": "src/dcli/src/error.rs", "rank": 86, "score": 38102.17635802381 }, { "content": "*/\n\n\n\n//For error handling approach, we are going to start with one error for all\n\n//APIs and individual apps. Given that there is only a general range of what the apps\n\n//do, mostly loading and parsing api data, then we should be able to cover\n\n//error cases without super ballooning the number of error types.\n\n//If it turns out this becomes unwieldy, then we will break it out, into API\n\n//and app specific errors\n\n\n\nuse std::fmt::{Display, Formatter, Result};\n\n\n\nuse crate::response::activities::MAX_ACTIVITIES_REQUEST_COUNT;\n\n\n\n#[derive(PartialEq, Debug)]\n\npub enum Error {\n\n ApiRequest { description: String },\n\n ApiStatus { description: String },\n\n ApiResponseMissing,\n\n\n\n //when parameters are malformed in wrong format (i.e. 
expecting id, getting a name)\n", "file_path": "src/dcli/src/error.rs", "rank": 87, "score": 38100.95753906293 }, { "content": " \"Received response from API but no response property was present.\"\n\n ),\n\n Error::RequestTimedOut => write!(\n\n f,\n\n \"The API request took too long. Check your network connection and \\\n\n try again. (The API servers may be slow right now).\"\n\n ),\n\n Error::Request => write!(\n\n f,\n\n \"There was an error during the API request. This often means \\\n\n that we could not reach the Destiny servers. Check the network \\\n\n connection and try again (The API servers might not be available.).\"\n\n ),\n\n\n\n Error::MaxActivitiesRequestCountExceeded => write!(\n\n f,\n\n \"The maximum number of activities ({}) requested was exceeded.\",\n\n MAX_ACTIVITIES_REQUEST_COUNT\n\n ),\n\n Error::CharacterDataNotFound => write!(\n", "file_path": "src/dcli/src/error.rs", "rank": 88, "score": 38100.90289333055 }, { "content": " f,\n\n \"Could not find entry in activity data for specified character.\"\n\n ),\n\n Error::SystemDirectoryNotFound => {\n\n write!(f, \"Could not locate system directory.\")\n\n },\n\n Error::ChronoParse { description } => {\n\n write!(f, \"Error parsing String to date / time : {}\", description)\n\n },\n\n Error::UnknownEnumValue => {\n\n write!(f, \"Could not convert value to enum.\")\n\n },\n\n Error::NoCharacters => {\n\n write!(f, \"There are no characters for the member.\")\n\n },\n\n Error::CharacterDoesNotExist => {\n\n write!(f, \"Character class does not exist for member.\")\n\n },\n\n Error::ActivityNotFound => {\n\n write!(f, \"Could not find activity in data store.\")\n", "file_path": "src/dcli/src/error.rs", "rank": 89, "score": 38096.19199839277 }, { "content": " SystemDirectoryNotFound,\n\n ChronoParse { description: String },\n\n UnknownEnumValue,\n\n NoCharacters,\n\n CharacterDoesNotExist,\n\n ActivityNotFound,\n\n DateTimePeriodOrder,\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut 
Formatter) -> Result {\n\n match self {\n\n Error::ApiRequest { description } => {\n\n write!(f, \"Error calling Destiny 2 API. {}\", description)\n\n },\n\n Error::ApiStatus { description } => {\n\n write!(f, \"Destiny 2 API call returned an error. {}\", description)\n\n },\n\n Error::ApiParse { description } => write!(\n\n f,\n", "file_path": "src/dcli/src/error.rs", "rank": 90, "score": 38095.13235421681 }, { "content": " \"Missing API Key. Set DESTINY_API_KEY environment variable before compiling.\"\n\n ),\n\n Error::ApiNotAvailableException => {\n\n write!(f, \"The Destiny API is currently not available. Please try again later.\")\n\n },\n\n Error::PrivacyException => write!(\n\n f,\n\n \"Privacy settings for Bungie account are too restrictive.\"\n\n ),\n\n Error::IoFileDoesNotExist { description } => {\n\n write!(f, \"Expected File does not exist: {}\", description)\n\n },\n\n Error::Database { description } => {\n\n write!(f, \"Error working with SQLite database : {}\", description)\n\n },\n\n Error::ManifestItemNotFound { description } => {\n\n write!(f, \"Manifest Item not found : {}\", description)\n\n },\n\n Error::ApiResponseMissing => write!(\n\n f,\n", "file_path": "src/dcli/src/error.rs", "rank": 91, "score": 38094.93991492303 }, { "content": " \"Error parsing results from Destiny 2 API call. {}\",\n\n description\n\n ),\n\n Error::IoError { description } => {\n\n write!(f, \"Error working with file system. {}\", description)\n\n },\n\n Error::ZipError { description } => {\n\n write!(f, \"Error decompressing manifest. {}\", description)\n\n },\n\n Error::IoErrorDirIsFile { description } => {\n\n write!(f, \"Expected directory but found file. {}\", description)\n\n },\n\n Error::Unknown { description } => {\n\n write!(f, \"An unknown error occured. {}\", description)\n\n },\n\n Error::ParameterParseFailure => write!(f, \"Could not parse Parameters. Make sure your inputs were correct and try again. 
(code 7)\"),\n\n Error::InvalidParameters => write!(f, \"Invalid input parameters. (code 18)\"),\n\n Error::ManifestNotSet => write!(f, \"Manifest was not set in Manifest Interface.\"),\n\n Error::ApiKeyMissingFromRequest => write!(\n\n f,\n", "file_path": "src/dcli/src/error.rs", "rank": 92, "score": 38093.78847385593 }, { "content": "impl From<std::io::Error> for Error {\n\n fn from(err: std::io::Error) -> Error {\n\n Error::IoError {\n\n description: format!(\"std::io::Error : {:#?}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl From<zip::result::ZipError> for Error {\n\n fn from(err: zip::result::ZipError) -> Error {\n\n Error::ZipError {\n\n description: format!(\"zip::result::ZipError : {:#?}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl From<sqlx::Error> for Error {\n\n fn from(err: sqlx::Error) -> Error {\n\n Error::Database {\n\n description: format!(\"sqlx::Error : {:#?}\", err),\n", "file_path": "src/dcli/src/error.rs", "rank": 93, "score": 38088.349048848744 }, { "content": " },\n\n Error::DateTimePeriodOrder => {\n\n write!(f, \"Start date must be before end date.\")\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl From<serde_json::Error> for Error {\n\n fn from(err: serde_json::Error) -> Error {\n\n Error::ApiParse {\n\n description: format!(\"serde_json::Error : {:#?}\", err),\n\n } //TODO:: impliment this for all error types\n\n }\n\n}\n\n\n\nimpl From<reqwest::Error> for Error {\n\n fn from(err: reqwest::Error) -> Error {\n\n /*\n\n //todo: need to figure out how to downcast to hyber error\n", "file_path": "src/dcli/src/error.rs", "rank": 94, "score": 38088.258797092 }, { "content": " //so we can get more details on the error (i.e. 
network failure)\n\n //https://stackoverflow.com/a/61100595/10232\n\n let hyper_error: Option<&hyper::Error> = reqwest_error\n\n .source()\n\n .unwrap()\n\n .downcast_ref();\n\n */\n\n\n\n if err.is_timeout() {\n\n Error::RequestTimedOut\n\n } else if err.is_request() {\n\n Error::Request\n\n } else {\n\n Error::ApiRequest {\n\n description: format!(\"reqwest::Error : {:#?}\", err),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/dcli/src/error.rs", "rank": 95, "score": 38088.11811638552 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<chrono::format::ParseError> for Error {\n\n fn from(err: chrono::format::ParseError) -> Error {\n\n Error::ChronoParse {\n\n description: format!(\"chrono::format::ParseError : {:#?}\", err),\n\n }\n\n }\n\n}\n", "file_path": "src/dcli/src/error.rs", "rank": 96, "score": 38088.11119678285 }, { "content": "*/\n\n\n\nuse crate::response::character::CharacterData;\n\nuse crate::{enums::character::CharacterClass, response::pgcr::UserInfoCard};\n\n\n\npub struct PlayerInfo {\n\n pub characters: Characters,\n\n pub user_info: UserInfoCard,\n\n}\n\n\n\npub struct Characters {\n\n pub characters: Vec<CharacterData>,\n\n}\n\n\n\nimpl Characters {\n\n pub fn with_characters(characters: Vec<CharacterData>) -> Characters {\n\n let mut out = Characters { characters };\n\n out.characters\n\n .sort_by(|a, b| b.date_last_played.cmp(&a.date_last_played));\n\n\n", "file_path": "src/dcli/src/character.rs", "rank": 97, "score": 38054.44287359822 }, { "content": " out\n\n }\n\n\n\n pub fn get_by_class_ref(\n\n &self,\n\n class_type: CharacterClass,\n\n ) -> Option<&CharacterData> {\n\n if self.characters.is_empty() {\n\n return None;\n\n }\n\n\n\n for c in &self.characters {\n\n if c.class_type == class_type {\n\n return Some(c);\n\n }\n\n }\n\n\n\n None\n\n }\n\n\n\n pub fn get_last_active_ref(&self) -> Option<&CharacterData> {\n\n if self.characters.is_empty() {\n\n return None;\n\n }\n\n\n\n Some(&self.characters[0])\n\n }\n\n}\n", "file_path": 
"src/dcli/src/character.rs", "rank": 98, "score": 38050.15579958615 }, { "content": "/*\n\n* Copyright 2021 Mike Chambers\n\n* https://github.com/mikechambers/dcli\n\n*\n\n* Permission is hereby granted, free of charge, to any person obtaining a copy of\n\n* this software and associated documentation files (the \"Software\"), to deal in\n\n* the Software without restriction, including without limitation the rights to\n\n* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\n\n* of the Software, and to permit persons to whom the Software is furnished to do\n\n* so, subject to the following conditions:\n\n*\n\n* The above copyright notice and this permission notice shall be included in all\n\n* copies or substantial portions of the Software.\n\n*\n\n* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\n* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS\n\n* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR\n\n* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER\n\n* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN\n\n* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "src/dcli/src/enums/platform.rs", "rank": 99, "score": 36966.16516742188 } ]
Rust
tezos/context/src/serialize/mod.rs
tezedge/tezedge
b8e20d9886ad8b6876ad62375bacf1f6b7999e3b
use std::{ array::TryFromSliceError, convert::TryInto, io::Write, num::TryFromIntError, str::Utf8Error, string::FromUtf8Error, sync::Arc, }; use modular_bitfield::prelude::*; use tezos_timing::SerializeStats; use thiserror::Error; use crate::{ hash::HashingError, kv_store::HashId, persistent::DBError, working_tree::{ shape::DirectoryShapeError, storage::{DirEntryIdError, PointerToInode, Storage, StorageError}, string_interner::StringInterner, Object, }, ContextKeyValueStore, }; use self::persistent::AbsoluteOffset; pub mod in_memory; pub mod persistent; const COMPACT_HASH_ID_BIT: u64 = 1 << 31; const FULL_47_BITS: u64 = 0x7FFFFFFFFFFF; const FULL_31_BITS: u64 = 0x7FFFFFFF; pub type SerializeObjectSignature = fn( &Object, HashId, &mut Vec<u8>, &Storage, &StringInterner, &mut SerializeStats, &mut Vec<(HashId, Arc<[u8]>)>, &mut Vec<HashId>, &mut ContextKeyValueStore, Option<AbsoluteOffset>, ) -> Result<Option<AbsoluteOffset>, SerializationError>; #[derive(BitfieldSpecifier)] #[bits = 2] #[derive(Clone, Debug, Eq, PartialEq, Copy)] pub enum ObjectLength { OneByte, TwoBytes, FourBytes, } #[derive(BitfieldSpecifier)] #[bits = 3] #[derive(Clone, Debug, Eq, PartialEq, Copy)] pub enum ObjectTag { Directory, Blob, Commit, InodePointers, ShapedDirectory, } #[bitfield(bits = 8)] #[derive(Debug)] pub struct ObjectHeader { #[allow(dead_code)] tag: ObjectTag, length: ObjectLength, is_persistent: bool, #[skip] _unused: B2, } impl ObjectHeader { pub fn get_length(&self) -> ObjectLength { self.length() } pub fn get_persistent(&self) -> bool { self.is_persistent() } } #[derive(Copy, Clone, Default, Debug)] struct PointersHeader { bitfield: u32, } impl PointersHeader { fn set(&mut self, index: usize) { self.bitfield |= 1 << index; } fn get(&self, index: usize) -> bool { self.bitfield & 1 << index != 0 } fn to_bytes(self) -> [u8; 4] { self.bitfield.to_le_bytes() } fn iter(&self) -> PointersHeaderIterator { PointersHeaderIterator { bitfield: *self, current: 0, } } fn from_bytes(bytes: 
[u8; 4]) -> Self { Self { bitfield: u32::from_le_bytes(bytes), } } fn count(&self) -> u8 { self.bitfield.count_ones() as u8 } } impl From<&[Option<PointerToInode>; 32]> for PointersHeader { fn from(pointers: &[Option<PointerToInode>; 32]) -> Self { let mut bitfield = Self::default(); for (index, pointer) in pointers.iter().enumerate() { if pointer.is_some() { bitfield.set(index); } } bitfield } } struct PointersHeaderIterator { bitfield: PointersHeader, current: usize, } impl Iterator for PointersHeaderIterator { type Item = usize; fn next(&mut self) -> Option<Self::Item> { for index in self.current..32 { if self.bitfield.get(index) { self.current = index + 1; return Some(index); } } None } } #[derive(Debug, Error)] pub enum SerializationError { #[error("IOError {error}")] IOError { #[from] error: std::io::Error, }, #[error("Directory not found")] DirNotFound, #[error("Directory entry not found")] DirEntryNotFound, #[error("Blob not found")] BlobNotFound, #[error("Conversion from int failed: {error}")] TryFromIntError { #[from] error: TryFromIntError, }, #[error("StorageIdError: {error}")] StorageIdError { #[from] error: StorageError, }, #[error("HashId too big")] HashIdTooBig, #[error("Missing HashId")] MissingHashId, #[error("DBError: {error}")] DBError { #[from] error: DBError, }, #[error("Missing Offset")] MissingOffset, #[error("Hashing Error: {error}")] HashingError { #[from] error: HashingError, }, } #[derive(Debug, Error)] pub enum DeserializationError { #[error("Unexpected end of file")] UnexpectedEOF, #[error("Conversion from slice to an array failed")] TryFromSliceError { #[from] error: TryFromSliceError, }, #[error("Bytes are not valid utf-8: {error}")] Utf8Error { #[from] error: Utf8Error, }, #[error("UnknownID")] UnknownID, #[error("Vector is not valid utf-8: {error}")] FromUtf8Error { #[from] error: FromUtf8Error, }, #[error("Root hash is missing")] MissingRootHash, #[error("Hash is missing")] MissingHash, #[error("Offset is missing")] MissingOffset, 
#[error("DirEntryIdError: {error}")] DirEntryIdError { #[from] error: DirEntryIdError, }, #[error("StorageIdError: {error:?}")] StorageIdError { #[from] error: StorageError, }, #[error("Inode not found in repository")] InodeNotFoundInRepository, #[error("Inode empty in repository")] InodeEmptyInRepository, #[error("DBError: {error:?}")] DBError { #[from] error: Box<DBError>, }, #[error("Cannot find next shape")] CannotFindNextShape, #[error("Directory shape error: {error:?}")] DirectoryShapeError { #[from] error: DirectoryShapeError, }, #[error("IOError: {error:?}")] IOError { #[from] error: std::io::Error, }, } pub fn deserialize_hash_id(data: &[u8]) -> Result<(Option<HashId>, usize), DeserializationError> { use DeserializationError::*; let byte_hash_id = data.get(0).copied().ok_or(UnexpectedEOF)?; if byte_hash_id & 1 << 7 != 0 { let hash_id = data.get(0..4).ok_or(UnexpectedEOF)?; let hash_id = u32::from_be_bytes(hash_id.try_into()?); let hash_id = hash_id as u64; let hash_id = hash_id & (COMPACT_HASH_ID_BIT - 1); let hash_id = HashId::new(hash_id); Ok((hash_id, 4)) } else { let hash_id = data.get(0..6).ok_or(UnexpectedEOF)?; let hash_id = (hash_id[0] as u64) << 40 | (hash_id[1] as u64) << 32 | (hash_id[2] as u64) << 24 | (hash_id[3] as u64) << 16 | (hash_id[4] as u64) << 8 | (hash_id[5] as u64); let hash_id = HashId::new(hash_id); Ok((hash_id, 6)) } } pub fn serialize_hash_id_impl( hash_id: Option<HashId>, output: &mut Vec<u8>, repository: &mut ContextKeyValueStore, stats: &mut SerializeStats, ) -> Result<(), SerializationError> { let hash_id = match hash_id { Some(hash_id) => repository.make_hash_id_ready_for_commit(hash_id)?.as_u64(), None => 0, }; stats.highest_hash_id = stats.highest_hash_id.max(hash_id); if hash_id & FULL_31_BITS == hash_id { let hash_id: u64 = hash_id | COMPACT_HASH_ID_BIT; let hash_id: [u8; 8] = hash_id.to_be_bytes(); output.write_all(&hash_id[4..])?; stats.hash_ids_length = stats.hash_ids_length.saturating_add(4); Ok(()) } else if hash_id 
& FULL_47_BITS == hash_id { output.write_all(&hash_id.to_be_bytes()[2..])?; stats.hash_ids_length = stats.hash_ids_length.saturating_add(6); Ok(()) } else { Err(SerializationError::HashIdTooBig) } } pub fn serialize_hash_id<T>( hash_id: T, output: &mut Vec<u8>, repository: &mut ContextKeyValueStore, stats: &mut SerializeStats, ) -> Result<(), SerializationError> where T: Into<Option<HashId>>, { let hash_id: Option<HashId> = hash_id.into(); serialize_hash_id_impl(hash_id, output, repository, stats) }
use std::{ array::TryFromSliceError, convert::TryInto, io::Write, num::TryFromIntError, str::Utf8Error, string::FromUtf8Error, sync::Arc, }; use modular_bitfield::prelude::*; use tezos_timing::SerializeStats; use thiserror::Error; use crate::{ hash::HashingError, kv_store::HashId, persistent::DBError, working_tree::{ shape::DirectoryShapeError, storage::{DirEntryIdError, PointerToInode, Storage, StorageError}, string_interner::StringInterner, Object, }, ContextKeyValueStore, }; use self::persistent::AbsoluteOffset; pub mod in_memory; pub mod persistent; const COMPACT_HASH_ID_BIT: u64 = 1 << 31; const FULL_47_BITS: u64 = 0x7FFFFFFFFFFF; const FULL_31_BITS: u64 = 0x7FFFFFFF; pub type SerializeObjectSignature = fn( &Object, HashId, &mut Vec<u8>, &Storage, &StringInterner, &mut SerializeStats, &mut Vec<(HashId, Arc<[u8]>)>, &mut Vec<HashId>, &mut ContextKeyValueStore, Option<AbsoluteOffset>, ) -> Result<Option<AbsoluteOffset>, SerializationError>; #[derive(BitfieldSpecifier)] #[bits = 2] #[derive(Clone, Debug, Eq, PartialEq, Copy)] pub enum ObjectLength { OneByte, TwoBytes, FourBytes, } #[derive(BitfieldSpecifier)] #[bits = 3] #[derive(Clone, Debug, Eq, PartialEq, Copy)] pub enum ObjectTag { Directory, Blob, Commit, InodePointers, ShapedDirectory, } #[bitfield(bits = 8)] #[derive(Debug)] pub struct ObjectHeader { #[allow(dead_code)] tag: ObjectTag, length: ObjectLength, is_persistent: bool, #[skip] _unused: B2, } impl ObjectHeader { pub fn get_length(&self) -> ObjectLength { self.length() } pub fn get_persistent(&self) -> bool { self.is_persistent() } } #[derive(Copy, Clone, Default, Debug)] struct PointersHeader { bitfield: u32, } impl PointersHeader { fn set(&mut self, index: usize) { self.bitfield |= 1 << index; } fn get(&self, index: usize) -> bool { self.bitfield & 1 << index != 0 } fn to_bytes(self) -> [u8; 4] { self.bitfield.to_le_bytes() } fn iter(&self) -> PointersHeaderIterator { PointersHeaderIterator { bitfield: *self, current: 0, } } fn from_bytes(bytes: 
[u8; 4]) -> Self { Self { bitfield: u32::from_le_bytes(bytes), } } fn count(&self) -> u8 { self.bitfield.count_ones() as u8 } } impl From<&[Option<PointerToInode>; 32]> for PointersHeader { fn from(pointers: &[Option<PointerToInode>; 32]) -> Self { let mut bitfield = Self::default(); for (index, pointer) in pointers.iter().enumerate() { if pointer.is_some() { bitfield.set(index); } } bitfield } } struct PointersHeaderIterator { bitfield: PointersHeader, current: usize, } impl Iterator for PointersHeaderIterator { type Item = usize; fn next(&mut self) -> Option<Self::Item> { for index in self.current..32 { if self.bitfield.get(index) { self.current = index + 1; return Some(index); } } None } } #[derive(Debug, Error)] pub enum SerializationError { #[error("IOError {error}")] IOError { #[from] error: std::io::Error, }, #[error("Directory not found")] DirNotFound, #[error("Directory entry not found")] DirEntryNotFound, #[error("Blob not found")] BlobNotFound, #[error("Conversion from int failed: {error}")] TryFromIntError { #[from] error: TryFromIntError, }, #[error("StorageIdError: {error}")] StorageIdError { #[from] error: StorageError, }, #[error("HashId too big")] HashIdTooBig, #[error("Missing HashId")] MissingHashId, #[error("DBError: {error}")] DBError { #[from] error: DBError, }, #[error("Missing Offset")] MissingOffset, #[error("Hashing Error: {error}")] HashingError { #[from] error: HashingError, }, } #[derive(Debug, Error)] pub enum DeserializationError { #[error("Unexpected end of file")] UnexpectedEOF, #[error("Conversion from slice to an array failed")] TryFromSliceError { #[from] error: TryFromSliceError, }, #[error("Bytes are not valid utf-8: {error}")] Utf8Error { #[from] error: Utf8Error, }, #[error("UnknownID")] UnknownID, #[error("Vector is not valid utf-8: {error}")] FromUtf8Error { #[from] error: FromUtf8Error, }, #[error("Root hash is missing")] MissingRootHash, #[error("Hash is missing")] MissingHash, #[error("Offset is missing")] MissingOffset, 
#[error("DirEntryIdError: {error}")] DirEntryIdError { #[from] error: DirEntryIdError, }, #[error("StorageIdError: {error:?}")] StorageIdError { #[from] error: StorageError, }, #[error("Inode not found in repository")] InodeNotFoundInRepository, #[error("Inode empty in repository")] InodeEmptyInRepository, #[error("DBError: {error:?}")] DBError { #[from] error: Box<DBError>, }, #[error("Cannot find next shape")] CannotFindNextShape, #[error("Directory shape error: {error:?}")] DirectoryShapeError { #[from] error: DirectoryShapeError, }, #[error("IOError: {error:?}")] IOError { #[from] error: std::io::Error, }, } pub fn deserialize_hash_id(data: &[u8]) -> Result<(Option<HashId>, usize), DeserializationError> { use DeserializationError::*; let byte_hash_id = data.get(0).copied().ok_or(UnexpectedEOF)?; if byte_hash_id & 1 << 7 != 0 { let hash_id = data.get(0..4).ok_or(UnexpectedEOF)?; let hash_id = u3
pub fn serialize_hash_id_impl( hash_id: Option<HashId>, output: &mut Vec<u8>, repository: &mut ContextKeyValueStore, stats: &mut SerializeStats, ) -> Result<(), SerializationError> { let hash_id = match hash_id { Some(hash_id) => repository.make_hash_id_ready_for_commit(hash_id)?.as_u64(), None => 0, }; stats.highest_hash_id = stats.highest_hash_id.max(hash_id); if hash_id & FULL_31_BITS == hash_id { let hash_id: u64 = hash_id | COMPACT_HASH_ID_BIT; let hash_id: [u8; 8] = hash_id.to_be_bytes(); output.write_all(&hash_id[4..])?; stats.hash_ids_length = stats.hash_ids_length.saturating_add(4); Ok(()) } else if hash_id & FULL_47_BITS == hash_id { output.write_all(&hash_id.to_be_bytes()[2..])?; stats.hash_ids_length = stats.hash_ids_length.saturating_add(6); Ok(()) } else { Err(SerializationError::HashIdTooBig) } } pub fn serialize_hash_id<T>( hash_id: T, output: &mut Vec<u8>, repository: &mut ContextKeyValueStore, stats: &mut SerializeStats, ) -> Result<(), SerializationError> where T: Into<Option<HashId>>, { let hash_id: Option<HashId> = hash_id.into(); serialize_hash_id_impl(hash_id, output, repository, stats) }
2::from_be_bytes(hash_id.try_into()?); let hash_id = hash_id as u64; let hash_id = hash_id & (COMPACT_HASH_ID_BIT - 1); let hash_id = HashId::new(hash_id); Ok((hash_id, 4)) } else { let hash_id = data.get(0..6).ok_or(UnexpectedEOF)?; let hash_id = (hash_id[0] as u64) << 40 | (hash_id[1] as u64) << 32 | (hash_id[2] as u64) << 24 | (hash_id[3] as u64) << 16 | (hash_id[4] as u64) << 8 | (hash_id[5] as u64); let hash_id = HashId::new(hash_id); Ok((hash_id, 6)) } }
function_block-function_prefixed
[ { "content": "fn write_object_header(output: &mut Vec<u8>, start: usize, tag: ObjectTag) {\n\n let length = output.len() - start;\n\n\n\n if length <= 0xFF {\n\n let header: [u8; 1] = ObjectHeader::new()\n\n .with_tag(tag)\n\n .with_length(ObjectLength::OneByte)\n\n .with_is_persistent(true)\n\n .into_bytes();\n\n\n\n output[start] = header[0];\n\n output[start + 1] = length as u8;\n\n } else if length <= (0xFFFF - 1) {\n\n output.push(0);\n\n\n\n let end = output.len();\n\n output.copy_within(start + 2..end - 1, start + 3);\n\n\n\n let header: [u8; 1] = ObjectHeader::new()\n\n .with_tag(tag)\n", "file_path": "tezos/context/src/serialize/persistent.rs", "rank": 0, "score": 400359.12923161394 }, { "content": "pub trait GenState: Clone + PartialEq + Eq + Hash + Debug {\n\n /// Predicate that allows limiting the size of state graph.\n\n fn within_bounds(&self) -> bool;\n\n}\n\n\n", "file_path": "shell_automaton/tests/state_explorer.rs", "rank": 1, "score": 377532.27153063565 }, { "content": "#[inline]\n\npub fn vec_from_slice(buf: &[u8], from_idx: usize, size: usize) -> Vec<u8> {\n\n buf[from_idx..from_idx + size].to_vec()\n\n}\n\n\n\n#[inline]\n\npub const fn range_from_idx_len(idx: usize, len: usize) -> Range<usize> {\n\n idx..idx + len\n\n}\n", "file_path": "storage/src/persistent/codec.rs", "rank": 3, "score": 368859.41325492726 }, { "content": "type ItemCount = u32;\n\n\n\n/// Precisely identifies location of a record in a commit log.\n\n#[derive(Copy, Clone, Debug, Serialize, Deserialize)]\n\npub struct Location(pub u64, pub ByteLimit);\n\n\n\nimpl Location {\n\n #[inline]\n\n pub fn is_consecutive(&self, prev: &Location) -> bool {\n\n (prev.0 < self.0) && (self.0 - prev.0 == 1)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Location {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_fmt(format_args!(\"Location({},{})\", self.0, self.1))\n\n }\n\n}\n\n\n\nimpl BincodeEncoded for Location {}\n\n\n\n/// Range of values to get from 
a commit log\n\n#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Range(pub u64, pub ByteLimit, pub ItemCount);\n\n\n", "file_path": "storage/src/commit_log/mod.rs", "rank": 4, "score": 356091.5507073391 }, { "content": "pub fn read_data(file: impl AsRef<Path>) -> Result<Vec<u8>, Error> {\n\n let dir =\n\n std::env::var(\"CARGO_MANIFEST_DIR\").context(format!(\"`CARGO_MANIFEST_DIR` is not set\"))?;\n\n let path = PathBuf::from(dir).join(\"resources\").join(file);\n\n let data = File::open(&path)\n\n .and_then(|mut file| {\n\n let mut data = Vec::new();\n\n file.read_to_end(&mut data)?;\n\n Ok(data)\n\n })\n\n .with_context(|| format!(\"Cannot read message from {}\", path.to_string_lossy()))?;\n\n Ok(data)\n\n}\n\n\n", "file_path": "tezos/messages/benches/codecs_bench_common.rs", "rank": 5, "score": 351290.9012955135 }, { "content": "pub fn zstd_decompress<B: AsRef<[u8]>>(input: B, output: &mut Vec<u8>) -> std::io::Result<()> {\n\n zstd::stream::copy_decode(input.as_ref(), output)\n\n}\n", "file_path": "storage/src/commit_log/compression.rs", "rank": 6, "score": 350906.06510026567 }, { "content": "pub fn zstd_compress<B: AsRef<[u8]>>(input: B, output: &mut Vec<u8>) -> std::io::Result<()> {\n\n zstd::stream::copy_encode(input.as_ref(), output, COMPRESSION_LEVEL)\n\n}\n\n\n", "file_path": "storage/src/commit_log/compression.rs", "rank": 7, "score": 350906.06510026567 }, { "content": "// caml_hash_mix_string function:\n\n// https://github.com/ocaml/ocaml/blob/a5f63ba65fb8df7b2fa52076a2763b74078e463e/runtime/hash.c#L145\n\npub fn ocaml_hash_string(seed: u32, s: &[u8]) -> u32 {\n\n let len = s.len() as u32;\n\n let mut h = Wrapping(seed);\n\n let mut i = 0;\n\n\n\n // Mix by 32-bit blocks (little-endian)\n\n while i + 4 <= len {\n\n let pos = i as usize;\n\n let next_u32_bytes: [u8; 4] = s[pos..pos + 4].try_into().unwrap();\n\n let w = Wrapping(u32::from_le_bytes(next_u32_bytes));\n\n\n\n h = mix(h, w);\n\n i += 4;\n\n }\n\n\n\n // At this 
point there might be up to 3 bytes left to read.\n\n // Bytes that are out of range should be set to \\000.\n\n let pos = i as usize;\n\n h = match len & 3 {\n\n 3 => mix(\n", "file_path": "tezos/context/src/hash/ocaml.rs", "rank": 8, "score": 347179.2713038046 }, { "content": "pub fn peer_message_size(bytes: impl AsRef<[u8]>) -> Result<usize, BinaryReaderError> {\n\n let size = complete_input(size, bytes.as_ref())?;\n\n Ok(size as usize)\n\n}\n", "file_path": "tezos/messages/src/p2p/mod.rs", "rank": 9, "score": 336437.6855108965 }, { "content": "/// Merges 2 maps by picking the max value for each key, if the key is not present in both, its is added to the map\n\nfn max_merge_maps<K: Hash + Eq + Clone, V: Ord + Default + Clone>(\n\n first_map: HashMap<K, V>,\n\n second_map: HashMap<K, V>,\n\n) -> HashMap<K, V> {\n\n let mut new_map = HashMap::new();\n\n for (key, value) in first_map.iter() {\n\n new_map.insert(\n\n key.clone(),\n\n cmp::max(\n\n value.clone(),\n\n second_map.get(&key).unwrap_or(&V::default()).clone(),\n\n ),\n\n );\n\n }\n\n\n\n for (key, value) in second_map.iter() {\n\n new_map.insert(\n\n key.clone(),\n\n cmp::max(\n\n value.clone(),\n\n first_map.get(&key).unwrap_or(&V::default()).clone(),\n\n ),\n\n );\n\n }\n\n\n\n new_map\n\n}\n\n\n", "file_path": "apps/node_monitoring/src/monitors/resource.rs", "rank": 10, "score": 327529.08990420075 }, { "content": "type TaggedFile = u64;\n\n\n\npub const TAG_SHAPE: u64 = 0;\n\npub const TAG_SHAPE_INDEX: u64 = 1;\n\npub const TAG_COMMIT_INDEX: u64 = 2;\n\npub const TAG_DATA: u64 = 3;\n\npub const TAG_STRINGS: u64 = 4;\n\npub const TAG_BIG_STRINGS: u64 = 5;\n\npub const TAG_HASHES: u64 = 6;\n\npub const TAG_SIZES: u64 = 7;\n\n\n\nimpl From<FileType> for u64 {\n\n fn from(file_type: FileType) -> Self {\n\n match file_type {\n\n FileType::ShapeDirectories => TAG_SHAPE,\n\n FileType::ShapeDirectoriesIndex => TAG_SHAPE_INDEX,\n\n FileType::CommitIndex => TAG_COMMIT_INDEX,\n\n FileType::Data => TAG_DATA,\n\n 
FileType::Strings => TAG_STRINGS,\n\n FileType::BigStrings => TAG_BIG_STRINGS,\n", "file_path": "tezos/context/src/persistent/file.rs", "rank": 11, "score": 325797.78096975776 }, { "content": "/// Returns only true, if timestamp of header is not in the far future\n\npub fn is_future_block(block_header: &BlockHeader) -> Result<bool, anyhow::Error> {\n\n let future_margin =\n\n chrono::offset::Utc::now() + chrono::Duration::from_std(Duration::from_secs(15))?;\n\n let block_timestamp = chrono::Utc.from_utc_datetime(\n\n &chrono::NaiveDateTime::from_timestamp_opt(block_header.timestamp(), 0)\n\n .ok_or(TimestampOutOfRangeError)?,\n\n );\n\n Ok(block_timestamp > future_margin)\n\n}\n\n\n\npub enum CanApplyStatus {\n\n Ready,\n\n AlreadyApplied,\n\n MissingPredecessor,\n\n PredecessorNotApplied,\n\n MissingOperations,\n\n}\n\n\n", "file_path": "shell/src/validation/mod.rs", "rank": 12, "score": 324178.4814192307 }, { "content": "pub fn boolean(b: &bool, out: &mut Vec<u8>) -> BinResult {\n\n put_byte(\n\n if *b {\n\n &crate::types::BYTE_VAL_TRUE\n\n } else {\n\n &crate::types::BYTE_VAL_FALSE\n\n },\n\n out,\n\n );\n\n Ok(())\n\n}\n\n\n\n// Rust integers encoding\n\nmod integers {\n\n macro_rules! 
encode_integer {\n\n ($t:ident) => {\n\n pub fn $t(i: &$t, out: &mut Vec<u8>) -> super::BinResult {\n\n super::put_bytes(&i.to_be_bytes(), out);\n\n Ok(())\n\n }\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 13, "score": 323712.3464960691 }, { "content": "#[inline]\n\nfn expected_data_length(validation_passes: u8) -> usize {\n\n std::mem::size_of::<u8>() // validation_passes\n\n + std::mem::size_of::<u8>() // is_complete\n\n + (validation_passes as usize) * std::mem::size_of::<u8>() // is_validation_pass_present\n\n}\n\n\n", "file_path": "storage/src/operations_meta_storage.rs", "rank": 14, "score": 320516.9639663779 }, { "content": "pub fn bounded_string<S: AsRef<str>>(max_len: usize) -> impl FnMut(S, &mut Vec<u8>) -> BinResult {\n\n move |data, out| {\n\n if data.as_ref().len() <= max_len {\n\n string(data, out)\n\n } else {\n\n Err(BinError::size_error(max_len, data.as_ref().len()))\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 15, "score": 313008.01123091625 }, { "content": "#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct DirectoryShapeHash(u64);\n\n\n\n#[bitfield]\n\n#[derive(Clone, Debug, Eq, PartialEq, Copy)]\n\npub struct ShapeSliceId {\n\n start: B47,\n\n length: B17,\n\n}\n\n\n\n/// Contains the shape (key fragments) of a directory.\n\n///\n\n/// A `DirectoryShapeId` maps to a slice of `StringId`\n\npub struct DirectoryShapes {\n\n /// Map `DirectoryShapeHash` to its `DirectoryShapeId` and strings.\n\n hash_to_strings: BTreeMap<DirectoryShapeHash, (DirectoryShapeId, ShapeSliceId)>,\n\n shapes: ChunkedVec<StringId>,\n\n\n\n to_serialize: Vec<ShapeSliceId>,\n\n\n\n /// Map the `DirectoryShapeId` to its `DirectoryShapeHash`.\n", "file_path": "tezos/context/src/working_tree/shape.rs", "rank": 16, "score": 311839.7954359346 }, { "content": "/// Iterate HashIds in the serialized data\n\npub fn iter_hash_ids(data: &[u8]) -> HashIdIterator {\n\n HashIdIterator { data, pos: 0 }\n\n}\n\n\n\npub 
struct HashIdIterator<'a> {\n\n data: &'a [u8],\n\n pos: usize,\n\n}\n\n\n\n/// Number of bytes to reach the hashes when serializing a `Inode::Pointers`.\n\n///\n\n/// This skip `ID_INODE_POINTERS`, `depth`, `nchildren` and `PointersDescriptor`.\n\nconst INODE_POINTERS_NBYTES_TO_HASHES: usize = 13;\n\n\n\n/// Number of bytes to reach the hashes when serializing a shaped directory.\n\n///\n\n/// This skip `ID_SHAPED_DIRECTORY` and the `ShapeId`\n\nconst SHAPED_DIRECTORY_NBYTES_TO_HASHES: usize = 5;\n\n\n\nimpl<'a> Iterator for HashIdIterator<'a> {\n", "file_path": "tezos/context/src/serialize/in_memory.rs", "rank": 17, "score": 311505.78955160547 }, { "content": "pub fn read_data_unwrap(file: impl AsRef<Path>) -> Vec<u8> {\n\n read_data(file).unwrap_or_else(|e| panic!(\"Unexpected error: {}\", e))\n\n}\n\n\n", "file_path": "tezos/messages/benches/codecs_bench_common.rs", "rank": 18, "score": 309884.7893658906 }, { "content": "pub fn hash_to_string(hash: &[u8]) -> String {\n\n const HEXCHARS: &[u8] = b\"0123456789abcdef\";\n\n\n\n let mut s = String::with_capacity(62);\n\n for byte in hash {\n\n s.push(HEXCHARS[*byte as usize >> 4] as char);\n\n s.push(HEXCHARS[*byte as usize & 0xF] as char);\n\n }\n\n s\n\n}\n\n\n\nimpl Timing {\n\n fn new() -> Timing {\n\n Timing {\n\n current_block: None,\n\n current_operation: None,\n\n current_context: None,\n\n block_started_at: None,\n\n nqueries: 0,\n\n checkout_time: None,\n", "file_path": "tezos/timing/src/lib.rs", "rank": 19, "score": 309584.25203658204 }, { "content": "/// Returns true only if we recieve the same head as is our current_head\n\npub fn is_same_head(head: &Head, incoming_header: &BlockHeader) -> Result<bool, anyhow::Error> {\n\n let mut is_same = head.block_hash().as_ref() == &incoming_header.message_hash()?;\n\n is_same &= head.fitness() == incoming_header.fitness();\n\n is_same &= head.level() == &incoming_header.level();\n\n Ok(is_same)\n\n}\n\n\n", "file_path": "shell/src/validation/mod.rs", "rank": 20, 
"score": 307865.43304705195 }, { "content": "pub fn hashed<'a, O, F>(mut parser: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, (O, Vec<u8>)>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n{\n\n move |input| {\n\n let (rest, result) = parser(input)?;\n\n let hash = crypto::blake2b::digest_256(&input[..input.len() - rest.len()])\n\n .map_err(|e| nom::Err::Failure(NomError::hash_error(input, e)))?;\n\n Ok((rest, (result, hash)))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use num_bigint::BigInt;\n\n use num_traits::FromPrimitive;\n\n\n\n use super::error::*;\n\n use super::*;\n\n\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 21, "score": 306477.80554937606 }, { "content": "pub fn string(data: impl AsRef<str>, out: &mut Vec<u8>) -> BinResult {\n\n put_size(data.as_ref().len(), out)?;\n\n put_bytes(data.as_ref().as_bytes(), out);\n\n Ok(())\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 22, "score": 295481.24703835987 }, { "content": "pub fn read_object_length(\n\n data: &[u8],\n\n header: &ObjectHeader,\n\n) -> Result<(usize, usize), DeserializationError> {\n\n use DeserializationError::*;\n\n\n\n match header.length() {\n\n ObjectLength::OneByte => {\n\n let length = data.get(1).copied().ok_or(UnexpectedEOF)? as usize;\n\n Ok((1 + 1, length))\n\n }\n\n ObjectLength::TwoBytes => {\n\n let length = data.get(1..3).ok_or(UnexpectedEOF)?;\n\n let length = u16::from_le_bytes(length.try_into()?) as usize;\n\n Ok((1 + 2, length))\n\n }\n\n ObjectLength::FourBytes => {\n\n let length = data.get(1..5).ok_or(UnexpectedEOF)?;\n\n let length = u32::from_le_bytes(length.try_into()?) 
as usize;\n\n Ok((1 + 4, length))\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/context/src/serialize/persistent.rs", "rank": 23, "score": 294086.7583067126 }, { "content": "/// Validates if new_head is stronger or at least equals to old_head - according to fitness\n\npub fn can_update_current_head(\n\n new_head: &BlockHeaderWithHash,\n\n current_head: &Head,\n\n current_context_fitness: &Fitness,\n\n) -> bool {\n\n let new_head_fitness = FitnessWrapper::new(new_head.header.fitness());\n\n let current_head_fitness = FitnessWrapper::new(current_head.fitness());\n\n let context_fitness = FitnessWrapper::new(current_context_fitness);\n\n\n\n // according to chain_validator.ml\n\n if context_fitness.eq(&current_head_fitness) {\n\n new_head_fitness.gt(&context_fitness)\n\n } else {\n\n new_head_fitness.ge(&context_fitness)\n\n }\n\n}\n\n\n", "file_path": "shell/src/validation/mod.rs", "rank": 24, "score": 293860.2741694697 }, { "content": "fn encode_irmin_dir_entry_kind(kind: &DirEntryKind) -> [u8; 8] {\n\n match kind {\n\n DirEntryKind::Directory => [0, 0, 0, 0, 0, 0, 0, 0],\n\n DirEntryKind::Blob => [255, 0, 0, 0, 0, 0, 0, 0],\n\n }\n\n}\n\n\n\npub(crate) fn index(depth: u32, name: &str) -> u32 {\n\n ocaml_hash_string(depth, name.as_bytes()) % 32\n\n}\n\n\n", "file_path": "tezos/context/src/hash/mod.rs", "rank": 25, "score": 292735.124318716 }, { "content": "#[test]\n\npub fn test_context_copy_persistent() -> Result<(), anyhow::Error> {\n\n context_copy(\n\n ContextKvStoreConfiguration::OnDisk(\"\".to_string()),\n\n \"__context:test_context_copy_persistent\",\n\n )\n\n}\n\n\n", "file_path": "tezos/context/tests/context.rs", "rank": 26, "score": 291859.70258661115 }, { "content": "pub fn put_bytes(bytes: &[u8], out: &mut Vec<u8>) {\n\n out.extend_from_slice(bytes);\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 27, "score": 290614.851377646 }, { "content": "pub fn put_byte(byte: &u8, out: &mut Vec<u8>) {\n\n out.push(*byte)\n\n}\n\n\n", "file_path": 
"tezos/encoding/src/enc.rs", "rank": 28, "score": 290614.851377646 }, { "content": "#[inline]\n\nfn mix(mut h: Wrapping<u32>, mut w: Wrapping<u32>) -> Wrapping<u32> {\n\n w *= Wrapping(0xcc9e2d51);\n\n w = w << 15 | w >> 17;\n\n w *= Wrapping(0x1b873593);\n\n h ^= w;\n\n h = h << 13 | h >> 19;\n\n h = h * Wrapping(5) + Wrapping(0xe6546b64);\n\n h\n\n}\n\n\n", "file_path": "tezos/context/src/hash/ocaml.rs", "rank": 29, "score": 287352.46695534774 }, { "content": "/// Open commit log at a given path.\n\npub fn open_cl<P, I>(path: P, cfs: I, log: Logger) -> Result<CommitLogs, CommitLogError>\n\nwhere\n\n P: AsRef<Path>,\n\n I: IntoIterator<Item = CommitLogDescriptor>,\n\n{\n\n CommitLogs::new(path, cfs, log)\n\n}\n\n\n", "file_path": "storage/src/persistent/mod.rs", "rank": 31, "score": 285829.51782711584 }, { "content": "#[test]\n\npub fn test_context_set_get_commit_persistent() -> Result<(), anyhow::Error> {\n\n context_set_get_commit(\n\n ContextKvStoreConfiguration::OnDisk(\"\".to_string()),\n\n \"__context:test_context_set_get_commit_persistent\",\n\n )\n\n}\n\n\n", "file_path": "tezos/context/tests/context.rs", "rank": 32, "score": 280697.0095556996 }, { "content": "#[test]\n\npub fn test_context_hash_from_working_tree_persistent() -> Result<(), anyhow::Error> {\n\n context_hash_from_working_tree(\n\n ContextKvStoreConfiguration::OnDisk(\"\".to_string()),\n\n \"__context:test_context_hash_from_working_tree_persistent\",\n\n )\n\n}\n\n\n", "file_path": "tezos/context/tests/context.rs", "rank": 33, "score": 280615.7792134254 }, { "content": "pub fn hash_as_bytes<T>(hash: OCaml<T>) -> &[u8] {\n\n let field = unsafe { hash.field::<OCamlBytes>(0) };\n\n field.as_bytes()\n\n}\n\n\n\n// TODO: TE-367: review once ocaml-interop has been upgraded\n\nunsafe impl FromOCaml<OCamlChainId> for ChainId {\n\n fn from_ocaml(v: OCaml<OCamlChainId>) -> Self {\n\n let v: OCaml<OCamlBytes> = unsafe { std::mem::transmute(v) };\n\n let vec: Vec<u8> = v.to_rust();\n\n 
ChainId::try_from(vec).unwrap()\n\n }\n\n}\n\n\n\nimpl_from_ocaml_variant! {\n\n OCamlContextKvStoreConfiguration => ContextKvStoreConfiguration {\n\n ContextKvStoreConfiguration::ReadOnlyIpc,\n\n ContextKvStoreConfiguration::InMem,\n\n ContextKvStoreConfiguration::OnDisk(path: String),\n\n }\n", "file_path": "tezos/conv/src/from_ocaml.rs", "rank": 34, "score": 280241.47573610366 }, { "content": "/// Merges 2 float maps by picking the max value for each key, if the key is not present in both, its is added to the map\n\n// we need a separate function for f32 as they do not implement the Ord trait\n\nfn max_merge_float_maps<K: Hash + Eq + Clone>(\n\n first_map: HashMap<K, f32>,\n\n second_map: HashMap<K, f32>,\n\n) -> HashMap<K, f32> {\n\n // ...\n\n let mut new_map = HashMap::new();\n\n for (key, value) in first_map.iter() {\n\n new_map.insert(\n\n key.clone(),\n\n f32::max(*value, *second_map.get(&key).unwrap_or(&0.0)),\n\n );\n\n }\n\n\n\n for (key, value) in second_map.iter() {\n\n new_map.insert(\n\n key.clone(),\n\n f32::max(*value, *first_map.get(&key).unwrap_or(&0.0)),\n\n );\n\n }\n\n new_map\n\n // ...\n\n}\n\n\n", "file_path": "apps/node_monitoring/src/monitors/resource.rs", "rank": 35, "score": 277783.1433452973 }, { "content": "fn make_tags<'a>(variants: impl IntoIterator<Item = &'a syn::Variant>) -> Result<Vec<Tag<'a>>> {\n\n let mut default_id = 0;\n\n let mut tags = Vec::new();\n\n for variant in variants {\n\n let meta = &mut get_encoding_meta(&variant.attrs)?;\n\n let tag = make_tag(variant, meta, &mut default_id)?;\n\n tags.push(tag);\n\n }\n\n Ok(tags)\n\n}\n\n\n", "file_path": "tezos/encoding-derive/src/make.rs", "rank": 36, "score": 276639.2400728618 }, { "content": "pub fn is_ocaml_log_enabled() -> bool {\n\n env::var(\"OCAML_LOG_ENABLED\")\n\n .unwrap_or_else(|_| \"false\".to_string())\n\n .parse::<bool>()\n\n .unwrap()\n\n}\n\n\n", "file_path": "tezos/interop/tests/common/mod.rs", "rank": 37, "score": 276266.25226809795 }, { "content": "/// 
Returns only true, if new_fitness is greater than head's fitness\n\npub fn is_fitness_increases(head: &Head, new_fitness: &Fitness) -> bool {\n\n fitness_increases(head.fitness(), new_fitness)\n\n}\n\n\n", "file_path": "shell/src/validation/mod.rs", "rank": 38, "score": 275329.7070109036 }, { "content": "/// Returns only true, if new_fitness is greater than head's fitness\n\npub fn is_fitness_increases_or_same(head: &Head, new_fitness: &Fitness) -> bool {\n\n fitness_increases_or_same(head.fitness(), new_fitness)\n\n}\n\n\n", "file_path": "shell/src/validation/mod.rs", "rank": 39, "score": 275329.7070109036 }, { "content": "// Returns a byte vector of specified size, with BE encoding of the num\n\n// as its last 8 bytes\n\nfn get_hash(num: u64, size: usize) -> Vec<u8> {\n\n use std::mem::size_of;\n\n assert!(size >= size_of::<u64>() + 2);\n\n let mut vec = Vec::new();\n\n vec.push(LEFT_BOUNDARY);\n\n vec.resize(size - size_of::<u64>() - 1, 0x11);\n\n vec.push(RIGHT_BOUNDARY);\n\n vec.append(&mut num.to_be_bytes().to_vec());\n\n vec\n\n}\n\n\n", "file_path": "tezos/messages/tests/encoding_operations_for_blocks.rs", "rank": 40, "score": 271615.0630978764 }, { "content": "fn read_data(file: &str) -> Result<Vec<u8>, Error> {\n\n let dir =\n\n std::env::var(\"CARGO_MANIFEST_DIR\").context(format!(\"`CARGO_MANIFEST_DIR` is not set\"))?;\n\n let path = PathBuf::from(dir).join(\"resources\").join(file);\n\n let data = File::open(&path)\n\n .and_then(|mut file| {\n\n let mut data = Vec::new();\n\n file.read_to_end(&mut data)?;\n\n Ok(data)\n\n })\n\n .with_context(|| format!(\"Cannot read message from {}\", path.to_string_lossy()))?;\n\n Ok(data)\n\n}\n\n\n", "file_path": "tezos/messages/tests/dhat.rs", "rank": 41, "score": 270938.73222992127 }, { "content": "pub fn z_bignum(mut input: NomInput) -> NomResult<BigInt> {\n\n let mut bitslice_vec = Vec::new();\n\n let mut has_next = true;\n\n let mut missing_bits = 0;\n\n let mut first = true;\n\n let mut neg = false;\n\n while 
has_next {\n\n let (new_input, byte) = take(1_u8)(input)?;\n\n input = new_input;\n\n let bits = byte.view_bits();\n\n has_next = bits[0];\n\n let skip_bits = if first {\n\n neg = bits[1];\n\n 2\n\n } else {\n\n 1\n\n };\n\n first = false;\n\n bitslice_vec.push(&bits[skip_bits..]);\n\n missing_bits += skip_bits;\n\n }\n\n let mut bitvec = bitvec![Msb0, u8; 0; missing_bits % 8];\n\n for bitslice in bitslice_vec.into_iter().rev() {\n\n bitvec.extend_from_bitslice(bitslice);\n\n }\n\n let sign = if neg { Sign::Minus } else { Sign::Plus };\n\n Ok((input, BigInt::from_bytes_be(sign, &bitvec.into_vec())))\n\n}\n\n\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 42, "score": 269874.9812313549 }, { "content": "type ByteLimit = usize;\n", "file_path": "storage/src/commit_log/mod.rs", "rank": 43, "score": 268968.3194820058 }, { "content": "fn error(span: Span, message: impl std::fmt::Display) -> syn::Error {\n\n syn::Error::new(span, message)\n\n}\n", "file_path": "tezos/encoding-derive/src/make.rs", "rank": 44, "score": 268445.47294713644 }, { "content": "/// Create default database configuration options,\n\n/// based on recommended setting:\n\n/// https://github.com/facebook/rocksdb/wiki/Setup-Options-and-Basic-Tuning#other-general-options\n\n/// https://rocksdb.org/blog/2019/03/08/format-version-4.html\n\npub fn default_table_options(_cache: &Cache) -> Options {\n\n // default db options\n\n let mut db_opts = Options::default();\n\n\n\n // https://github.com/facebook/rocksdb/wiki/Setup-Options-and-Basic-Tuning#other-general-options\n\n db_opts.set_level_compaction_dynamic_level_bytes(false);\n\n db_opts.set_write_buffer_size(32 * 1024 * 1024);\n\n\n\n // block table options\n\n let mut table_options = BlockBasedOptions::default();\n\n // table_options.set_block_cache(cache);\n\n // table_options.set_block_size(16 * 1024);\n\n // table_options.set_cache_index_and_filter_blocks(true);\n\n // table_options.set_pin_l0_filter_and_index_blocks_in_cache(true);\n\n\n\n // 
set format_version 4 https://rocksdb.org/blog/2019/03/08/format-version-4.html\n\n table_options.set_format_version(4);\n\n // table_options.set_index_block_restart_interval(16);\n\n\n\n db_opts.set_block_based_table_factory(&table_options);\n", "file_path": "storage/src/persistent/database.rs", "rank": 45, "score": 265035.82672519225 }, { "content": "fn error_spanned(tokens: impl quote::ToTokens, message: impl std::fmt::Display) -> syn::Error {\n\n syn::Error::new_spanned(tokens, message)\n\n}\n\n\n", "file_path": "tezos/encoding-derive/src/make.rs", "rank": 46, "score": 263676.768217168 }, { "content": "/// Generate digest of length 128 bits (16bytes) from arbitrary binary data\n\npub fn digest_128(data: &[u8]) -> Result<Vec<u8>, Blake2bError> {\n\n digest(data, 16)\n\n}\n\n\n", "file_path": "crypto/src/blake2b.rs", "rank": 47, "score": 263655.54081808723 }, { "content": "// Generate digest of length 160 bits (20bytes) from arbitrary binary data\n\npub fn digest_160(data: &[u8]) -> Result<Vec<u8>, Blake2bError> {\n\n digest(data, 20)\n\n}\n\n\n", "file_path": "crypto/src/blake2b.rs", "rank": 48, "score": 263655.54081808723 }, { "content": "/// Generate digest of length 256 bits (32bytes) from arbitrary binary data\n\npub fn digest_256(data: &[u8]) -> Result<Vec<u8>, Blake2bError> {\n\n digest(data, 32)\n\n}\n\n\n", "file_path": "crypto/src/blake2b.rs", "rank": 49, "score": 263655.54081808723 }, { "content": "pub fn hydrate_current_head(\n\n init_storage_data: &StorageInitInfo,\n\n persistent_storage: &PersistentStorage,\n\n) -> Result<Arc<BlockHeaderWithHash>, StorageError> {\n\n // check last stored current_head\n\n let current_head = match ChainMetaStorage::new(persistent_storage)\n\n .get_current_head(&init_storage_data.chain_id)?\n\n {\n\n Some(head) => head,\n\n None => {\n\n return Err(StorageError::MissingKey {\n\n when: \"current_head\".into(),\n\n })\n\n }\n\n };\n\n\n\n // get block_header data\n\n match 
BlockStorage::new(persistent_storage).get(current_head.block_hash())? {\n\n Some(block) => Ok(Arc::new(block)),\n\n None => Err(StorageError::MissingKey {\n", "file_path": "storage/src/lib.rs", "rank": 50, "score": 261466.09236546088 }, { "content": "pub fn commit(\n\n _rt: &OCamlRuntime,\n\n _new_context_hash: OCamlRef<OCamlContextHash>,\n\n irmin_time: f64,\n\n tezedge_time: f64,\n\n) {\n\n let irmin_time = get_time(irmin_time);\n\n let tezedge_time = get_time(tezedge_time);\n\n\n\n if let Err(e) = send_msg(TimingMessage::Commit {\n\n irmin_time,\n\n tezedge_time,\n\n }) {\n\n eprintln!(\"Timing commit hook error = {:?}\", e);\n\n }\n\n}\n\n\n", "file_path": "tezos/context/src/timings.rs", "rank": 51, "score": 261138.8457613745 }, { "content": "#[inline(always)]\n\npub fn bounded<'a, O, F>(max: usize, mut f: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, O>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n O: Clone,\n\n{\n\n move |input: NomInput| {\n\n let max = std::cmp::min(max, input.input_len());\n\n let bounded = input.slice(std::ops::RangeTo { end: max });\n\n match f.parse(bounded) {\n\n Ok((rest, parsed)) => Ok((\n\n input.slice(std::ops::RangeFrom {\n\n start: max - rest.input_len(),\n\n }),\n\n parsed,\n\n )),\n\n Err(Err::Error(DecodeError {\n\n input,\n\n kind: error::DecodeErrorKind::Nom(ErrorKind::Eof),\n\n other,\n\n })) => Err(Err::Error(DecodeError {\n\n input,\n\n kind: error::DecodeErrorKind::Boundary(BoundedEncodingKind::Bounded),\n\n other,\n\n })),\n\n e => e,\n\n }\n\n }\n\n}\n\n\n\n/// Applies the `parser` to the input, addin field context to the error.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 52, "score": 260640.29528831632 }, { "content": "/// Generates some integers in the specified range.\n\n///\n\n/// Namely, `[min, min + 1, med - 1, med, med + 1, max - 1, max]`\n\npub fn some_in_range<T: IntType>(range: impl RangeBounds<T>) -> IntGenerator<T> {\n\n let (min, max) = decode_range_bounds(range);\n\n let step = 
((max - min - T::one()) >> T::one()) + T::one();\n\n IntGenerator::new(min, max, step, T::one())\n\n}\n\n\n\nmacro_rules! generated_hash {\n\n ($hash:ident) => {\n\n impl Generated for $hash {\n\n fn generator<F: GeneratorFactory>(\n\n field: &str,\n\n f: &mut F,\n\n ) -> Box<dyn Generator<Item = $hash>> {\n\n Box::new(\n\n f.hash_bytes(field, $hash::hash_type())\n\n .map(|bytes: Vec<u8>| $hash::try_from_bytes(bytes.as_slice()).unwrap()),\n\n )\n\n }\n\n }\n\n };\n", "file_path": "tezos/encoding/src/generator.rs", "rank": 53, "score": 258530.7866667192 }, { "content": "/// Helper composer for producing [std::vec::Vec] generator out of lenght generator and element generator\n\npub fn vec_of_items<G1, G2>(item: G1, len: G2) -> impl Generator<Item = Vec<G1::Item>>\n\nwhere\n\n G1: Generator,\n\n G1::Item: Clone,\n\n G2: Generator<Item = usize>,\n\n{\n\n compose(len, item, |len, item| vec![item; len])\n\n}\n\n\n", "file_path": "tezos/encoding/src/generator.rs", "rank": 54, "score": 258294.6601762129 }, { "content": "/// Open commit log at a given path.\n\npub fn open_main_db<C: RocksDbColumnFactory>(\n\n rocks_db: Option<Arc<DB>>,\n\n config: &RocksDbConfig<C>,\n\n backend_config: TezedgeDatabaseBackendConfiguration,\n\n log: Logger,\n\n) -> Result<TezedgeDatabase, DatabaseError> {\n\n // TODO - TE-498: Todo Change this\n\n let backend = match backend_config {\n\n TezedgeDatabaseBackendConfiguration::Sled => {\n\n TezedgeDatabaseBackendOptions::SledDB(SledDBBackend::new(config.db_path.as_path())?)\n\n }\n\n TezedgeDatabaseBackendConfiguration::RocksDB => {\n\n if let Some(db) = rocks_db {\n\n TezedgeDatabaseBackendOptions::RocksDB(RocksDBBackend::from_db(db)?)\n\n } else {\n\n return Err(DatabaseError::FailedToOpenDatabase);\n\n }\n\n }\n\n TezedgeDatabaseBackendConfiguration::EdgeKV => TezedgeDatabaseBackendOptions::EdgeKV(\n\n EdgeKVBackend::new(config.db_path.as_path(), edgekv_db_cols())?,\n", "file_path": "storage/src/persistent/mod.rs", "rank": 55, "score": 
257272.58383747106 }, { "content": "#[test]\n\npub fn test_context_copy() -> Result<(), anyhow::Error> {\n\n context_copy(\n\n ContextKvStoreConfiguration::InMem,\n\n \"__context:test_context_copy\",\n\n )\n\n}\n\n\n", "file_path": "tezos/context/tests/context.rs", "rank": 56, "score": 256480.03042926837 }, { "content": "/// Helper composer for producing [std::opt::Option] generator out of element generator\n\npub fn option<G1, G2>(item: G1, presence: G2) -> impl Generator<Item = Option<G1::Item>>\n\nwhere\n\n G1: Generator,\n\n G2: Generator<Item = bool>,\n\n{\n\n // TODO use optimized version that does not issue [None] multiple times.\n\n compose(\n\n presence,\n\n item,\n\n |presence, item| if presence { Some(item) } else { None },\n\n )\n\n}\n\n\n", "file_path": "tezos/encoding/src/generator.rs", "rank": 57, "score": 255978.59426176638 }, { "content": "pub fn wait(pid: libc::pid_t) -> bool {\n\n // libc::waitpid is unsafe function\n\n unsafe {\n\n let mut status: i32 = 0;\n\n let options: i32 = 0;\n\n return match libc::waitpid(pid, &mut status as *mut i32, options) {\n\n -1 => {\n\n panic!(\"error occured libc::waitpid problem\")\n\n }\n\n _pid => true,\n\n };\n\n }\n\n}\n", "file_path": "ipc/tests/common/mod.rs", "rank": 58, "score": 255865.73310815723 }, { "content": "/// Extract values from `data` to store them in `storage`.\n\n/// Return an `Object`, which can be ids (refering to data inside `storage`) or a `Commit`\n\npub fn deserialize_object(\n\n bytes: &[u8],\n\n object_offset: AbsoluteOffset,\n\n storage: &mut Storage,\n\n strings: &mut StringInterner,\n\n repository: &ContextKeyValueStore,\n\n) -> Result<Object, DeserializationError> {\n\n use DeserializationError::*;\n\n\n\n let header = bytes.get(0).copied().ok_or(UnexpectedEOF)?;\n\n let header: ObjectHeader = ObjectHeader::from_bytes([header]);\n\n\n\n let (header_nbytes, object_length) = read_object_length(bytes, &header)?;\n\n\n\n let bytes = bytes\n\n .get(header_nbytes..object_length)\n\n 
.ok_or(UnexpectedEOF)?;\n\n\n\n let (object_hash_id, nbytes) = deserialize_hash_id(bytes)?;\n\n\n", "file_path": "tezos/context/src/serialize/persistent.rs", "rank": 59, "score": 255690.4997311269 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn serialize_object(\n\n object: &Object,\n\n object_hash_id: HashId,\n\n output: &mut Vec<u8>,\n\n storage: &Storage,\n\n strings: &StringInterner,\n\n stats: &mut SerializeStats,\n\n _batch: &mut Vec<(HashId, Arc<[u8]>)>,\n\n _referenced_older_objects: &mut Vec<HashId>,\n\n repository: &mut ContextKeyValueStore,\n\n file_offset: Option<AbsoluteOffset>,\n\n) -> Result<Option<AbsoluteOffset>, SerializationError> {\n\n let start = output.len();\n\n\n\n let file_offset = file_offset.ok_or(SerializationError::MissingOffset)?;\n\n let mut offset: AbsoluteOffset = file_offset.add_offset(start as u64);\n\n\n\n match object {\n\n Object::Directory(dir_id) => {\n\n if let Some(inode_id) = dir_id.get_inode_id() {\n", "file_path": "tezos/context/src/serialize/persistent.rs", "rank": 60, "score": 255666.813897088 }, { "content": "/// Stores commit_genesis result to storage and mark genesis block as applied, if everythnig is ok.\n\n/// !Important, this rewrites context_hash on stored genesis - because in initialize_storage_with_genesis_block we stored wiht Context_hash_zero\n\n/// And context hash of block is used for appling of successor\n\npub fn store_commit_genesis_result(\n\n block_storage: &BlockStorage,\n\n block_meta_storage: &BlockMetaStorage,\n\n chain_meta_storage: &ChainMetaStorage,\n\n operations_meta_storage: &OperationsMetaStorage,\n\n init_storage_data: &StorageInitInfo,\n\n bock_result: CommitGenesisResult,\n\n) -> Result<(), StorageError> {\n\n // store data for genesis\n\n let genesis_block_hash = &init_storage_data.genesis_block_header_hash;\n\n let chain_id = &init_storage_data.chain_id;\n\n\n\n // if everything is stored and ok, we can considere genesis block as applied\n\n // if storage is empty, 
initialize with genesis\n\n block_meta_storage.put(\n\n genesis_block_hash,\n\n &block_meta_storage::Meta::genesis_meta(genesis_block_hash, chain_id, true),\n\n )?;\n\n operations_meta_storage.put(\n\n genesis_block_hash,\n", "file_path": "storage/src/lib.rs", "rank": 61, "score": 255242.3215397842 }, { "content": "pub fn bytes<T: AsRef<[u8]>>(bytes: T, out: &mut Vec<u8>) -> BinResult {\n\n out.extend_from_slice(bytes.as_ref());\n\n Ok(())\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 62, "score": 254226.91353068256 }, { "content": "/// Generates all integers in the specified range.\n\npub fn full_range<T: IntType>(range: impl RangeBounds<T>) -> IntGenerator<T> {\n\n let (min, max) = decode_range_bounds(range);\n\n IntGenerator::new(min, max, T::one(), T::zero())\n\n}\n\n\n", "file_path": "tezos/encoding/src/generator.rs", "rank": 63, "score": 254122.86415747472 }, { "content": "#[inline(always)]\n\npub fn bounded_string<'a>(max: usize) -> impl FnMut(NomInput<'a>) -> NomResult<'a, String> {\n\n map_res(\n\n complete(length_data(bounded_size(BoundedEncodingKind::String, max))),\n\n |bytes| std::str::from_utf8(bytes).map(str::to_string),\n\n )\n\n}\n\n\n\n/// Parser that applies specified parser to the fixed length slice of input.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 64, "score": 251939.46622545083 }, { "content": "/// NOTE: duplicate block_meta_storage tests module\n\n/// Create and return a storage with [number_of_blocks] blocks and the last BlockHash in it\n\nfn init_mocked_storage(number_of_blocks: usize) -> Result<(BlockMetaStorage, BlockHash), Error> {\n\n let tmp_storage = TmpStorage::create(\"__mocked_storage\")?;\n\n let storage = BlockMetaStorage::new(tmp_storage.storage());\n\n let mut block_hash_set = HashSet::new();\n\n let mut rng = rand::thread_rng();\n\n\n\n let k: BlockHash = vec![0; 32].try_into()?;\n\n let v = Meta::new(\n\n false,\n\n Some(vec![0; 32].try_into()?),\n\n 0,\n\n vec![44; 4].try_into()?,\n\n 
);\n\n\n\n block_hash_set.insert(k.clone());\n\n\n\n storage.put(&k, &v)?;\n\n assert!(storage.get(&k)?.is_some());\n\n\n\n // save for the iteration\n", "file_path": "storage/benches/predecessor_benchmarks.rs", "rank": 65, "score": 251015.3017574332 }, { "content": "pub fn wait(pid: libc::pid_t) -> bool {\n\n // libc::waitpid is unsafe function\n\n unsafe {\n\n let mut status: i32 = 0;\n\n let options: i32 = 0;\n\n\n\n match libc::waitpid(pid, &mut status as *mut i32, options) {\n\n -1 => {\n\n panic!(\"error occured libc::waitpid problem\")\n\n }\n\n _pid => true,\n\n }\n\n }\n\n}\n\n\n", "file_path": "async-ipc/tests/common/mod.rs", "rank": 66, "score": 250709.08670659704 }, { "content": "pub fn fold_consecutive_locations(locations: &[Location]) -> Vec<Range> {\n\n if locations.is_empty() {\n\n Vec::with_capacity(0)\n\n } else {\n\n let mut ranges = Vec::with_capacity(locations.len());\n\n\n\n let mut prev = locations[0];\n\n let mut range = Range(prev.0, prev.1, 1);\n\n for curr in &locations[1..] 
{\n\n if curr.is_consecutive(&prev) {\n\n range.1 += curr.1;\n\n range.2 += 1;\n\n } else {\n\n ranges.push(range);\n\n range = Range(curr.0, curr.1, 1);\n\n }\n\n prev = *curr;\n\n }\n\n ranges.push(range);\n\n\n", "file_path": "storage/src/commit_log/mod.rs", "rank": 67, "score": 249300.70313182066 }, { "content": "fn in_bounds_and_over<T: IntType>(upper_bound: T) -> impl Generator<Item = T> {\n\n some_in_range(..=upper_bound).and(some_in_range(\n\n upper_bound + T::one()..(upper_bound << T::one()),\n\n ))\n\n}\n\n\n", "file_path": "tezos/messages/tests/encoding_diff_fuzz.rs", "rank": 68, "score": 248419.37019816146 }, { "content": "pub trait Persistable {\n\n fn is_persistent(&self) -> bool;\n\n}\n\n\n", "file_path": "storage/src/persistent/mod.rs", "rank": 69, "score": 248118.61974942475 }, { "content": "#[inline(always)]\n\nfn bounded_size(kind: BoundedEncodingKind, max: usize) -> impl FnMut(NomInput) -> NomResult<u32> {\n\n move |input| {\n\n let i = <&[u8]>::clone(&input);\n\n let (input, size) = size(input)?;\n\n if size as usize <= max {\n\n Ok((input, size))\n\n } else {\n\n Err(Err::Error(DecodeError::limit(i, kind.clone())))\n\n }\n\n }\n\n}\n\n\n\n/// Reads Tesoz string encoded as a 32-bit length followed by the string bytes.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 70, "score": 247039.8220720922 }, { "content": "#[test]\n\npub fn test_context_set_get_commit() -> Result<(), anyhow::Error> {\n\n context_set_get_commit(\n\n ContextKvStoreConfiguration::InMem,\n\n \"__context:test_context_set_get_commit\",\n\n )\n\n}\n\n\n", "file_path": "tezos/context/tests/context.rs", "rank": 71, "score": 247018.9527100228 }, { "content": "#[test]\n\npub fn test_context_delete_and_remove_persistent() -> Result<(), anyhow::Error> {\n\n context_delete_and_remove(\n\n ContextKvStoreConfiguration::OnDisk(\"\".to_string()),\n\n \"__context:test_context_delete_and_remove_persistent\",\n\n )\n\n}\n\n\n", "file_path": "tezos/context/tests/context.rs", "rank": 72, 
"score": 246904.57034185575 }, { "content": "#[test]\n\nfn generator_test_cloned_gen() -> Result<(), Error> {\n\n use rocksdb::{Options, DB};\n\n\n\n // logger\n\n let log_level = tests_common::log_level();\n\n let log = tests_common::create_logger(log_level);\n\n\n\n let path = out_dir_path(\"__sequence_multiseq\");\n\n if path.exists() {\n\n std::fs::remove_dir_all(&path).unwrap();\n\n }\n\n\n\n {\n\n let cache = Cache::new_lru_cache(32 * 1024 * 1024).unwrap();\n\n let backend = if cfg!(feature = \"maindb-backend-rocksdb\") {\n\n let db = open_kv(\n\n &path,\n\n vec![Sequences::descriptor(&cache)],\n\n &DbConfiguration::default(),\n\n )?;\n", "file_path": "storage/tests/sequences.rs", "rank": 73, "score": 245157.60766635707 }, { "content": "#[test]\n\nfn generator_test_continuation_after_persist() -> Result<(), Error> {\n\n use rocksdb::{Options, DB};\n\n\n\n // logger\n\n let log_level = tests_common::log_level();\n\n let log = tests_common::create_logger(log_level);\n\n\n\n let path = out_dir_path(\"__sequence_continuation\");\n\n if path.exists() {\n\n std::fs::remove_dir_all(&path).unwrap();\n\n }\n\n\n\n {\n\n let cache = Cache::new_lru_cache(32 * 1024 * 1024).unwrap();\n\n let backend = if cfg!(feature = \"maindb-backend-rocksdb\") {\n\n let db = open_kv(\n\n &path,\n\n vec![Sequences::descriptor(&cache)],\n\n &DbConfiguration::default(),\n\n )?;\n", "file_path": "storage/tests/sequences.rs", "rank": 74, "score": 245007.78231588873 }, { "content": "#[test]\n\npub fn test_context_hash_from_working_tree_memory() -> Result<(), anyhow::Error> {\n\n context_hash_from_working_tree(\n\n ContextKvStoreConfiguration::InMem,\n\n \"__context:test_context_hash_from_working_tree_memory\",\n\n )\n\n}\n\n\n", "file_path": "tezos/context/tests/context.rs", "rank": 75, "score": 242610.34037585987 }, { "content": "fn kv(i: usize) -> Vec<u8> {\n\n let i = i % SPACE;\n\n let k = [(i >> 16) as u8, (i >> 8) as u8, i as u8];\n\n k.to_vec()\n\n}\n\n\n", "file_path": 
"edgekv/src/tests/mod.rs", "rank": 76, "score": 241933.3914824535 }, { "content": "/// alternative to ocaml Operation_list_list_hash.empty\n\npub fn get_empty_operation_list_list_hash() -> Result<OperationListListHash, FromBase58CheckError> {\n\n OperationListListHash::try_from(\"LLoZS2LW3rEi7KYU4ouBQtorua37aWWCtpDmv1n2x3xoKi6sVXLWp\")\n\n}\n\n\n\n/// Enum representing different Tezos environment.\n\n#[derive(Serialize, Deserialize, Copy, Clone, Debug, PartialEq, Eq, Hash, EnumIter)]\n\npub enum TezosEnvironment {\n\n Custom,\n\n Mainnet,\n\n Sandbox,\n\n Zeronet,\n\n Alphanet,\n\n Babylonnet,\n\n Carthagenet,\n\n Delphinet,\n\n Edonet,\n\n Edo2net,\n\n Florencenet,\n\n Granadanet,\n\n Hangzhounet,\n", "file_path": "tezos/api/src/environment.rs", "rank": 77, "score": 240312.57008579612 }, { "content": "#[test]\n\nfn block_header_with_hash_encoded_equals_decoded() -> Result<(), Error> {\n\n let hash: BlockHash = \"BL84RJX8tqB3WkFPWCcg1Lm6KYE5gns9UYFguihG5Yy17UwnL3b\".try_into()?;\n\n let hash_bytes = hash.as_ref().to_vec();\n\n let expected = BlockHeaderWithHash::new(\n\n BlockHeaderBuilder::default()\n\n .level(34)\n\n .proto(1)\n\n .predecessor(\"BKyQ9EofHrgaZKENioHyP4FZNsTmiSEcVmcghgzCC9cGhE7oCET\".try_into()?)\n\n .timestamp(5_635_634)\n\n .validation_pass(4)\n\n .operations_hash(\"LLoaGLRPRx3Zf8kB4ACtgku8F4feeBiskeb41J1ciwfcXB3KzHKXc\".try_into()?)\n\n .fitness(vec![vec![0, 0]])\n\n .context(\"CoVmAcMV64uAQo8XvfLr9VDuz7HVZLT4cgK1w1qYmTjQNbGwQwDd\".try_into()?)\n\n .protocol_data(vec![0, 1, 2, 3, 4, 5, 6, 7, 8])\n\n .hash(hash_bytes.into())\n\n .build()\n\n .unwrap(),\n\n )?;\n\n let encoded_bytes = expected.encode()?;\n\n let decoded = BlockHeaderWithHash::decode(&encoded_bytes)?;\n\n Ok(assert_eq!(expected, decoded))\n\n}\n\n\n", "file_path": "storage/tests/block_header_with_hash.rs", "rank": 78, "score": 240207.11890341324 }, { "content": "/// Generator yelding specified values.\n\npub fn values<T: Clone>(vs: impl AsRef<[T]>) -> ValuesGenerator<T> {\n\n 
ValuesGenerator {\n\n values: vs.as_ref().to_vec(),\n\n index: 0,\n\n }\n\n}\n\n\n\nmacro_rules! tuple_trait {\n\n\t($num1:tt $name1:ident, $num2:tt $name2:ident, $($num:tt $name:ident),*) => (\n\n tuple_trait!(__impl $num1 $name1, $num2 $name2; $($num $name),*);\n\n );\n\n (__impl $($num:tt $name:ident),+; $num1:tt $name1:ident, $($num2:tt $name2:ident),* ) => (\n\n tuple_trait_impl!($($num $name),*);\n\n tuple_trait!(__impl $($num $name),*, $num1 $name1; $($num2 $name2),*);\n\n );\n\n (__impl $($num:tt $name:ident),*; $num1:tt $name1:ident) => (\n\n tuple_trait_impl!($($num $name),*);\n\n tuple_trait_impl!($($num $name),*, $num1 $name1);\n\n );\n\n}\n", "file_path": "tezos/encoding/src/generator.rs", "rank": 79, "score": 240115.98341485998 }, { "content": "pub fn storage_request_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::StorageRequestCreate(action) => {\n\n state.storage.requests.add(StorageRequestState {\n\n status: StorageRequestStatus::Idle,\n\n payload: action.payload.clone(),\n\n });\n\n }\n\n Action::StorageRequestPending(action) => {\n\n if let Some(req) = state.storage.requests.get_mut(action.req_id) {\n\n match &req.status {\n\n StorageRequestStatus::Idle => {\n\n req.status = StorageRequestStatus::Pending;\n\n }\n\n _ => return,\n\n }\n\n }\n\n }\n\n Action::StorageRequestError(action) => {\n\n if let Some(req) = state.storage.requests.get_mut(action.req_id) {\n", "file_path": "shell_automaton/src/storage/request/storage_request_reducer.rs", "rank": 80, "score": 239714.8311753833 }, { "content": "fn make_test_block_header() -> Result<BlockHeaderWithHash, Error> {\n\n let message_bytes = 
hex::decode(\"00006d6e0102dd00defaf70c53e180ea148b349a6feb4795610b2abc7b07fe91ce50a90814000000005c1276780432bc1d3a28df9a67b363aa1638f807214bb8987e5f9c0abcbd69531facffd1c80000001100000001000000000800000000000c15ef15a6f54021cb353780e2847fb9c546f1d72c1dc17c3db510f45553ce501ce1de000000000003c762c7df00a856b8bfcaf0676f069f825ca75f37f2bee9fe55ba109cec3d1d041d8c03519626c0c0faa557e778cb09d2e0c729e8556ed6a7a518c84982d1f2682bc6aa753f\")?;\n\n let block_header = BlockHeaderWithHash::new(BlockHeader::from_bytes(message_bytes)?)?;\n\n Ok(block_header)\n\n}\n", "file_path": "storage/tests/block_storage.rs", "rank": 82, "score": 234942.79678080365 }, { "content": "fn make_test_block_header() -> Result<BlockHeaderWithHash, Error> {\n\n let message_bytes = hex::decode(\"00006d6e0102dd00defaf70c53e180ea148b349a6feb4795610b2abc7b07fe91ce50a90814000000005c1276780432bc1d3a28df9a67b363aa1638f807214bb8987e5f9c0abcbd69531facffd1c80000001100000001000000000800000000000c15ef15a6f54021cb353780e2847fb9c546f1d72c1dc17c3db510f45553ce501ce1de000000000003c762c7df00a856b8bfcaf0676f069f825ca75f37f2bee9fe55ba109cec3d1d041d8c03519626c0c0faa557e778cb09d2e0c729e8556ed6a7a518c84982d1f2682bc6aa753f\")?;\n\n let block_header = BlockHeaderWithHash::new(BlockHeader::from_bytes(message_bytes)?)?;\n\n Ok(block_header)\n\n}\n\n\n", "file_path": "storage/tests/storage_for_shell.rs", "rank": 83, "score": 234942.79678080365 }, { "content": "fn find_block_at_distance_benchmark(c: &mut Criterion) {\n\n let (storage, last_block_hash) = init_mocked_storage(100_000).unwrap();\n\n\n\n // just, check if impl. is correct\n\n assert_eq!(\n\n find_block_at_distance_old(&storage, last_block_hash.clone(), 99_998).unwrap(),\n\n storage\n\n .find_block_at_distance(last_block_hash.clone(), 99_998)\n\n .unwrap()\n\n );\n\n\n\n // run bench\n\n c.bench_function(\"find_block_at_distance\", |b| {\n\n b.iter(|| storage.find_block_at_distance(last_block_hash.clone(), 99_998))\n\n });\n\n}\n\n\n\ncriterion_group! 
{\n\n name = benches;\n\n config = Criterion::default();\n\n targets = find_block_at_distance_benchmark\n\n}\n\n\n\ncriterion_main!(benches);\n", "file_path": "storage/benches/predecessor_benchmarks.rs", "rank": 84, "score": 234585.7471094142 }, { "content": "/// Function to find the closest power of 2 value to the distance. Returns the closest power\n\n/// and the rest (distance = 2^closest_power + rest)\n\nfn closest_power_two_and_rest(distance: u32) -> (u32, u32) {\n\n let base: u32 = 2;\n\n\n\n let mut closest_power: u32 = 0;\n\n let mut rest: u32 = 0;\n\n let mut distance: u32 = distance;\n\n\n\n while distance > 1 {\n\n rest += base.pow(closest_power) * (distance % 2);\n\n distance /= 2;\n\n closest_power += 1;\n\n }\n\n (closest_power, rest)\n\n}\n\n\n\nconst LEN_BLOCK_HASH: usize = HashType::BlockHash.size();\n\nconst LEN_CHAIN_ID: usize = HashType::ChainId.size();\n\n\n\nconst MASK_IS_APPLIED: u8 = 0b0000_0001;\n\nconst MASK_HAS_SUCCESSOR: u8 = 0b0000_0010;\n", "file_path": "storage/src/block_meta_storage.rs", "rank": 85, "score": 234129.9337379568 }, { "content": "pub trait Flushable {\n\n fn flush(&self) -> Result<(), anyhow::Error>;\n\n}\n\n\n", "file_path": "storage/src/persistent/mod.rs", "rank": 86, "score": 233920.07965810542 }, { "content": "#[inline(always)]\n\npub fn sized<'a, O, F>(size: usize, f: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, O>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n{\n\n map_parser(take(size), f)\n\n}\n\n\n\n/// Parses optional field. 
Byte `0x00` indicates absence of the field,\n\n/// byte `0xff` preceedes encoding of the existing field.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 87, "score": 233671.44323346444 }, { "content": "fn put_size(size: usize, out: &mut Vec<u8>) -> BinResult {\n\n let size =\n\n u32::try_from(size).map_err(|_| BinError::size_error((u32::MAX >> 2) as usize, size))?;\n\n put_bytes(&size.to_be_bytes(), out);\n\n Ok(())\n\n}\n\n\n", "file_path": "tezos/encoding/src/enc.rs", "rank": 88, "score": 232885.88575633708 }, { "content": "pub fn n_bignum(mut input: NomInput) -> NomResult<BigUint> {\n\n let mut bitslice_vec = Vec::new();\n\n let mut has_next = true;\n\n let mut missing_bits = 0;\n\n while has_next {\n\n let (new_input, byte) = take(1_u8)(input)?;\n\n input = new_input;\n\n let bits = byte.view_bits();\n\n has_next = bits[0];\n\n bitslice_vec.push(&bits[1..]);\n\n missing_bits += 1;\n\n }\n\n let mut bitvec = bitvec![Msb0, u8; 0; missing_bits % 8];\n\n for bitslice in bitslice_vec.into_iter().rev() {\n\n bitvec.extend_from_bitslice(bitslice);\n\n }\n\n Ok((input, BigUint::from_bytes_be(&bitvec.into_vec())))\n\n}\n\n\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 89, "score": 231741.67114245537 }, { "content": "fn serialize_file_sizes(file_sizes: &FileSizes) -> Vec<u8> {\n\n let mut output = Vec::with_capacity(SIZES_REST_BYTES_LENGTH);\n\n\n\n output.extend_from_slice(&file_sizes.commit_counter.to_le_bytes());\n\n output.extend_from_slice(&file_sizes.data_size.to_le_bytes());\n\n output.extend_from_slice(&file_sizes.shape_size.to_le_bytes());\n\n output.extend_from_slice(&file_sizes.shape_index_size.to_le_bytes());\n\n output.extend_from_slice(&file_sizes.commit_index_size.to_le_bytes());\n\n output.extend_from_slice(&file_sizes.strings_size.to_le_bytes());\n\n output.extend_from_slice(&file_sizes.hashes_size.to_le_bytes());\n\n output.extend_from_slice(&file_sizes.big_strings_size.to_le_bytes());\n\n\n\n debug_assert_eq!(output.len(), 
SIZES_REST_BYTES_LENGTH);\n\n\n\n output\n\n}\n\n\n", "file_path": "tezos/context/src/kv_store/persistent.rs", "rank": 90, "score": 231308.34373685115 }, { "content": "/// Arbitrary Blake2b digest generation from generic data.\n\n// Should be noted, that base Blake2b supports arbitrary digest length from 16 to 64 bytes\n\nfn digest(data: &[u8], out_len: usize) -> Result<Vec<u8>, Blake2bError> {\n\n let mut hasher = State::new(out_len, None).map_err(|_| Blake2bError::InvalidLenght)?;\n\n hasher.update(data)?;\n\n\n\n let hash = hasher.finalize()?;\n\n let mut result = Vec::with_capacity(out_len);\n\n result.extend_from_slice(hash.as_ref());\n\n Ok(result)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn blake2b_256() {\n\n let hash = digest_256(b\"hello world\").unwrap();\n\n let expected =\n\n hex::decode(\"256c83b297114d201b30179f3f0ef0cace9783622da5974326b436178aeef610\")\n\n .unwrap();\n", "file_path": "crypto/src/blake2b.rs", "rank": 91, "score": 230671.54869774205 }, { "content": "#[inline(always)]\n\npub fn bounded_dynamic<'a, O, F>(max: usize, f: F) -> impl FnMut(NomInput<'a>) -> NomResult<'a, O>\n\nwhere\n\n F: FnMut(NomInput<'a>) -> NomResult<'a, O>,\n\n O: Clone,\n\n{\n\n length_value(\n\n bounded_size(BoundedEncodingKind::Dynamic, max),\n\n all_consuming(f),\n\n )\n\n}\n\n\n\n/// Applies the parser `f` to the input, limiting it to `max` bytes at most.\n", "file_path": "tezos/encoding/src/nom.rs", "rank": 92, "score": 229656.93363639887 }, { "content": "/// Open RocksDB database at given path with specified Column Family configurations\n\n///\n\n/// # Arguments\n\n/// * `path` - Path to open RocksDB\n\n/// * `cfs` - Iterator of Column Family descriptors\n\npub fn open_kv<P, I>(path: P, cfs: I, cfg: &DbConfiguration) -> Result<DB, DBError>\n\nwhere\n\n P: AsRef<Path>,\n\n I: IntoIterator<Item = ColumnFamilyDescriptor>,\n\n{\n\n DB::open_cf_descriptors(&default_kv_options(cfg), path, 
cfs).map_err(DBError::from)\n\n}\n\n\n\n/// Create default database configuration options,\n\n/// based on recommended setting: https://github.com/facebook/rocksdb/wiki/Setup-Options-and-Basic-Tuning#other-general-options\n\npub(crate) fn default_kv_options(cfg: &DbConfiguration) -> Options {\n\n // default db options\n\n let mut db_opts = Options::default();\n\n db_opts.create_missing_column_families(true);\n\n db_opts.create_if_missing(true);\n\n\n\n // https://github.com/facebook/rocksdb/wiki/Setup-Options-and-Basic-Tuning#other-general-options\n\n db_opts.set_bytes_per_sync(1048576);\n\n db_opts.set_level_compaction_dynamic_level_bytes(true);\n\n db_opts.set_max_background_jobs(6);\n", "file_path": "storage/src/persistent/database.rs", "rank": 93, "score": 229297.1883532012 }, { "content": "/// Returns true, if [block] can be applied\n\npub fn can_apply_block<'b, OP, PA>(\n\n (block, block_metadata): (&'b BlockHash, &'b Meta),\n\n operations_complete: OP,\n\n predecessor_applied: PA,\n\n) -> Result<CanApplyStatus, StorageError>\n\nwhere\n\n OP: Fn(&'b BlockHash) -> Result<bool, StorageError>, /* func returns true, if operations are completed */\n\n PA: Fn(&'b BlockHash) -> Result<bool, StorageError>, /* func returns true, if predecessor is applied */\n\n{\n\n let block_predecessor = block_metadata.predecessor();\n\n\n\n // check if block is already applied, dont need to apply second time\n\n if block_metadata.is_applied() {\n\n return Ok(CanApplyStatus::AlreadyApplied);\n\n }\n\n\n\n // we need to have predecessor (every block has)\n\n if block_predecessor.is_none() {\n\n return Ok(CanApplyStatus::MissingPredecessor);\n\n }\n", "file_path": "shell/src/validation/mod.rs", "rank": 94, "score": 228929.19752329562 }, { "content": "fn put_short_size(size: usize, out: &mut Vec<u8>) -> BinResult {\n\n let size = u8::try_from(size).map_err(|_| BinError::size_error(u8::MAX as usize, size))?;\n\n put_bytes(&size.to_be_bytes(), out);\n\n Ok(())\n\n}\n\n\n", "file_path": 
"tezos/encoding/src/enc.rs", "rank": 95, "score": 228551.4045503462 }, { "content": "#[test]\n\nfn can_deserialize_get_current_head_message_known_valid() -> Result<(), Error> {\n\n let message_bytes = hex::decode(\"0000010400148eceda2f000000ce0003be930116caa5bebae6c1997498bd90b2d2d6dcb14e2cc3a83b38067c784a0b485a4763000000005c8f572e049518937f78bbc2e2d460e7d26daa73c93763362c64c2059f5b7ecaba6e6f580d000000110000000100000000080000000000714aa08e289a17ee0bbd90ef57b80c52318829029fc9e17e4a782248755cdeaafd0dac000000000003e35a661200a75ebed94c886ce8c2700cc2fb38e301e7573f481eff49aea6892068cef7c9290947567e9df3a2cfc99ed9b0666f9c0291f586f65eb9e42cf4cdbef1ef8424d000000020c533d1d8a515b35fac67eb9926a6c983397208511ce69808d57177415654bf090000000400000000\")?;\n\n let message = PeerMessageResponse::from_bytes(message_bytes)?;\n\n let message = message.message();\n\n\n\n match message {\n\n PeerMessage::CurrentHead(current_head_message) => {\n\n assert_eq!(\n\n &hex::decode(\"8eceda2f\")?,\n\n current_head_message.chain_id().as_ref()\n\n );\n\n\n\n let block_header = current_head_message.current_block_header();\n\n assert_eq!(245_395, block_header.level());\n\n assert_eq!(1, block_header.proto());\n\n\n\n let expected_protocol_data = hex::decode(\"000000000003e35a661200a75ebed94c886ce8c2700cc2fb38e301e7573f481eff49aea6892068cef7c9290947567e9df3a2cfc99ed9b0666f9c0291f586f65eb9e42cf4cdbef1ef8424d0\")?;\n\n assert_eq!(&expected_protocol_data, block_header.protocol_data());\n\n\n\n let mempool = current_head_message.current_mempool();\n", "file_path": "tezos/messages/tests/encoding_current_head.rs", "rank": 96, "score": 228514.14728684328 }, { "content": "fn hash(input: &[u8]) -> NomResult<Vec<u8>> {\n\n map(\n\n take(HashType::OperationListListHash.size()),\n\n |slice: &[u8]| slice.to_vec(),\n\n )(input)\n\n}\n\n\n", "file_path": "tezos/messages/src/p2p/encoding/operations_for_blocks.rs", "rank": 97, "score": 228225.47338986536 }, { "content": "#[derive(BitfieldSpecifier)]\n\n#[bits = 
2]\n\n#[derive(Clone, Debug, Eq, PartialEq, Copy)]\n\nenum RelativeOffsetLength {\n\n OneByte,\n\n TwoBytes,\n\n FourBytes,\n\n EightBytes,\n\n}\n\n\n", "file_path": "tezos/context/src/serialize/persistent.rs", "rank": 98, "score": 228067.94062385804 }, { "content": "pub fn identity(pkh: &[u8], pk: &[u8], sk: &[u8], pow: &[u8]) -> Identity {\n\n Identity {\n\n peer_id: CryptoboxPublicKeyHash::try_from_bytes(pkh).unwrap(),\n\n public_key: PublicKey::from_bytes(pk).unwrap(),\n\n secret_key: SecretKey::from_bytes(sk).unwrap(),\n\n proof_of_work_stamp: ProofOfWork::from_hex(hex::encode(pow)).unwrap(),\n\n }\n\n}\n\n\n", "file_path": "shell_automaton/src/config.rs", "rank": 99, "score": 228015.91528188047 } ]
Rust
vendor/pulldown-cmark/src/simd.rs
47565647456/evtx
fbb2a713d335f5208bb6675f4f158babd6f2f389
use crate::parse::{LookupTable, LoopInstruction, Options}; use core::arch::x86_64::*; pub(crate) const VECTOR_SIZE: usize = std::mem::size_of::<__m128i>(); pub(crate) fn compute_lookup(options: &Options) -> [u8; 16] { let mut lookup = [0u8; 16]; let standard_bytes = [ b'\n', b'\r', b'*', b'_', b'&', b'\\', b'[', b']', b'<', b'!', b'`', ]; for &byte in &standard_bytes { add_lookup_byte(&mut lookup, byte); } if options.contains(Options::ENABLE_TABLES) { add_lookup_byte(&mut lookup, b'|'); } if options.contains(Options::ENABLE_STRIKETHROUGH) { add_lookup_byte(&mut lookup, b'~'); } if options.contains(Options::ENABLE_SMART_PUNCTUATION) { for &byte in &[b'.', b'-', b'"', b'\''] { add_lookup_byte(&mut lookup, byte); } } lookup } fn add_lookup_byte(lookup: &mut [u8; 16], byte: u8) { lookup[(byte & 0x0f) as usize] |= 1 << (byte >> 4); } #[target_feature(enable = "ssse3")] #[inline] unsafe fn compute_mask(lut: &[u8; 16], bytes: &[u8], ix: usize) -> i32 { debug_assert!(bytes.len() >= ix + VECTOR_SIZE); let bitmap = _mm_loadu_si128(lut.as_ptr() as *const __m128i); let bitmask_lookup = _mm_setr_epi8(1, 2, 4, 8, 16, 32, 64, -128, -1, -1, -1, -1, -1, -1, -1, -1); let raw_ptr = bytes.as_ptr().add(ix) as *const __m128i; let input = _mm_loadu_si128(raw_ptr); let bitset = _mm_shuffle_epi8(bitmap, input); let higher_nibbles = _mm_and_si128(_mm_srli_epi16(input, 4), _mm_set1_epi8(0x0f)); let bitmask = _mm_shuffle_epi8(bitmask_lookup, higher_nibbles); let tmp = _mm_and_si128(bitset, bitmask); let result = _mm_cmpeq_epi8(tmp, bitmask); _mm_movemask_epi8(result) } pub(crate) fn iterate_special_bytes<F, T>( lut: &LookupTable, bytes: &[u8], ix: usize, callback: F, ) -> (usize, Option<T>) where F: FnMut(usize, u8) -> LoopInstruction<Option<T>>, { if is_x86_feature_detected!("ssse3") && bytes.len() >= VECTOR_SIZE { unsafe { simd_iterate_special_bytes(&lut.simd, bytes, ix, callback) } } else { crate::parse::scalar_iterate_special_bytes(&lut.scalar, bytes, ix, callback) } } unsafe fn 
process_mask<F, T>( mut mask: i32, bytes: &[u8], mut offset: usize, callback: &mut F, ) -> Result<usize, (usize, Option<T>)> where F: FnMut(usize, u8) -> LoopInstruction<Option<T>>, { while mask != 0 { let mask_ix = mask.trailing_zeros() as usize; offset += mask_ix; match callback(offset, *bytes.get_unchecked(offset)) { LoopInstruction::ContinueAndSkip(skip) => { offset += skip + 1; mask >>= skip + 1 + mask_ix; } LoopInstruction::BreakAtWith(ix, val) => return Err((ix, val)), } } Ok(offset) } #[target_feature(enable = "ssse3")] unsafe fn simd_iterate_special_bytes<F, T>( lut: &[u8; 16], bytes: &[u8], mut ix: usize, mut callback: F, ) -> (usize, Option<T>) where F: FnMut(usize, u8) -> LoopInstruction<Option<T>>, { debug_assert!(bytes.len() >= VECTOR_SIZE); let upperbound = bytes.len() - VECTOR_SIZE; while ix < upperbound { let mask = compute_mask(lut, bytes, ix); let block_start = ix; ix = match process_mask(mask, bytes, ix, &mut callback) { Ok(ix) => std::cmp::max(ix, VECTOR_SIZE + block_start), Err((end_ix, val)) => return (end_ix, val), }; } if bytes.len() > ix { let mask = compute_mask(lut, bytes, upperbound) >> ix - upperbound; if let Err((end_ix, val)) = process_mask(mask, bytes, ix, &mut callback) { return (end_ix, val); } } (bytes.len(), None) } #[cfg(test)] mod simd_test { use super::{iterate_special_bytes, LoopInstruction}; use crate::Options; fn check_expected_indices(bytes: &[u8], expected: &[usize], skip: usize) { let mut opts = Options::empty(); opts.insert(Options::ENABLE_TABLES); opts.insert(Options::ENABLE_FOOTNOTES); opts.insert(Options::ENABLE_STRIKETHROUGH); opts.insert(Options::ENABLE_TASKLISTS); let lut = crate::parse::create_lut(&opts); let mut indices = vec![]; iterate_special_bytes::<_, i32>(&lut, bytes, 0, |ix, _byte_ty| { indices.push(ix); LoopInstruction::ContinueAndSkip(skip) }); assert_eq!(&indices[..], expected); } #[test] fn simple_no_match() { check_expected_indices("abcdef0123456789".as_bytes(), &[], 0); } #[test] fn simple_match() 
{ check_expected_indices("*bcd&f0123456789".as_bytes(), &[0, 4], 0); } #[test] fn single_open_fish() { check_expected_indices("<".as_bytes(), &[0], 0); } #[test] fn long_match() { check_expected_indices("0123456789abcde~*bcd&f0".as_bytes(), &[15, 16, 20], 0); } #[test] fn border_skip() { check_expected_indices("0123456789abcde~~~~d&f0".as_bytes(), &[15, 20], 3); } #[test] fn exhaustive_search() { let chars = [ b'\n', b'\r', b'*', b'_', b'~', b'|', b'&', b'\\', b'[', b']', b'<', b'!', b'`', ]; for &c in &chars { for i in 0u8..=255 { if !chars.contains(&i) { let mut buf = [i; 18]; buf[3] = c; buf[6] = c; check_expected_indices(&buf[..], &[3, 6], 0); } } } } }
use crate::parse::{LookupTable, LoopInstruction, Options}; use core::arch::x86_64::*; pub(crate) const VECTOR_SIZE: usize = std::mem::size_of::<__m128i>(); pub(crate) fn compute_lookup(options: &Options) -> [u8; 16] { let mut lookup = [0u8; 16]; let standard_bytes = [ b'\n', b'\r', b'*', b'_', b'&', b'\\', b'[', b']', b'<', b'!', b'`', ]; for &byte in &standard_bytes { add_lookup_byte(&mut lookup, byte); } if options.contains(Options::ENABLE_TABLES) { add_lookup_byte(&mut lookup, b'|'); } if options.contains(Options::ENABLE_STRIKETHROUGH) { add_lookup_byte(&mut lookup, b'~'); } if options.contains(Options::ENABLE_SMART_PUNCTUATION) { for &byte in &[b'.', b'-', b'"', b'\''] { add_lookup_byte(&mut lookup, byte); } } lookup } fn add_lookup_byte(lookup: &mut [u8; 16], byte: u8) { lookup[(byte & 0x0f) as usize] |= 1 << (byte >> 4); } #[target_feature(enable = "ssse3")] #[inline] unsafe fn compute_mask(lut: &[u8; 16], bytes: &[u8], ix: usize) -> i32 { debug_assert!(bytes.len() >= ix + VECTOR_SIZE); let bitmap = _mm_loadu_si128(lut.as_ptr() as *const __m128i); let bitmask_lookup = _mm_setr_epi8(1, 2, 4, 8, 16, 32, 64, -128, -1, -1, -1, -1, -1, -1, -1, -1); let raw_ptr = bytes.as_ptr().add(ix) as *const __m128i; let input = _mm_loadu_si128(raw_ptr); let bitset = _mm_shuffle_epi8(bitmap, input); let higher_nibbles = _mm_and_si128(_mm_srli_epi16(input, 4), _mm_set1_epi8(0x0f)); let bitmask = _mm_shuffle_epi8(bitmask_lookup, higher_nibbles); let tmp = _mm_and_si128(bitset, bitmask); let result = _mm_cmpeq_epi8(tmp, bitmask); _mm_movemask_epi8(result) } pub(crate) fn iterate_special_bytes<F, T>( lut: &LookupTable, bytes: &[u8], ix: usize, callback: F, ) -> (usize, Option<T>) where F: FnMut(usize, u8) -> LoopInstruction<Option<T>>, { if is_x86_feature_detected!("ssse3") && bytes.len() >= VECTOR_SIZE { unsafe { simd_iterate_special_bytes(&lut.simd, bytes, ix, callback) } } else { crate::parse::scalar_iterate_special_bytes(&lut.scalar, bytes, ix, callback) } } unsafe fn 
process_mask<F, T>( mut mask: i32, bytes: &[u8], mut offset: usize, callback: &mut F, ) -> Result<usize, (usize, Option<T>)> where F: FnMut(usize, u8) -> LoopInstruction<Option<T>>, { while mask != 0 { let mask_ix = mask.trailing_zeros() as usize; offset += mask_ix; match callback(offset, *bytes.get_unchecked(offset)) { LoopInstruction::ContinueAndSkip(skip) => { offset += skip + 1; mask >>= skip + 1 + mask_ix; } LoopInstruction::BreakAtWith(ix, val) => return Err((ix, val)), } } Ok(offset) } #[target_feature(enable = "ssse3")] unsafe fn simd_iterate_special_bytes<F, T>( lut: &[u8; 16], bytes: &[u8], mut ix: usize, mut callback: F, ) -> (usize, Option<T>) where F: FnMut(usize, u8) -> LoopInstruction<Option<T>>, { debug_assert!(bytes.len() >= VECTOR_SIZE); let upperbound = bytes.len() - VECTOR_SIZE; while ix < upperbound { let mask = compute_mask(lut, bytes, ix); let block_start = ix; ix = match process_mask(mask, bytes, ix, &mut callback) { Ok(ix) => std::cmp::max(ix, VECTOR_SIZE + block_start), Err((end_ix, val)) => return (end_ix, val), }; } if bytes.len() > ix { let mask = compute_mask(lut, bytes, upperbound) >> ix - upperbound; if let Err((end_ix, val)) = process_mask(mask, bytes, ix, &mut callback) { return (end_ix, val); } } (bytes.len(), None) } #[cfg(test)] mod simd_test { use super::{iterate_special_bytes, LoopIn
pts = Options::empty(); opts.insert(Options::ENABLE_TABLES); opts.insert(Options::ENABLE_FOOTNOTES); opts.insert(Options::ENABLE_STRIKETHROUGH); opts.insert(Options::ENABLE_TASKLISTS); let lut = crate::parse::create_lut(&opts); let mut indices = vec![]; iterate_special_bytes::<_, i32>(&lut, bytes, 0, |ix, _byte_ty| { indices.push(ix); LoopInstruction::ContinueAndSkip(skip) }); assert_eq!(&indices[..], expected); } #[test] fn simple_no_match() { check_expected_indices("abcdef0123456789".as_bytes(), &[], 0); } #[test] fn simple_match() { check_expected_indices("*bcd&f0123456789".as_bytes(), &[0, 4], 0); } #[test] fn single_open_fish() { check_expected_indices("<".as_bytes(), &[0], 0); } #[test] fn long_match() { check_expected_indices("0123456789abcde~*bcd&f0".as_bytes(), &[15, 16, 20], 0); } #[test] fn border_skip() { check_expected_indices("0123456789abcde~~~~d&f0".as_bytes(), &[15, 20], 3); } #[test] fn exhaustive_search() { let chars = [ b'\n', b'\r', b'*', b'_', b'~', b'|', b'&', b'\\', b'[', b']', b'<', b'!', b'`', ]; for &c in &chars { for i in 0u8..=255 { if !chars.contains(&i) { let mut buf = [i; 18]; buf[3] = c; buf[6] = c; check_expected_indices(&buf[..], &[3, 6], 0); } } } } }
struction}; use crate::Options; fn check_expected_indices(bytes: &[u8], expected: &[usize], skip: usize) { let mut o
random
[ { "content": "pub fn read_attribute(cursor: &mut Cursor<&[u8]>) -> Result<BinXMLAttribute> {\n\n trace!(\"Offset `0x{:08x}` - Attribute\", cursor.position());\n\n let name = BinXmlNameRef::from_stream(cursor)?;\n\n\n\n Ok(BinXMLAttribute { name })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 0, "score": 137467.70281404897 }, { "content": "pub fn read_processing_instruction_data(cursor: &mut Cursor<&[u8]>) -> Result<String> {\n\n trace!(\n\n \"Offset `0x{:08x}` - ProcessingInstructionTarget\",\n\n cursor.position(),\n\n );\n\n\n\n let data =\n\n try_read!(cursor, len_prefixed_utf_16_str, \"pi_data\")?.unwrap_or_else(|| \"\".to_string());\n\n trace!(\"PIData - {}\", data,);\n\n Ok(data)\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 1, "score": 137467.70281404897 }, { "content": "/// Reads a utf16 string from the given stream.\n\n/// If `len` is given, exactly `len` u16 values are read from the stream.\n\n/// If `len` is None, the string is assumed to be null terminated and the stream will be read to the first null (0).\n\nfn read_utf16_string<T: ReadSeek>(stream: &mut T, len: Option<usize>) -> io::Result<String> {\n\n let mut buffer = match len {\n\n Some(len) => Vec::with_capacity(len),\n\n None => Vec::new(),\n\n };\n\n\n\n match len {\n\n Some(len) => {\n\n for _ in 0..len {\n\n let next_char = stream.read_u16::<byteorder::LittleEndian>()?;\n\n buffer.push(next_char);\n\n }\n\n }\n\n None => loop {\n\n let next_char = stream.read_u16::<byteorder::LittleEndian>()?;\n\n\n\n if next_char == 0 {\n\n break;\n\n }\n\n\n\n buffer.push(next_char);\n\n },\n\n }\n\n\n\n // We need to stop if we see a NUL byte, even if asked for more bytes.\n\n decode_utf16(buffer.into_iter().take_while(|&byte| byte != 0x00))\n\n .map(|r| r.map_err(|_e| Error::from(ErrorKind::InvalidData)))\n\n .collect()\n\n}\n", "file_path": "src/utils/binxml_utils.rs", "rank": 2, "score": 133285.4985113277 }, { "content": "pub fn read_entity_ref(cursor: &mut Cursor<&[u8]>) -> 
Result<BinXmlEntityReference> {\n\n trace!(\"Offset `0x{:08x}` - EntityReference\", cursor.position());\n\n let name = BinXmlNameRef::from_stream(cursor)?;\n\n trace!(\"\\t name: {:?}\", name);\n\n\n\n Ok(BinXmlEntityReference { name })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 3, "score": 132282.47169435763 }, { "content": "pub fn read_fragment_header(cursor: &mut Cursor<&[u8]>) -> Result<BinXMLFragmentHeader> {\n\n trace!(\"Offset `0x{:08x}` - FragmentHeader\", cursor.position());\n\n let major_version = try_read!(cursor, u8, \"fragment_header_major_version\")?;\n\n let minor_version = try_read!(cursor, u8, \"fragment_header_minor_version\")?;\n\n let flags = try_read!(cursor, u8, \"fragment_header_flags\")?;\n\n Ok(BinXMLFragmentHeader {\n\n major_version,\n\n minor_version,\n\n flags,\n\n })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 4, "score": 132282.47169435763 }, { "content": "pub fn dump_stream<T: ReadSeek>(cursor: &mut T, lookbehind: i32) -> Result<String, Box<dyn Error>> {\n\n let mut s = String::new();\n\n\n\n cursor.seek(SeekFrom::Current(lookbehind.into()))?;\n\n\n\n let mut data = vec![0; 100_usize];\n\n let _ = cursor.read(&mut data)?;\n\n\n\n writeln!(\n\n s,\n\n \"\\n\\n---------------------------------------------------------------------------\"\n\n )?;\n\n writeln!(s, \"Current Value {:02x}\", data[0])?;\n\n writeln!(s, \" --\")?;\n\n write!(s, \"{}\", hexdump(&data, 0, 'C')?)?;\n\n writeln!(\n\n s,\n\n \"\\n----------------------------------------------------------------------------\"\n\n )?;\n\n\n\n Ok(s)\n\n}\n\n\n", "file_path": "src/utils/hexdump.rs", "rank": 5, "score": 110950.08865127376 }, { "content": "/// Reads a utf16 string from the given stream.\n\n/// size is the actual byte representation of the string (not the number of characters).\n\npub fn read_utf16_by_size<T: ReadSeek>(stream: &mut T, size: u64) -> io::Result<Option<String>> {\n\n match size {\n\n 0 => Ok(None),\n\n _ => 
read_utf16_string(stream, Some(size as usize / 2)).map(|mut s| {\n\n // Strip nul terminator if needed\n\n if let Some('\\0') = s.chars().last() {\n\n s.pop();\n\n }\n\n Some(s)\n\n }),\n\n }\n\n}\n\n\n", "file_path": "src/utils/binxml_utils.rs", "rank": 6, "score": 110804.03722179218 }, { "content": "pub fn read_null_terminated_utf16_string<T: ReadSeek>(stream: &mut T) -> io::Result<String> {\n\n read_utf16_string(stream, None)\n\n}\n\n\n", "file_path": "src/utils/binxml_utils.rs", "rank": 7, "score": 88346.74116151183 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let evtx_file = include_bytes!(\"../../samples/security.evtx\");\n\n // ~11ms before strings cache\n\n // ~9ms after strings cache\n\n // ~8ms with cached templates as well\n\n c.bench_function(\"read 90 records\", move |b| {\n\n b.iter(|| process_90_records(evtx_file))\n\n });\n\n\n\n c.bench_function(\"read 90 records json\", move |b| {\n\n b.iter(|| process_90_records_json(evtx_file))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "src/benches/benchmark.rs", "rank": 8, "score": 86740.96209808084 }, { "content": "// first chunk has 90 records\n\nfn process_90_records(buffer: &'static [u8]) {\n\n let mut parser = EvtxParser::from_buffer(buffer.to_vec()).unwrap();\n\n\n\n for (i, record) in parser.records().take(90).enumerate() {\n\n match record {\n\n Ok(r) => {\n\n assert_eq!(r.event_record_id, i as u64 + 1);\n\n }\n\n Err(e) => println!(\"Error while reading record {}, {:?}\", i, e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/benches/benchmark.rs", "rank": 9, "score": 84721.35531524694 }, { "content": "fn process_90_records_json(buffer: &'static [u8]) {\n\n let mut parser = EvtxParser::from_buffer(buffer.to_vec()).unwrap();\n\n\n\n for (i, record) in parser.records_json().take(90).enumerate() {\n\n match record {\n\n Ok(r) => {\n\n assert_eq!(r.event_record_id, i as u64 + 1);\n\n }\n\n Err(e) => println!(\"Error 
while reading record {}, {:?}\", i, e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/benches/benchmark.rs", "rank": 10, "score": 82688.92345945162 }, { "content": "#[inline]\n\npub fn checksum_ieee(data: &[u8]) -> u32 {\n\n let mut hasher = Hasher::new();\n\n hasher.update(data);\n\n hasher.finalize()\n\n}\n\n\n\n// Rust runs the tests concurrently, so unless we synchronize logging access\n\n// it will crash when attempting to run `cargo test` with some logging facilities.\n", "file_path": "src/lib.rs", "rank": 11, "score": 82085.25839026007 }, { "content": "pub fn read_systemtime<R: ReadSeek>(r: &mut R) -> DeserializationResult<DateTime<Utc>> {\n\n let year = try_read!(r, u16)?;\n\n let month = try_read!(r, u16)?;\n\n let _day_of_week = try_read!(r, u16)?;\n\n let day = try_read!(r, u16)?;\n\n let hour = try_read!(r, u16)?;\n\n let minute = try_read!(r, u16)?;\n\n let second = try_read!(r, u16)?;\n\n let milliseconds = try_read!(r, u16)?;\n\n\n\n Ok(DateTime::from_utc(\n\n NaiveDate::from_ymd(i32::from(year), u32::from(month), u32::from(day)).and_hms_nano(\n\n u32::from(hour),\n\n u32::from(minute),\n\n u32::from(second),\n\n u32::from(milliseconds),\n\n ),\n\n Utc,\n\n ))\n\n}\n", "file_path": "src/utils/time.rs", "rank": 12, "score": 80596.223187775 }, { "content": "/// Tests an .evtx file, asserting the number of parsed records matches `count`.\n\nfn test_full_sample(path: impl AsRef<Path>, ok_count: usize, err_count: usize) {\n\n ensure_env_logger_initialized();\n\n let mut parser = EvtxParser::from_path(path).unwrap();\n\n\n\n let mut actual_ok_count = 0;\n\n let mut actual_err_count = 0;\n\n\n\n for r in parser.records() {\n\n if r.is_ok() {\n\n actual_ok_count += 1;\n\n if log::log_enabled!(Level::Debug) {\n\n println!(\"{}\", r.unwrap().data);\n\n }\n\n } else {\n\n actual_err_count += 1;\n\n }\n\n }\n\n assert_eq!(\n\n actual_ok_count, ok_count,\n\n \"XML: Failed to parse all expected records\"\n", "file_path": "tests/test_full_samples.rs", "rank": 13, 
"score": 57037.650530888415 }, { "content": "fn print_line(\n\n line: &[u8],\n\n address: usize,\n\n display: char,\n\n bytes: usize,\n\n) -> Result<String, Box<dyn std::error::Error>> {\n\n let mut s = String::new();\n\n // print address (ex - 000000d0)\n\n write!(s, \"\\n{:08x}:\", address)?;\n\n\n\n let words = if (line.len() % bytes) == 0 {\n\n line.len() / bytes\n\n } else {\n\n (line.len() / bytes) + 1\n\n };\n\n\n\n for b in 0..words {\n\n let word = match bytes {\n\n 1 => u16::from(line[b]),\n\n _ => {\n", "file_path": "src/utils/hexdump.rs", "rank": 14, "score": 46847.31814915155 }, { "content": "#[test]\n\nfn it_respects_directory_output() {\n\n let d = tempdir().unwrap();\n\n let f = d.as_ref().join(\"test.out\");\n\n\n\n let sample = regular_sample();\n\n\n\n let mut cmd = Command::cargo_bin(\"evtx_dump\").expect(\"failed to find binary\");\n\n cmd.args(&[\"-f\", &f.to_string_lossy(), sample.to_str().unwrap()]);\n\n\n\n assert!(\n\n cmd.output().unwrap().stdout.is_empty(),\n\n \"Expected output to be printed to file, but was printed to stdout\"\n\n );\n\n\n\n let mut expected = vec![];\n\n\n\n File::open(&f).unwrap().read_to_end(&mut expected).unwrap();\n\n assert!(\n\n !expected.is_empty(),\n\n \"Expected output to be printed to file\"\n\n )\n\n}\n\n\n", "file_path": "tests/test_cli.rs", "rank": 15, "score": 45564.34433680696 }, { "content": "#[test]\n\nfn test_issue_65() {\n\n test_full_sample(sample_issue_65(), 459, 0)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 16, "score": 45564.34433680696 }, { "content": "/// Dumps bytes at data to the screen as hex.\n\n/// Display may be one of:\n\n/// b One-byte octal display.\n\n/// Display the input offset in hexadecimal, followed by sixteen space-separated, three column, zero-filled, bytes of input data, in octal, per line.\n\n///\n\n/// c One-byte character display. 
One-byte character display.\n\n/// Display the input offset in hexadecimal, followed by sixteen space-separated, three column, space-filled, characters of input data per line.\n\n///\n\n/// C Canonical hex display.\n\n/// Display the input offset in hexadecimal, followed by sixteen space-separated, two column, hexadecimal bytes, followed by the same sixteen bytes in %_p format enclosed in ``|'' characters.\n\n///\n\n/// d Two-byte decimal display.\n\n/// o Two-byte octal display.\n\n/// x Two-byte hexadecimal display.\n\n/// Display the input offset in hexadecimal, followed by eight, space separated, four column, zero-filled, two-byte quantities of input data, in hexadecimal, per line.\n\npub fn hexdump(\n\n data: &[u8],\n\n offset: usize,\n\n display: char,\n\n) -> Result<String, Box<dyn std::error::Error>> {\n\n let mut s = String::new();\n\n let mut address = 0;\n\n\n\n let number_of_bytes = match display {\n\n 'b' => 1,\n\n 'c' => 1,\n\n 'C' => 1,\n\n 'd' => 2,\n\n 'o' => 2,\n\n _ => 2,\n\n };\n\n\n\n while address <= data.len() {\n\n // Read next 16 bytes of until end of data\n\n let end = cmp::min(address + 16, data.len());\n", "file_path": "src/utils/hexdump.rs", "rank": 17, "score": 44955.99570214748 }, { "content": "fn expand_template<'a>(\n\n mut template: BinXmlTemplateRef<'a>,\n\n chunk: &'a EvtxChunk<'a>,\n\n stack: &mut Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>,\n\n) -> Result<()> {\n\n if let Some(template_def) = chunk\n\n .template_table\n\n .get_template(template.template_def_offset)\n\n {\n\n // We expect to find all the templates in the template cache.\n\n for token in template_def.tokens.iter() {\n\n if let BinXMLDeserializedTokens::Substitution(ref substitution_descriptor) = token {\n\n expand_token_substitution(&mut template, substitution_descriptor, chunk, stack)?;\n\n } else {\n\n _expand_templates(Cow::Borrowed(token), chunk, stack)?;\n\n }\n\n }\n\n } else {\n\n // If the file was not closed correctly, there can be a template which was 
not found in the header.\n\n // In that case, we will try to read it directly from the chunk.\n", "file_path": "src/binxml/assemble.rs", "rank": 18, "score": 44943.82336256429 }, { "content": "fn _expand_templates<'a>(\n\n token: Cow<'a, BinXMLDeserializedTokens<'a>>,\n\n chunk: &'a EvtxChunk<'a>,\n\n stack: &mut Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>,\n\n) -> Result<()> {\n\n match token {\n\n // Owned values can be consumed when flatting, and passed on as owned.\n\n Cow::Owned(BinXMLDeserializedTokens::Value(BinXmlValue::BinXmlType(tokens))) => {\n\n for token in tokens.into_iter() {\n\n _expand_templates(Cow::Owned(token), chunk, stack)?;\n\n }\n\n }\n\n\n\n Cow::Borrowed(BinXMLDeserializedTokens::Value(BinXmlValue::BinXmlType(tokens))) => {\n\n for token in tokens.iter() {\n\n _expand_templates(Cow::Borrowed(token), chunk, stack)?;\n\n }\n\n }\n\n // Actual template handling.\n\n Cow::Owned(BinXMLDeserializedTokens::TemplateInstance(template)) => {\n", "file_path": "src/binxml/assemble.rs", "rank": 19, "score": 44943.82336256429 }, { "content": "#[test]\n\nfn test_sample_with_no_crc32() {\n\n test_full_sample(\n\n sample_with_no_crc32(),\n\n 17,\n\n 0,\n\n )\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 20, "score": 44389.9796441549 }, { "content": "#[test]\n\nfn test_it_refuses_to_overwrite_directory() {\n\n let d = tempdir().unwrap();\n\n\n\n let sample = regular_sample();\n\n let mut cmd = Command::cargo_bin(\"evtx_dump\").expect(\"failed to find binary\");\n\n cmd.args(&[\"-f\", &d.path().to_string_lossy(), sample.to_str().unwrap()]);\n\n\n\n cmd.assert().failure().code(1);\n\n}\n\n\n", "file_path": "tests/test_cli.rs", "rank": 21, "score": 44389.9796441549 }, { "content": "fn expand_token_substitution<'a>(\n\n template: &mut BinXmlTemplateRef<'a>,\n\n substitution_descriptor: &TemplateSubstitutionDescriptor,\n\n chunk: &'a EvtxChunk<'a>,\n\n stack: &mut Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>,\n\n) -> Result<()> {\n\n if 
substitution_descriptor.ignore {\n\n return Ok(());\n\n }\n\n\n\n let value = template\n\n .substitution_array\n\n .get_mut(substitution_descriptor.substitution_index as usize);\n\n\n\n if let Some(value) = value {\n\n let value = mem::replace(\n\n value,\n\n BinXMLDeserializedTokens::Value(BinXmlValue::NullType),\n\n );\n\n _expand_templates(Cow::Owned(value), chunk, stack)?;\n", "file_path": "src/binxml/assemble.rs", "rank": 22, "score": 43660.8495502197 }, { "content": "fn expand_string_ref<'a>(\n\n string_ref: &BinXmlNameRef,\n\n chunk: &'a EvtxChunk<'a>,\n\n) -> Result<Cow<'a, BinXmlName>> {\n\n match chunk.string_cache.get_cached_string(string_ref.offset) {\n\n Some(s) => Ok(Cow::Borrowed(s)),\n\n None => {\n\n let mut cursor = Cursor::new(chunk.data);\n\n let cursor_ref = cursor.borrow_mut();\n\n try_seek!(cursor_ref, string_ref.offset, \"Cache missed string\")?;\n\n\n\n let string = BinXmlName::from_stream(cursor_ref)?;\n\n Ok(Cow::Owned(string))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/binxml/assemble.rs", "rank": 23, "score": 43660.8495502197 }, { "content": "#[test]\n\nfn test_dirty_sample_parallel() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/2-system-Security-dirty.evtx\");\n\n\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(8));\n\n\n\n let mut count = 0;\n\n\n\n for r in parser.records() {\n\n r.unwrap();\n\n count += 1;\n\n }\n\n\n\n assert_eq!(count, 14621, \"Parallel iteration failed\");\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 24, "score": 43310.99278615514 }, { "content": "#[test]\n\nfn test_event_xml_sample() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/security.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n 
.records()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/security_event_1.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 25, "score": 43310.99278615514 }, { "content": "#[test]\n\nfn test_event_json_sample() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/security.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records_json()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/security_event_1.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 26, "score": 43310.99278615514 }, { "content": "#[cfg(test)]\n\npub fn ensure_env_logger_initialized() {\n\n use std::io::Write;\n\n\n\n LOGGER_INIT.call_once(|| {\n\n let mut builder = env_logger::Builder::from_default_env();\n\n builder\n\n .format(|buf, record| writeln!(buf, \"[{}] - {}\", record.level(), record.args()))\n\n .init();\n\n });\n\n}\n\n\n\n// Cannot use `cfg(test)` here since `rustdoc` won't look at it.\n\n#[cfg(debug_assertions)]\n\nmod test_readme {\n\n macro_rules! 
calculated_doc {\n\n ($doc:expr, $id:ident) => {\n\n #[doc = $doc]\n\n enum $id {}\n\n }\n\n }\n\n\n\n calculated_doc!(include_str!(\"../README.md\"), _DoctestReadme);\n\n}\n", "file_path": "src/lib.rs", "rank": 27, "score": 42486.48485756763 }, { "content": "pub fn read_substitution_descriptor(\n\n cursor: &mut Cursor<&[u8]>,\n\n optional: bool,\n\n) -> Result<TemplateSubstitutionDescriptor> {\n\n trace!(\n\n \"Offset `0x{:08x}` - SubstitutionDescriptor<optional={}>\",\n\n cursor.position(),\n\n optional\n\n );\n\n let substitution_index = try_read!(cursor, u16)?;\n\n let value_type_token = try_read!(cursor, u8)?;\n\n\n\n let value_type = BinXmlValueType::from_u8(value_type_token).ok_or(\n\n DeserializationError::InvalidValueVariant {\n\n value: value_type_token,\n\n offset: cursor.position(),\n\n },\n\n )?;\n\n\n\n let ignore = optional && (value_type == BinXmlValueType::NullType);\n\n\n\n Ok(TemplateSubstitutionDescriptor {\n\n substitution_index,\n\n value_type,\n\n ignore,\n\n })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 28, "score": 42486.48485756763 }, { "content": "#[cfg(test)]\n\npub fn ensure_env_logger_initialized() {\n\n use std::io::Write;\n\n\n\n LOGGER_INIT.call_once(|| {\n\n let mut builder = env_logger::Builder::from_default_env();\n\n builder\n\n .format(|buf, record| writeln!(buf, \"[{}] - {}\", record.level(), record.args()))\n\n .init();\n\n });\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 29, "score": 42486.48485756763 }, { "content": "#[test]\n\nfn test_it_overwrites_file_anyways_if_passed_flag() {\n\n let d = tempdir().unwrap();\n\n let f = d.as_ref().join(\"test.out\");\n\n\n\n let mut file = File::create(&f).unwrap();\n\n file.write_all(b\"I'm a file!\").unwrap();\n\n\n\n let sample = regular_sample();\n\n let mut cmd = Command::cargo_bin(\"evtx_dump\").expect(\"failed to find binary\");\n\n cmd.args(&[\n\n \"-f\",\n\n &f.to_string_lossy(),\n\n \"--no-confirm-overwrite\",\n\n sample.to_str().unwrap(),\n\n 
]);\n\n\n\n cmd.assert().success();\n\n\n\n let mut expected = vec![];\n\n\n\n File::open(&f).unwrap().read_to_end(&mut expected).unwrap();\n\n assert!(\n\n !expected.is_empty(),\n\n \"Expected output to be printed to file\"\n\n )\n\n}\n", "file_path": "tests/test_cli.rs", "rank": 30, "score": 42316.217806663844 }, { "content": "#[test]\n\nfn test_dirty_sample_with_a_bad_checksum_2() {\n\n // TODO: investigate 2 failing records\n\n test_full_sample(sample_with_a_bad_checksum_2(), 1774, 2)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 31, "score": 42316.217806663844 }, { "content": "// https://github.com/omerbenamram/evtx/issues/10\n\nfn test_dirty_sample_single_threaded() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/2-system-Security-dirty.evtx\");\n\n\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec()).unwrap();\n\n\n\n let mut count = 0;\n\n for r in parser.records() {\n\n r.unwrap();\n\n count += 1;\n\n }\n\n assert_eq!(count, 14621, \"Single threaded iteration failed\");\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 32, "score": 42316.217806663844 }, { "content": "#[test]\n\nfn test_sample_with_multiple_xml_fragments() {\n\n test_full_sample(sample_with_multiple_xml_fragments(), 1146, 0)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 33, "score": 42316.217806663844 }, { "content": "#[test]\n\nfn test_sample_with_invalid_flags_in_header() {\n\n test_full_sample(sample_with_invalid_flags_in_header(), 126, 0)\n\n}\n", "file_path": "tests/test_full_samples.rs", "rank": 34, "score": 42316.217806663844 }, { "content": "#[test]\n\nfn test_dirty_sample_with_a_bad_checksum() {\n\n test_full_sample(sample_with_a_bad_checksum(), 1910, 4)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 35, "score": 42316.217806663844 }, { "content": "pub fn read_template<'a>(\n\n cursor: &mut Cursor<&'a [u8]>,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n\n ansi_codec: 
EncodingRef,\n\n) -> Result<BinXmlTemplateRef<'a>> {\n\n trace!(\"TemplateInstance at {}\", cursor.position());\n\n\n\n let _ = try_read!(cursor, u8)?;\n\n let _template_id = try_read!(cursor, u32)?;\n\n let template_definition_data_offset = try_read!(cursor, u32)?;\n\n\n\n // Need to skip over the template data.\n\n if (cursor.position() as u32) == template_definition_data_offset {\n\n let template_header = read_template_definition_header(cursor)?;\n\n try_seek!(\n\n cursor,\n\n cursor.position() + u64::from(template_header.data_size),\n\n \"Skip cached template\"\n\n )?;\n\n }\n", "file_path": "src/binxml/tokens.rs", "rank": 36, "score": 42124.77544275922 }, { "content": "pub fn expand_templates<'a>(\n\n token_tree: Vec<BinXMLDeserializedTokens<'a>>,\n\n chunk: &'a EvtxChunk<'a>,\n\n) -> Result<Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>> {\n\n // We can assume the new tree will be at least as big as the old one.\n\n let mut stack = Vec::with_capacity(token_tree.len());\n\n\n\n for token in token_tree {\n\n _expand_templates(Cow::Owned(token), chunk, &mut stack)?\n\n }\n\n\n\n Ok(stack)\n\n}\n", "file_path": "src/binxml/assemble.rs", "rank": 37, "score": 42124.77544275922 }, { "content": "pub fn read_open_start_element(\n\n cursor: &mut Cursor<&[u8]>,\n\n chunk: Option<&EvtxChunk>,\n\n has_attributes: bool,\n\n is_substitution: bool,\n\n) -> Result<BinXMLOpenStartElement> {\n\n trace!(\n\n \"Offset `0x{:08x}` - OpenStartElement<has_attributes={}, is_substitution={}>\",\n\n cursor.position(),\n\n has_attributes,\n\n is_substitution\n\n );\n\n\n\n // According to https://github.com/libyal/libevtx/blob/master/documentation/Windows%20XML%20Event%20Log%20(EVTX).asciidoc\n\n // The dependency identifier is not present when the element start is used in a substitution token.\n\n if !is_substitution {\n\n let _dependency_identifier =\n\n try_read!(cursor, u16, \"open_start_element_dependency_identifier\")?;\n\n\n\n trace!(\n", "file_path": "src/binxml/tokens.rs", "rank": 
38, "score": 41407.49799956788 }, { "content": "pub fn read_processing_instruction_target(\n\n cursor: &mut Cursor<&[u8]>,\n\n) -> Result<BinXMLProcessingInstructionTarget> {\n\n trace!(\n\n \"Offset `0x{:08x}` - ProcessingInstructionTarget\",\n\n cursor.position(),\n\n );\n\n\n\n let name = BinXmlNameRef::from_stream(cursor)?;\n\n trace!(\"\\tPITarget Name - {:?}\", name);\n\n Ok(BinXMLProcessingInstructionTarget { name })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 39, "score": 41407.49799956788 }, { "content": "pub fn read_template_definition_header(\n\n cursor: &mut Cursor<&[u8]>,\n\n) -> Result<BinXmlTemplateDefinitionHeader> {\n\n // If any of these fail we cannot reliably report the template information in error.\n\n let next_template_offset = try_read!(cursor, u32, \"next_template_offset\")?;\n\n let template_guid = try_read!(cursor, guid, \"template_guid\")?;\n\n // Data size includes the fragment header, element and end of file token;\n\n // except for the first 33 bytes of the template definition (above)\n\n let data_size = try_read!(cursor, u32, \"template_data_size\")?;\n\n\n\n Ok(BinXmlTemplateDefinitionHeader {\n\n next_template_offset,\n\n guid: template_guid,\n\n data_size,\n\n })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 40, "score": 41407.49799956788 }, { "content": "#[test]\n\nfn test_sample_with_dependency_identifier_edge_case() {\n\n test_full_sample(sample_with_dependency_id_edge_case(), 653, 0)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 41, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_user_data() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n 
.records()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", first_record.data);\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_template_as_substitution.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 42, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_entity_ref() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let record = parser\n\n .records()\n\n .filter_map(|record| record.ok())\n\n .find(|record| record.event_record_id == 28)\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", record.data);\n\n\n\n assert_eq!(\n\n record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_entity_ref.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 43, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_dirty_sample_with_a_bad_chunk_magic() {\n\n test_full_sample(sample_with_a_bad_chunk_magic(), 270, 5)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 44, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_json_sample_with_event_data() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/2-system-Security-dirty.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records_json()\n\n 
.next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_eventdata.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 45, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_dirty_sample_binxml_with_incomplete_template() {\n\n // Contains an unparsable record\n\n test_full_sample(sample_binxml_with_incomplete_template(), 17, 1)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 46, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_dirty_sample_with_a_chunk_past_zeros() {\n\n test_full_sample(sample_with_a_chunk_past_zeroes(), 1160, 0)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 47, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_entity_ref_2() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let record = parser\n\n .records()\n\n .filter_map(|record| record.ok())\n\n .find(|record| record.event_record_id == 25)\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", record.data);\n\n\n\n assert_eq!(\n\n record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_entity_ref_2.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 48, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_parses_sample_with_irregular_boolean_values() {\n\n test_full_sample(sample_with_irregular_values(), 3028, 0);\n\n}\n\n\n", "file_path": 
"tests/test_full_samples.rs", "rank": 49, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_dirty_sample_binxml_with_incomplete_token() {\n\n // Contains an unparsable record\n\n test_full_sample(sample_binxml_with_incomplete_sid(), 6, 1)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 50, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_json_with_multiple_nodes_same_name() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let record = parser\n\n .records_json()\n\n .filter_map(|record| record.ok())\n\n .find(|record| record.event_record_id == 28)\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", record.data);\n\n\n\n assert_eq!(\n\n record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_multiple_nodes_same_name.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 51, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_event_data() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/2-system-Security-dirty.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_eventdata.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": 
"tests/test_record_samples.rs", "rank": 52, "score": 41396.16623090985 }, { "content": "pub fn read_template_definition<'a>(\n\n cursor: &mut Cursor<&'a [u8]>,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n\n ansi_codec: EncodingRef,\n\n) -> Result<BinXMLTemplateDefinition<'a>> {\n\n let header = read_template_definition_header(cursor)?;\n\n\n\n trace!(\n\n \"Offset `0x{:08x}` - TemplateDefinition {}\",\n\n cursor.position(),\n\n header\n\n );\n\n\n\n let template = match BinXmlDeserializer::read_binxml_fragment(\n\n cursor,\n\n chunk,\n\n Some(header.data_size),\n\n false,\n\n ansi_codec,\n\n ) {\n", "file_path": "src/binxml/tokens.rs", "rank": 53, "score": 40950.410750107156 }, { "content": "pub fn create_record_model<'a>(\n\n tokens: Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>,\n\n chunk: &'a EvtxChunk<'a>,\n\n) -> Result<Vec<XmlModel<'a>>> {\n\n let mut current_element: Option<XmlElementBuilder> = None;\n\n let mut current_pi: Option<XmlPIBuilder> = None;\n\n let mut model: Vec<XmlModel> = Vec::with_capacity(tokens.len());\n\n\n\n for token in tokens {\n\n // Handle all places where we don't care if it's an Owned or a Borrowed value.\n\n match token {\n\n Cow::Owned(BinXMLDeserializedTokens::FragmentHeader(_))\n\n | Cow::Borrowed(BinXMLDeserializedTokens::FragmentHeader(_)) => {}\n\n Cow::Owned(BinXMLDeserializedTokens::TemplateInstance(_))\n\n | Cow::Borrowed(BinXMLDeserializedTokens::TemplateInstance(_)) => {\n\n return Err(EvtxError::FailedToCreateRecordModel(\n\n \"Call `expand_templates` before calling this function\",\n\n ));\n\n }\n\n Cow::Owned(BinXMLDeserializedTokens::AttributeList)\n", "file_path": "src/binxml/assemble.rs", "rank": 54, "score": 40950.410750107156 }, { "content": "pub fn samples_dir() -> PathBuf {\n\n PathBuf::from(file!())\n\n .parent()\n\n .unwrap()\n\n .parent()\n\n .unwrap()\n\n .join(\"samples\")\n\n .canonicalize()\n\n .unwrap()\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 55, "score": 40950.410750107156 }, { "content": "pub 
fn regular_sample() -> PathBuf {\n\n samples_dir().join(\"security.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 56, "score": 40950.410750107156 }, { "content": "pub fn sample_issue_65() -> PathBuf {\n\n samples_dir().join(\n\n \"E_ShadowCopy6_windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\",\n\n )\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 57, "score": 40950.410750107156 }, { "content": "pub fn sample_with_no_crc32() -> PathBuf {\n\n samples_dir().join(\"Application_no_crc32.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 58, "score": 40950.410750107156 }, { "content": "#[test]\n\nfn test_event_json_sample_with_separate_json_attributes() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/Application.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(\n\n ParserSettings::new()\n\n .num_threads(1)\n\n .separate_json_attributes(true),\n\n );\n\n\n\n let first_record = parser\n\n .records_json()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/application_event_1_separate_attributes.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n", "file_path": "tests/test_record_samples.rs", "rank": 59, "score": 40542.72345474198 }, { "content": "#[test]\n\nfn test_sample_with_binxml_as_substitution_tokens_and_pi_target() {\n\n test_full_sample(\n\n sample_with_binxml_as_substitution_tokens_and_pi_target(),\n\n 340,\n\n 0,\n\n )\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 60, "score": 40542.72345474198 }, { "content": "pub fn sample_with_irregular_values() -> PathBuf {\n\n samples_dir().join(\"sample-with-irregular-bool-values.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 61, "score": 
39871.42389210741 }, { "content": "pub fn sample_with_a_bad_checksum_2() -> PathBuf {\n\n samples_dir().join(\n\n \"2-vss_0-Microsoft-Windows-TerminalServices-RemoteConnectionManager%4Operational.evtx\",\n\n )\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 62, "score": 39871.42389210741 }, { "content": "pub fn sample_with_a_bad_checksum() -> PathBuf {\n\n samples_dir()\n\n .join(\"2-vss_0-Microsoft-Windows-RemoteDesktopServices-RdpCoreTS%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 63, "score": 39871.42389210741 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_event_data_with_attributes_and_text() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/system.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_text_and_attributes.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 64, "score": 39748.90877164121 }, { "content": "#[test]\n\nfn test_event_json_sample_with_event_data_with_attributes_and_text() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/system.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records_json()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_text_and_attributes.json\")\n\n 
.lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 65, "score": 39748.90877164121 }, { "content": "#[test]\n\nfn test_event_json_with_multiple_nodes_same_name_separate() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(\n\n ParserSettings::new()\n\n .num_threads(1)\n\n .separate_json_attributes(true),\n\n );\n\n\n\n let record = parser\n\n .records_json()\n\n .filter_map(|record| record.ok())\n\n .find(|record| record.event_record_id == 28)\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", record.data);\n\n\n\n assert_eq!(\n\n record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_multiple_nodes_same_name_separate_attr.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n", "file_path": "tests/test_record_separate_json.rs", "rank": 66, "score": 39748.90877164121 }, { "content": "pub fn sample_binxml_with_incomplete_sid() -> PathBuf {\n\n samples_dir().join(\"Microsoft-Windows-HelloForBusiness%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 67, "score": 38876.64891261611 }, { "content": "pub fn sample_binxml_with_incomplete_template() -> PathBuf {\n\n samples_dir().join(\"Microsoft-Windows-LanguagePackSetup%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 68, "score": 38876.64891261611 }, { "content": "pub fn sample_with_invalid_flags_in_header() -> PathBuf {\n\n samples_dir().join(\"post-Security.evtx\")\n\n}\n", "file_path": "tests/fixtures.rs", "rank": 69, "score": 38876.64891261611 }, { "content": "pub fn sample_with_a_chunk_past_zeroes() -> PathBuf {\n\n samples_dir().join(\"2-vss_7-System.evtx\")\n\n}\n\n\n", "file_path": 
"tests/fixtures.rs", "rank": 70, "score": 38876.64891261611 }, { "content": "pub fn sample_with_multiple_xml_fragments() -> PathBuf {\n\n samples_dir()\n\n .join(\"E_Windows_system32_winevt_logs_Microsoft-Windows-Shell-Core%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 71, "score": 38876.64891261611 }, { "content": "pub fn sample_with_a_bad_chunk_magic() -> PathBuf {\n\n samples_dir().join(\"sample_with_a_bad_chunk_magic.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 72, "score": 38876.64891261611 }, { "content": "pub fn sample_with_dependency_id_edge_case() -> PathBuf {\n\n samples_dir().join(\"Archive-ForwardedEvents-test.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 73, "score": 37956.59733686211 }, { "content": "pub fn sample_with_binxml_as_substitution_tokens_and_pi_target() -> PathBuf {\n\n samples_dir().join(\"E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 74, "score": 37103.15456069424 }, { "content": "pub fn parse_tokens<'a, T: BinXmlOutput>(\n\n tokens: Vec<BinXMLDeserializedTokens<'a>>,\n\n chunk: &'a EvtxChunk<'a>,\n\n visitor: &mut T,\n\n) -> Result<()> {\n\n let expanded_tokens = expand_templates(tokens, chunk)?;\n\n let record_model = create_record_model(expanded_tokens, chunk)?;\n\n\n\n visitor.visit_start_of_stream()?;\n\n\n\n let mut stack = vec![];\n\n\n\n for owned_token in record_model {\n\n match owned_token {\n\n XmlModel::OpenElement(open_element) => {\n\n stack.push(open_element);\n\n visitor.visit_open_start_element(stack.last().ok_or({\n\n EvtxError::FailedToCreateRecordModel(\n\n \"Invalid parser state - expected stack to be non-empty\",\n\n )\n", "file_path": "src/binxml/assemble.rs", "rank": 75, "score": 35630.03221423131 }, { "content": "/// Reads an ansi encoded string from the given stream using `ansi_codec`.\n\npub fn read_ansi_encoded_string<T: ReadSeek>(\n\n stream: &mut T,\n\n size: 
u64,\n\n ansi_codec: EncodingRef,\n\n) -> DeserializationResult<Option<String>> {\n\n match size {\n\n 0 => Ok(None),\n\n _ => {\n\n let mut bytes = vec![0; size as usize];\n\n stream.read_exact(&mut bytes)?;\n\n\n\n // There may be multiple NULs in the string, prune them.\n\n bytes.retain(|&b| b != 0);\n\n\n\n let s = match decode(&bytes, DecoderTrap::Strict, ansi_codec).0 {\n\n Ok(s) => s,\n\n Err(message) => {\n\n let as_boxed_err = Box::<dyn StdErr + Send + Sync>::from(message.to_string());\n\n let wrapped_io_err = WrappedIoError::capture_hexdump(as_boxed_err, stream);\n\n return Err(DeserializationError::FailedToReadToken {\n", "file_path": "src/utils/binxml_utils.rs", "rank": 76, "score": 35047.390803510934 }, { "content": "pub fn read_len_prefixed_utf16_string<T: ReadSeek>(\n\n stream: &mut T,\n\n is_null_terminated: bool,\n\n) -> Result<Option<String>, FailedToReadString> {\n\n let expected_number_of_characters = stream.read_u16::<LittleEndian>()?;\n\n let needed_bytes = u64::from(expected_number_of_characters * 2);\n\n\n\n trace!(\n\n \"Offset `0x{offset:08x} ({offset})` reading a{nul}string of len {len}\",\n\n offset = stream.tell().unwrap_or(0),\n\n nul = if is_null_terminated {\n\n \" null terminated \"\n\n } else {\n\n \" \"\n\n },\n\n len = expected_number_of_characters\n\n );\n\n\n\n let s = read_utf16_by_size(stream, needed_bytes)?;\n\n\n", "file_path": "src/utils/binxml_utils.rs", "rank": 77, "score": 34303.443455228895 }, { "content": "mod binxml_utils;\n\npub(super) mod hexdump;\n\nmod time;\n\n\n\npub use self::binxml_utils::{\n\n read_ansi_encoded_string, read_len_prefixed_utf16_string, read_null_terminated_utf16_string,\n\n read_utf16_by_size,\n\n};\n\npub use self::hexdump::{dump_stream, hexdump};\n\npub use self::time::read_systemtime;\n", "file_path": "src/utils/mod.rs", "rank": 78, "score": 33442.05762143888 }, { "content": "pub mod deserializer;\n\npub mod name;\n\npub mod value_variant;\n\n\n\npub(crate) mod assemble;\n\npub(crate) mod 
tokens;\n", "file_path": "src/binxml/mod.rs", "rank": 79, "score": 33438.888188709796 }, { "content": "pub mod deserialized;\n\npub(crate) mod raw;\n\npub(crate) mod xml;\n", "file_path": "src/model/mod.rs", "rank": 80, "score": 33438.71157191007 }, { "content": "fn to_delimited_list<N: ToString>(ns: impl AsRef<Vec<N>>) -> String {\n\n ns.as_ref()\n\n .iter()\n\n .map(ToString::to_string)\n\n .collect::<Vec<String>>()\n\n .join(\",\")\n\n}\n\n\n\nimpl<'c> From<BinXmlValue<'c>> for serde_json::Value {\n\n fn from(value: BinXmlValue<'c>) -> Self {\n\n match value {\n\n BinXmlValue::NullType => Value::Null,\n\n BinXmlValue::StringType(s) => json!(s),\n\n BinXmlValue::AnsiStringType(s) => json!(s.into_owned()),\n\n BinXmlValue::Int8Type(num) => json!(num),\n\n BinXmlValue::UInt8Type(num) => json!(num),\n\n BinXmlValue::Int16Type(num) => json!(num),\n\n BinXmlValue::UInt16Type(num) => json!(num),\n\n BinXmlValue::Int32Type(num) => json!(num),\n\n BinXmlValue::UInt32Type(num) => json!(num),\n", "file_path": "src/binxml/value_variant.rs", "rank": 81, "score": 29603.91717519056 }, { "content": "impl<T: ReadSeek> Iterator for IntoIterChunks<T> {\n\n type Item = Result<EvtxChunkData>;\n\n fn next(&mut self) -> Option<<Self as Iterator>::Item> {\n\n info!(\"Chunk {}\", self.current_chunk_number);\n\n match self.parser.find_next_chunk(self.current_chunk_number) {\n\n None => None,\n\n Some((chunk, chunk_number)) => {\n\n self.current_chunk_number = match chunk_number.checked_add(1) {\n\n None => return None,\n\n Some(n) => n,\n\n };\n\n\n\n Some(chunk)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/evtx_parser.rs", "rank": 82, "score": 17.143307949658276 }, { "content": " }\n\n\n\n /// Returns a tuple of the tokens.\n\n pub fn read_binxml_fragment(\n\n cursor: &mut Cursor<&'a [u8]>,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n\n data_size: Option<u32>,\n\n is_inside_substitution: bool,\n\n ansi_codec: EncodingRef,\n\n ) -> 
Result<Vec<BinXMLDeserializedTokens<'a>>> {\n\n let offset = cursor.position();\n\n\n\n let de = BinXmlDeserializer::init(\n\n *cursor.get_ref(),\n\n offset,\n\n chunk,\n\n is_inside_substitution,\n\n ansi_codec,\n\n );\n\n\n", "file_path": "src/binxml/deserializer.rs", "rank": 83, "score": 16.640577470512497 }, { "content": "use crate::binxml::name::{BinXmlName, BinXmlNameLink};\n\nuse crate::err::DeserializationResult;\n\nuse crate::ChunkOffset;\n\n\n\nuse log::trace;\n\nuse std::borrow::BorrowMut;\n\nuse std::collections::HashMap;\n\nuse std::io::{Cursor, Seek, SeekFrom};\n\n\n\n#[derive(Debug)]\n\npub struct StringCache(HashMap<ChunkOffset, BinXmlName>);\n\n\n\nimpl StringCache {\n\n pub fn populate(data: &[u8], offsets: &[ChunkOffset]) -> DeserializationResult<Self> {\n\n let mut cache = HashMap::new();\n\n let mut cursor = Cursor::new(data);\n\n let cursor_ref = cursor.borrow_mut();\n\n\n\n for &offset in offsets.iter().filter(|&&offset| offset > 0) {\n\n try_seek!(cursor_ref, offset, \"first xml string\")?;\n", "file_path": "src/string_cache.rs", "rank": 84, "score": 16.624239225640277 }, { "content": " 0x94 => Some(BinXmlValueType::HexInt32ArrayType),\n\n 0x95 => Some(BinXmlValueType::HexInt64ArrayType),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> BinXmlValue<'a> {\n\n pub fn from_binxml_stream(\n\n cursor: &mut Cursor<&'a [u8]>,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n\n size: Option<u16>,\n\n ansi_codec: EncodingRef,\n\n ) -> Result<BinXmlValue<'a>> {\n\n let value_type_token = try_read!(cursor, u8)?;\n\n\n\n let value_type = BinXmlValueType::from_u8(value_type_token).ok_or(\n\n DeserializationError::InvalidValueVariant {\n\n value: value_type_token,\n\n offset: cursor.position(),\n", "file_path": "src/binxml/value_variant.rs", "rank": 85, "score": 14.566961925575942 }, { "content": " TemplateCache(HashMap::new())\n\n }\n\n\n\n pub fn populate(\n\n data: &'chunk [u8],\n\n offsets: &[ChunkOffset],\n\n ansi_codec: EncodingRef,\n\n ) -> 
DeserializationResult<Self> {\n\n let mut cache = HashMap::new();\n\n let mut cursor = Cursor::new(data);\n\n let cursor_ref = cursor.borrow_mut();\n\n\n\n for offset in offsets.iter().filter(|&&offset| offset > 0) {\n\n try_seek!(cursor_ref, offset, \"first template\")?;\n\n\n\n loop {\n\n let table_offset = cursor_ref.position() as ChunkOffset;\n\n let definition = read_template_definition(cursor_ref, None, ansi_codec)?;\n\n let next_template_offset = definition.header.next_template_offset;\n\n\n", "file_path": "src/template_cache.rs", "rank": 86, "score": 14.293756325825854 }, { "content": "impl<'a> Iterator for IterTokens<'a> {\n\n type Item = Result<BinXMLDeserializedTokens<'a>>;\n\n\n\n /// yields tokens from the chunk, will return once the chunk is finished.\n\n fn next(&mut self) -> Option<<Self as Iterator>::Item> {\n\n self.inner_next()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::evtx_chunk::EvtxChunkData;\n\n use crate::{ensure_env_logger_initialized, ParserSettings};\n\n use std::sync::Arc;\n\n\n\n #[test]\n\n fn test_reads_a_single_record() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../../samples/security.evtx\");\n\n let from_start_of_chunk = &evtx_file[4096..];\n", "file_path": "src/binxml/deserializer.rs", "rank": 87, "score": 14.19638368974478 }, { "content": " },\n\n )?;\n\n\n\n let data = Self::deserialize_value_type(&value_type, cursor, chunk, size, ansi_codec)?;\n\n\n\n Ok(data)\n\n }\n\n\n\n pub fn deserialize_value_type(\n\n value_type: &BinXmlValueType,\n\n cursor: &mut Cursor<&'a [u8]>,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n\n size: Option<u16>,\n\n ansi_codec: EncodingRef,\n\n ) -> Result<BinXmlValue<'a>> {\n\n trace!(\n\n \"Offset `0x{offset:08x} ({offset}): {value_type:?}, {size:?}\",\n\n offset = cursor.position(),\n\n value_type = value_type,\n\n size = size\n", "file_path": "src/binxml/value_variant.rs", "rank": 88, "score": 13.990274143617704 }, { "content": " use 
quick_xml::Reader;\n\n use std::borrow::Cow;\n\n\n\n fn bytes_to_string(bytes: &[u8]) -> String {\n\n String::from_utf8(bytes.to_vec()).expect(\"UTF8 Input\")\n\n }\n\n\n\n fn dummy_event() -> XmlElement<'static> {\n\n XmlElement {\n\n name: Cow::Owned(BinXmlName::from_str(\"Dummy\")),\n\n attributes: vec![],\n\n }\n\n }\n\n\n\n fn event_to_element(event: BytesStart) -> XmlElement {\n\n let mut attrs = vec![];\n\n\n\n for attr in event.attributes() {\n\n let attr = attr.expect(\"Failed to read attribute.\");\n\n attrs.push(XmlAttribute {\n", "file_path": "src/json_output.rs", "rank": 89, "score": 13.935764256423312 }, { "content": "pub mod err;\n\npub mod model;\n\n\n\nmod evtx_chunk;\n\nmod evtx_file_header;\n\nmod evtx_parser;\n\nmod evtx_record;\n\nmod string_cache;\n\nmod template_cache;\n\nmod utils;\n\n\n\nmod json_output;\n\nmod xml_output;\n\n\n\npub type ChunkOffset = u32;\n\npub type FileOffset = u64;\n\n\n\n// For tests, we only initialize logging once.\n\n#[cfg(test)]\n\nuse std::sync::Once;\n\n\n\n#[cfg(test)]\n\nstatic LOGGER_INIT: Once = Once::new();\n\n\n\nuse crc32fast::Hasher;\n\n\n\n#[inline]\n", "file_path": "src/lib.rs", "rank": 90, "score": 13.74050377819719 }, { "content": "use crate::err::DeserializationResult as Result;\n\n\n\nuse crate::ChunkOffset;\n\npub use byteorder::{LittleEndian, ReadBytesExt};\n\n\n\nuse crate::utils::read_len_prefixed_utf16_string;\n\n\n\nuse std::{\n\n fmt::Formatter,\n\n io::{Cursor, Seek, SeekFrom},\n\n};\n\n\n\nuse quick_xml::events::{BytesEnd, BytesStart};\n\nuse std::fmt;\n\n\n\n#[derive(Debug, PartialEq, PartialOrd, Clone, Hash)]\n\npub struct BinXmlName {\n\n str: String,\n\n}\n\n\n", "file_path": "src/binxml/name.rs", "rank": 91, "score": 13.683805688352278 }, { "content": " self\n\n }\n\n\n\n /// Allocate a new chunk from the given data, at the offset expected by `chunk_number`.\n\n /// If the read chunk contains valid data, an `Ok(Some(EvtxChunkData))` will be returned.\n\n /// If the read chunk contains 
invalid data (bad magic, bad checksum when `validate_checksum` is set to true),\n\n /// of if not enough data can be read (e.g. because we reached EOF), an `Err` is returned.\n\n /// If the read chunk is empty, `Ok(None)` will be returned.\n\n fn allocate_chunk(\n\n data: &mut T,\n\n chunk_number: u64,\n\n validate_checksum: bool,\n\n ) -> Result<Option<EvtxChunkData>> {\n\n let mut chunk_data = Vec::with_capacity(EVTX_CHUNK_SIZE);\n\n let chunk_offset = EVTX_FILE_HEADER_SIZE + chunk_number as usize * EVTX_CHUNK_SIZE;\n\n\n\n trace!(\n\n \"Offset `0x{:08x} ({})` - Reading chunk number `{}`\",\n\n chunk_offset,\n\n chunk_offset,\n", "file_path": "src/evtx_parser.rs", "rank": 92, "score": 13.659761113152074 }, { "content": "#[derive(Debug, PartialOrd, PartialEq, Clone, Hash)]\n\npub struct BinXmlNameRef {\n\n pub offset: ChunkOffset,\n\n}\n\n\n\nimpl fmt::Display for BinXmlName {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.str)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, PartialOrd, Clone)]\n\npub(crate) struct BinXmlNameLink {\n\n pub next_string: Option<ChunkOffset>,\n\n pub hash: u16,\n\n}\n\n\n\nimpl BinXmlNameLink {\n\n pub fn from_stream(stream: &mut Cursor<&[u8]>) -> Result<Self> {\n\n let next_string = try_read!(stream, u32)?;\n", "file_path": "src/binxml/name.rs", "rank": 93, "score": 13.627872617729727 }, { "content": " fn visit_processing_instruction(&mut self, _pi: &BinXmlPI) -> Result<(), SerializationError> {\n\n Err(SerializationError::Unimplemented {\n\n message: format!(\"`{}`: visit_processing_instruction_data\", file!()),\n\n })\n\n }\n\n\n\n fn visit_start_of_stream(&mut self) -> SerializationResult<()> {\n\n trace!(\"visit_start_of_stream\");\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::binxml::name::BinXmlName;\n\n use crate::binxml::value_variant::BinXmlValue;\n\n use crate::model::xml::{XmlAttribute, XmlElement};\n\n use crate::{BinXmlOutput, JsonOutput, ParserSettings};\n\n 
use pretty_assertions::assert_eq;\n\n use quick_xml::events::{BytesStart, Event};\n", "file_path": "src/json_output.rs", "rank": 94, "score": 13.618440771814615 }, { "content": " #![allow(unused_variables)]\n\n\n\n use super::*;\n\n use crate::ensure_env_logger_initialized;\n\n use anyhow::anyhow;\n\n\n\n fn process_90_records(buffer: &'static [u8]) -> anyhow::Result<()> {\n\n let mut parser = EvtxParser::from_buffer(buffer.to_vec())?;\n\n\n\n for (i, record) in parser.records().take(90).enumerate() {\n\n match record {\n\n Ok(r) => {\n\n assert_eq!(r.event_record_id, i as u64 + 1);\n\n }\n\n Err(e) => return Err(anyhow!(\"Error while reading record {}, {:?}\", i, e)),\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n", "file_path": "src/evtx_parser.rs", "rank": 95, "score": 13.60769731842849 }, { "content": "use crate::evtx_chunk::EvtxChunk;\n\nuse encoding::EncodingRef;\n\n\n\nuse std::io::Cursor;\n\nuse std::mem;\n\n\n\npub struct IterTokens<'a> {\n\n cursor: Cursor<&'a [u8]>,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n\n data_size: Option<u32>,\n\n data_read_so_far: u32,\n\n eof: bool,\n\n is_inside_substitution: bool,\n\n ansi_codec: EncodingRef,\n\n}\n\n\n\npub struct BinXmlDeserializer<'a> {\n\n data: &'a [u8],\n\n offset: u64,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n", "file_path": "src/binxml/deserializer.rs", "rank": 96, "score": 13.562519050867929 }, { "content": " let name_hash = try_read!(stream, u16, \"name_hash\")?;\n\n\n\n Ok(BinXmlNameLink {\n\n next_string: if next_string > 0 {\n\n Some(next_string)\n\n } else {\n\n None\n\n },\n\n hash: name_hash,\n\n })\n\n }\n\n\n\n pub fn data_size() -> u32 {\n\n 6\n\n }\n\n}\n\n\n\nimpl BinXmlNameRef {\n\n pub fn from_stream(cursor: &mut Cursor<&[u8]>) -> Result<Self> {\n\n let name_offset = try_read!(cursor, u32, \"name_offset\")?;\n", "file_path": "src/binxml/name.rs", "rank": 97, "score": 13.446187919485041 }, { "content": " pub fn from_reader(input: &mut Cursor<&[u8]>) -> DeserializationResult<EvtxRecordHeader> {\n\n let mut 
magic = [0_u8; 4];\n\n input.take(4).read_exact(&mut magic)?;\n\n\n\n if &magic != b\"\\x2a\\x2a\\x00\\x00\" {\n\n return Err(DeserializationError::InvalidEvtxRecordHeaderMagic { magic });\n\n }\n\n\n\n let size = try_read!(input, u32)?;\n\n let record_id = try_read!(input, u64)?;\n\n let timestamp = try_read!(input, filetime)?;\n\n\n\n Ok(EvtxRecordHeader {\n\n data_size: size,\n\n event_record_id: record_id,\n\n timestamp,\n\n })\n\n }\n\n\n\n pub fn record_data_size(&self) -> u32 {\n", "file_path": "src/evtx_record.rs", "rank": 98, "score": 13.050042184416279 }, { "content": " fn next(&mut self) -> Option<<Self as Iterator>::Item> {\n\n match self.parser.find_next_chunk(self.current_chunk_number) {\n\n None => None,\n\n Some((chunk, chunk_number)) => {\n\n self.current_chunk_number = match chunk_number.checked_add(1) {\n\n None => return None,\n\n Some(n) => n,\n\n };\n\n\n\n Some(chunk)\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct IntoIterChunks<T: ReadSeek> {\n\n parser: EvtxParser<T>,\n\n current_chunk_number: u64,\n\n}\n\n\n", "file_path": "src/evtx_parser.rs", "rank": 99, "score": 12.950974614876127 } ]
Rust
src/creeps/lrh.rs
snorrwe/xenos
2b625daf8edea133949a70dcf1579dddc65a3668
use super::{ approach_target_room, gofer, harvester, update_scout_info, upgrader, CreepState, HOME_ROOM, LOADING, TARGET, TASK, }; use crate::prelude::*; use crate::state::RoomIFF; use num::FromPrimitive; use screeps::prelude::*; const HARVEST_TARGET_ROOM: &'static str = "harvest_target_room"; #[derive(Debug, Clone, Copy, FromPrimitive, ToPrimitive)] #[repr(u8)] enum LrhState { Idle = 0, Loading = 1, Unloading = 2, } pub fn run<'a>(state: &mut CreepState) -> ExecutionResult { let last_task = state.creep_memory_i64(TASK).unwrap_or(0); let last_task = LrhState::from_u32(last_task as u32).unwrap_or(LrhState::Idle); let mut priorities = [0; 3]; priorities[last_task as usize] += 1; let mut tasks = [ Task::new(|state| load(state)) .with_name("Load") .with_state_save(LrhState::Loading) .with_priority(priorities[LrhState::Loading as usize]) .with_required_bucket(2000), Task::new(|state| unload(state)) .with_name("Unload") .with_state_save(LrhState::Unloading) .with_priority(priorities[LrhState::Unloading as usize]), Task::new(|state| harvester::unload(state)) .with_name("Harvester unload") .with_priority(-1), Task::new(|state| upgrader::attempt_upgrade(state)).with_priority(-2), ]; sorted_by_priority(&mut tasks); sequence(state, tasks.iter()) } fn load<'a>(state: &mut CreepState) -> ExecutionResult { trace!("Loading"); if !state.creep_memory_bool(LOADING).unwrap_or(false) { Err("not loading")?; } let creep = state.creep(); if creep.carry_total() == creep.carry_capacity() { state.creep_memory_set(LOADING.into(), false); state.creep_memory_remove(TARGET); Err("full")?; } let tasks = [ Task::new(|state| approach_target_room(state, HARVEST_TARGET_ROOM)) .with_name("Approach target room"), Task::new(|state| set_target_room(state)).with_name("Set target room"), Task::new(|state| { update_scout_info(state)?; Err("continue")? 
}) .with_name("Update scout info"), Task::new(|state| harvester::attempt_harvest(state, Some(TARGET))) .with_name("Attempt harvest"), ]; sequence(state, tasks.iter()) } fn set_target_room<'a>(state: &'a mut CreepState) -> ExecutionResult { { let target = state.creep_memory_string(HARVEST_TARGET_ROOM); if target.is_some() { Err("Already has a target")?; } } let room = { let creep = state.creep(); creep.room() }; let room = WorldPosition::from(room); let neighbours = room.neighbours_in_vectors(); let target = { let gs: &mut GameState = unsafe { &mut *state.mut_game_state() }; let counts: &mut _ = gs .long_range_harvesters .entry(room) .or_insert([0; 4]); let scout_intel = &gs.scout_intel; let (i, target) = neighbours .iter() .enumerate() .filter(|(_, wp)| { scout_intel .get(&wp) .map(|int| match int.iff { RoomIFF::Unknown | RoomIFF::Neutral => true, _ => false, }) .unwrap_or(true) }) .min_by_key(|(i, _)| counts[*i]) .ok_or_else(|| { warn!( "Failed to find target room of LRH {:?} in room {:?}", state.creep().name(), state.creep().room().name() ); "Failed to find a target room" })?; counts[i] += 1; target }; state.creep_memory_set(HARVEST_TARGET_ROOM.into(), target.to_string().as_str()); Ok(()) } fn unload<'a>(state: &mut CreepState) -> ExecutionResult { trace!("Unloading"); if state.creep_memory_bool(LOADING).unwrap_or(false) { Err("loading")?; } if state.creep().carry_total() == 0 { state.creep_memory_set(LOADING.into(), true); state.creep_memory_remove(TARGET); Err("empty")?; } let tasks = [ Task::new(|state| approach_target_room(state, HOME_ROOM)).with_name("Approach target room"), Task::new(|state| gofer::attempt_unload(state)).with_name("Attempt unload"), ]; sequence(state, tasks.iter()) }
use super::{ approach_target_room, gofer, harvester, update_scout_info, upgrader, CreepState, HOME_ROOM, LOADING, TARGET, TASK, }; use crate::prelude::*; use crate::state::RoomIFF; use num::FromPrimitive; use screeps::prelude::*; const HARVEST_TARGET_ROOM: &'static str = "harvest_target_room"; #[derive(Debug, Clone, Copy, FromPrimitive, ToPrimitive)] #[repr(u8)] enum LrhState { Idle = 0, Loading = 1, Unloading = 2, } pub fn run<'a>(state: &mut CreepState) -> ExecutionResult { let last_task = state.creep_memory_i64(TASK).unwrap_or(0); let last_task = LrhState::from_u32(last_task as u32).unwrap_or(LrhState::Idle); let mut priorities = [0; 3]; priorities[last_task as usize] += 1; let mut tasks = [ Task::new(|state| load(state)) .with_name("Load") .with_state_save(LrhState::Loading) .with_priority(priorities[LrhState::Loading as usize]) .with_required_bucket(2000), Task::new(|state| unload(state)) .with_name("Unload") .with_state_save(LrhState::Unloading) .with_priority(priorities[LrhState::Unloading as usize]), Task::new(|state| harvester::unload(state)) .with_name("Harvester unload") .with_priority(-1), Task::new(|state| upgrader::attempt_upgrade(state)).with_priority(-2), ]; sorted_by_priority(&mut tasks); sequence(state, tasks.iter()) } fn load<'a>(state: &mut CreepState) -> ExecutionResult { trace!("Loading"); if !state.creep_memory_bool(LOADING).unwrap_or(false) { Err("not loading")?; } let creep = state.creep(); if creep.carry_total() == creep.carry_capacity() { state.creep_memory_set(LOADING.into(), false); state.creep_memory_remove(TARGET); Err("full")?; } let tasks = [ Task::new(|state| approach_target_room(state, HARVEST_TARGET_ROOM)) .with_name("Approach target room"), Task::new(|state| set_target_room(state)).with_name("Set target room"), Task::new(|state| { update_scout_info(state)?; Err("continue")? 
}) .with_name("Update scout info"), Task::new(|state| harvester::attempt_harvest(state, Some(TARGET))) .with_name("Attempt harvest"), ]; sequence(state, tasks.iter()) } fn set_target_room<'a>(state: &'a mut CreepState) -> ExecutionResult { { let target = state.creep_memory_string(HARVEST_TARGET_ROOM); if target.is_some() { Err("Already has a target")?; } } let room = { let creep = state.creep(); creep.room() }; let room = WorldPosition::from(room); let neighbours = room.neighbours_in_vectors(); let target = { let gs: &mut GameState = unsafe
() .enumerate() .filter(|(_, wp)| { scout_intel .get(&wp) .map(|int| match int.iff { RoomIFF::Unknown | RoomIFF::Neutral => true, _ => false, }) .unwrap_or(true) }) .min_by_key(|(i, _)| counts[*i]) .ok_or_else(|| { warn!( "Failed to find target room of LRH {:?} in room {:?}", state.creep().name(), state.creep().room().name() ); "Failed to find a target room" })?; counts[i] += 1; target }; state.creep_memory_set(HARVEST_TARGET_ROOM.into(), target.to_string().as_str()); Ok(()) } fn unload<'a>(state: &mut CreepState) -> ExecutionResult { trace!("Unloading"); if state.creep_memory_bool(LOADING).unwrap_or(false) { Err("loading")?; } if state.creep().carry_total() == 0 { state.creep_memory_set(LOADING.into(), true); state.creep_memory_remove(TARGET); Err("empty")?; } let tasks = [ Task::new(|state| approach_target_room(state, HOME_ROOM)).with_name("Approach target room"), Task::new(|state| gofer::attempt_unload(state)).with_name("Attempt unload"), ]; sequence(state, tasks.iter()) }
{ &mut *state.mut_game_state() }; let counts: &mut _ = gs .long_range_harvesters .entry(room) .or_insert([0; 4]); let scout_intel = &gs.scout_intel; let (i, target) = neighbours .iter
function_block-random_span
[ { "content": "/// target_key is a memory entry key\n\npub fn approach_target_room(state: &mut CreepState, target_key: &str) -> ExecutionResult {\n\n let target = state.creep_memory_string(target_key).ok_or(\"no target\")?;\n\n\n\n let creep = state.creep();\n\n\n\n let room = creep.room();\n\n let room_name = room.name();\n\n\n\n if room_name == target {\n\n Err(\"Already in the target room\")?;\n\n }\n\n\n\n let result = js! {\n\n const creep = @{creep};\n\n const room = @{target};\n\n const exitDir = creep.room.findExitTo(room);\n\n const exit = creep.pos.findClosestByRange(exitDir);\n\n return creep.moveTo(exit);\n\n };\n\n\n\n let result =\n\n ReturnCode::try_from(result).map_err(|e| format!(\"Failed to parse return code {:?}\", e))?;\n\n\n\n match result {\n\n ReturnCode::NoPath | ReturnCode::InvalidTarget => Err(\"Failed to move\")?,\n\n _ => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 0, "score": 297713.6808485233 }, { "content": "pub fn unload<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let carry_total = state.creep().carry_total();\n\n if carry_total == 0 {\n\n trace!(\"Empty\");\n\n state.creep_memory_remove(TARGET);\n\n Err(\"empty\")?;\n\n }\n\n\n\n let tasks = [\n\n Task::new(|state: &mut CreepState| {\n\n let target = find_unload_target(state).ok_or_else(|| {\n\n state.creep_memory_remove(TARGET);\n\n \"could not find unload target\"\n\n })?;\n\n try_transfer::<StructureContainer>(state, &target)\n\n })\n\n .with_name(\"Try transfer container\"),\n\n Task::new(|state: &mut CreepState| {\n\n let n = unsafe {\n\n let room = state.creep().room();\n", "file_path": "src/creeps/harvester.rs", "rank": 1, "score": 265036.4480015676 }, { "content": "pub fn update_scout_info(state: &mut CreepState) -> ExecutionResult {\n\n let creep = state.creep();\n\n let room = creep.room();\n\n\n\n let n_sources = room.find(find::SOURCES).len() as u8;\n\n\n\n let controller = room.controller();\n\n\n\n let is_my_controller = 
controller\n\n .as_ref()\n\n .map(|c| {\n\n // c.my() can panic\n\n let result = js! {\n\n return @{c}.my;\n\n };\n\n result\n\n })\n\n .map(|my| bool::try_from(my).unwrap_or(false));\n\n\n\n let iff = match is_my_controller {\n", "file_path": "src/creeps/mod.rs", "rank": 2, "score": 261071.0242274552 }, { "content": "/// Fallback harvest, method for a worker to harvest energy temporary\n\n/// ## Contracts:\n\n/// - Should not interfere with the harvester::harvest functionality\n\npub fn harvest(state: &mut CreepState) -> ExecutionResult {\n\n trace!(\"Worker harvesting\");\n\n\n\n {\n\n let loading = state.creep_memory_bool(LOADING);\n\n if !loading.unwrap_or(false) {\n\n Err(\"not loading\")?;\n\n }\n\n let creep = state.creep();\n\n if creep.carry_total() == creep.carry_capacity() {\n\n state.creep_memory_set(LOADING.into(), false);\n\n state.creep_memory_remove(TARGET);\n\n return Ok(());\n\n }\n\n }\n\n\n\n harvester::attempt_harvest(state, Some(TARGET))\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 3, "score": 242883.7519666183 }, { "content": "fn find_unload_target_by_type<'a>(state: &mut CreepState, struct_type: &'a str) -> ExecutionResult {\n\n let res = js! 
{\n\n const creep = @{state.creep()};\n\n const ext = creep.pos.findClosestByRange(FIND_STRUCTURES, {\n\n filter: function (s) {\n\n return s.structureType == @{struct_type} && s.energy < s.energyCapacity;\n\n }\n\n });\n\n return ext && ext.id;\n\n };\n\n let target = String::try_from(res).map_err(|_| \"expected string\")?;\n\n state.creep_memory_set(TARGET.into(), target);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 4, "score": 241239.76085217413 }, { "content": "pub fn attempt_unload<'a>(state: &mut CreepState) -> ExecutionResult {\n\n trace!(\"Unloading\");\n\n let loading = state.creep_memory_bool(LOADING).unwrap_or(false);\n\n if loading {\n\n Err(\"loading\")?;\n\n }\n\n\n\n let creep = state.creep();\n\n\n\n let carry_total = creep.carry_total();\n\n\n\n if carry_total == 0 {\n\n state.creep_memory_set(LOADING.into(), true);\n\n Err(\"empty\")?;\n\n }\n\n\n\n let target = find_unload_target(state).ok_or_else(|| \"no unload target\")?;\n\n\n\n let tasks = [\n\n Task::new(|state: &mut WrappedState<Reference, CreepState>| {\n", "file_path": "src/creeps/gofer.rs", "rank": 5, "score": 235367.24039418166 }, { "content": "pub fn try_transfer<'a, T>(state: &mut CreepState, target: &'a Reference) -> ExecutionResult\n\nwhere\n\n T: Transferable + screeps::traits::TryFrom<&'a Reference>,\n\n{\n\n let target = T::try_from(target).map_err(|_| \"failed to convert transfer target\")?;\n\n transfer(state, &target)\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 6, "score": 234064.54087521852 }, { "content": "pub fn sign_controller(creep: &Creep, msg: &str) -> ExecutionResult {\n\n let controller = creep\n\n .room()\n\n .controller()\n\n .ok_or_else(|| \"Room has no controller\")?;\n\n\n\n if let Some(sign) = controller.sign() {\n\n if sign.username == USERNAME {\n\n Err(\"Already signed\")?;\n\n }\n\n }\n\n\n\n match creep.sign_controller(&controller, msg) {\n\n ReturnCode::Ok => Ok(()),\n\n ReturnCode::NotInRange => move_to(creep, 
&controller),\n\n result => Err(format!(\"failed to sign controller {:?}\", result))?,\n\n }\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 7, "score": 226835.37442485098 }, { "content": "fn harvest_target<'a>(state: &mut CreepState, target_memory: &'a str) -> Option<Source> {\n\n trace!(\"Setting harvest target\");\n\n\n\n let target = state\n\n .creep_memory_string(target_memory)\n\n .and_then(|id| get_object_erased(id));\n\n\n\n if let Some(target) = target {\n\n trace!(\"Validating existing target\");\n\n return Source::try_from(target.as_ref())\n\n .map_err(|e| {\n\n debug!(\"Failed to convert target to Source {:?}\", e);\n\n state.creep_memory_remove(target_memory);\n\n })\n\n .ok();\n\n }\n\n\n\n find_harvest_target(state).map(|source| {\n\n state.creep_memory_set(target_memory.into(), source.id());\n\n source\n\n })\n\n}\n\n\n", "file_path": "src/creeps/harvester.rs", "rank": 8, "score": 222796.11024602118 }, { "content": "pub fn attempt_upgrade<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let loading = state.creep_memory_bool(LOADING);\n\n if loading.unwrap_or(false) {\n\n return Err(\"loading\")?;\n\n }\n\n if state.creep().carry_total() == 0 {\n\n state.creep_memory_set(\"loading\".into(), true);\n\n Err(\"empty\")?;\n\n }\n\n let controller = state.creep().room().controller().ok_or_else(|| {\n\n let error = \"Creep has no access to a controller in the room!\";\n\n error!(\"{}\", error);\n\n error\n\n })?;\n\n let res = state.creep().upgrade_controller(&controller);\n\n match res {\n\n ReturnCode::Ok => Ok(()),\n\n ReturnCode::NotInRange => move_to(state.creep(), &controller),\n\n _ => {\n\n error!(\"Failed to upgrade controller {:?}\", res);\n\n Err(\"Failed to upgrade controller\")?\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/creeps/upgrader.rs", "rank": 9, "score": 219426.86979884378 }, { "content": "fn approach_target_room<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let target_room = {\n\n state\n\n 
.creep_memory_string(TARGET_ROOM)\n\n .ok_or_else(|| \"no target set\")?\n\n };\n\n\n\n let creep = state.creep();\n\n\n\n let arrived = state.current_room().to_string().as_str() == target_room;\n\n\n\n if arrived {\n\n Err(\"Already in the room\")?;\n\n }\n\n let target_room = WorldPosition::parse_name(target_room)\n\n .map_err(|e| format!(\"Got an invalid room name as conquest target {:?}\", e))?\n\n .as_room_center();\n\n move_to_options(\n\n creep,\n\n &target_room,\n\n MoveToOptions {\n\n reuse_path: Some(30),\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/creeps/lrw.rs", "rank": 10, "score": 216866.13076234923 }, { "content": "/// The higher the more important\n\npub fn role_priority<'a>(_room: &'a Room, role: Role) -> i8 {\n\n match role {\n\n Role::Defender => 4,\n\n Role::Harvester => 3,\n\n Role::Gofer => 2,\n\n Role::Worker => 1,\n\n Role::Scout => -1,\n\n Role::Lrh => -2,\n\n Role::Conqueror => -3,\n\n Role::Lrw => -4,\n\n _ => 0,\n\n }\n\n}\n\n\n", "file_path": "src/creeps/spawn_info.rs", "rank": 11, "score": 214637.41135262724 }, { "content": "pub fn run<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let tasks = [\n\n Task::new(|state| {\n\n let tasks = [\n\n Task::new(|state| attempt_upgrade(state)),\n\n Task::new(|state: &mut CreepState| sign_controller_stock_msgs(state.creep())),\n\n ];\n\n\n\n selector(state, tasks.iter())\n\n })\n\n .with_name(\"Attempt upgrade\"),\n\n Task::new(|state| withdraw_energy(state)).with_name(\"Withdraw energy\"),\n\n Task::new(|state| attempt_upgrade(state)).with_name(\"Attempt upgrade\"),\n\n ];\n\n\n\n sequence(state, tasks.iter())\n\n}\n\n\n", "file_path": "src/creeps/upgrader.rs", "rank": 12, "score": 213406.63298133336 }, { "content": "pub fn run<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let tasks = [\n\n Task::new(|state| {\n\n update_scout_info(state).unwrap_or_else(|e| {\n\n warn!(\"Failed to update scout info {}\", e);\n\n });\n\n state.creep().say(\"🛰️\", false);\n\n Err(\"Continue\")?\n\n 
}),\n\n Task::new(|state| {\n\n approach_target_room(state, TARGET).map_err(|e| {\n\n state.creep_memory_remove(TARGET);\n\n debug!(\"Approach failed {}\", e);\n\n e\n\n })\n\n }),\n\n Task::new(|state| set_next_room(state)),\n\n ];\n\n\n\n sequence(state, tasks.iter())\n\n}\n\n\n", "file_path": "src/creeps/scout.rs", "rank": 13, "score": 213389.419027435 }, { "content": "pub fn run<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let tasks = [\n\n Task::new(|state| attempt_harvest(state, None)).with_name(\"Attempt harvest\"),\n\n Task::new(|state| unload(state)).with_name(\"Attempt unload\"),\n\n Task::new(|state| attempt_harvest(state, None)).with_name(\"Attempt harvest\"),\n\n ];\n\n\n\n sequence(state, tasks.iter())\n\n}\n\n\n", "file_path": "src/creeps/harvester.rs", "rank": 14, "score": 213217.90873099014 }, { "content": "pub fn run<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let last_task = state.creep_memory_i64(TASK).unwrap_or(0);\n\n let last_task: GoferState = GoferState::from_u32(last_task as u32).unwrap_or(GoferState::Idle);\n\n\n\n let mut priorities = [0; 4];\n\n priorities[last_task as usize] += 1;\n\n\n\n let mut tasks = [\n\n Task::new(|state| get_energy(state))\n\n .with_name(\"Get energy\")\n\n .with_priority(priorities[GoferState::WithdrawingEnergy as usize])\n\n .with_state_save(GoferState::WithdrawingEnergy),\n\n Task::new(|state| pickup_energy(state))\n\n .with_name(\"Pickup energy\")\n\n .with_priority(priorities[GoferState::PickingUpEnergy as usize])\n\n .with_state_save(GoferState::PickingUpEnergy),\n\n Task::new(|state| attempt_unload(state))\n\n .with_name(\"Attempt unload\")\n\n .with_priority(priorities[GoferState::Unloading as usize])\n\n .with_state_save(GoferState::Unloading),\n\n ];\n\n\n\n sorted_by_priority(&mut tasks);\n\n sequence(state, tasks.iter())\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 15, "score": 213183.03981751332 }, { "content": "pub fn move_to<'a, T>(creep: &'a Creep, target: &'a T) -> 
ExecutionResult\n\nwhere\n\n T: screeps::HasPosition,\n\n{\n\n let res = js! {\n\n const creep = @{creep};\n\n const target = @{target.pos()};\n\n return creep.moveTo(target, {reusePath: 10});\n\n };\n\n let res =\n\n ReturnCode::try_from(res).map_err(|e| format!(\"Failed to convert move result {:?}\", e))?;\n\n match res {\n\n ReturnCode::Ok | ReturnCode::Tired => Ok(()),\n\n _ => {\n\n debug!(\"Move failed {:?}\", res);\n\n Err(\"Move failed\")?\n\n }\n\n }\n\n}\n\n\n\npub struct MoveToOptions {\n\n reuse_path: Option<i32>,\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 16, "score": 211751.1768414679 }, { "content": "/// Retreive energy from a Container\n\n/// # Contracts & Side effects\n\n/// Required the `loading` flag to be set to true\n\n/// If the creep is full sets the `loading` flag to false\n\npub fn get_energy<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let creep = state.creep();\n\n {\n\n let loading = state.creep_memory_bool(LOADING).unwrap_or(false);\n\n if !loading {\n\n Err(\"not loading\")?;\n\n }\n\n if creep.carry_total() == creep.carry_capacity() {\n\n state.creep_memory_set(LOADING.into(), false);\n\n state.creep_memory_remove(TARGET);\n\n Err(\"full\")?\n\n }\n\n }\n\n\n\n let target = find_container(state).ok_or_else(|| \"no container found\")?;\n\n withdraw(state, &target).map_err(|e| {\n\n state.creep_memory_remove(TARGET);\n\n e\n\n })\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 17, "score": 209603.34590580978 }, { "content": "pub fn initialize_creep<'a>(state: &'a mut GameState, creep: &'a Creep) -> ExecutionResult {\n\n assign_role(state, &creep).ok_or_else(|| \"Failed to find a role for creep\")?;\n\n let memory = state.creep_memory_entry(CreepName(&creep.name()));\n\n memory.insert(HOME_ROOM.into(), creep.room().name().into());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 18, "score": 205135.67599274463 }, { "content": "pub fn sorted_by_priority<'a, T: TaskInput>(nodes: &mut [Task<'a, 
T>]) {\n\n nodes.sort_by_key(|n| -n.priority);\n\n}\n\n\n", "file_path": "src/bt/mod.rs", "rank": 19, "score": 204588.0084223759 }, { "content": "fn withdraw<'a>(state: &mut CreepState, target: &'a StructureContainer) -> ExecutionResult {\n\n if target.store_total() == 0 {\n\n Err(\"Target is empty\")?;\n\n }\n\n let creep = state.creep();\n\n if creep.pos().is_near_to(target) {\n\n let r = creep.withdraw_all(target, ResourceType::Energy);\n\n if r != ReturnCode::Ok {\n\n debug!(\"couldn't withdraw: {:?}\", r);\n\n Err(\"can't withdraw\")?;\n\n }\n\n } else {\n\n move_to(creep, target)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 20, "score": 204061.05311225617 }, { "content": "fn transfer<'a, T>(state: &mut CreepState, target: &T) -> ExecutionResult\n\nwhere\n\n T: Transferable,\n\n{\n\n let creep = state.creep();\n\n if creep.pos().is_near_to(target) {\n\n if creep.transfer_all(target, ResourceType::Energy) != ReturnCode::Ok {\n\n Err(\"couldn't unload\")?;\n\n }\n\n } else {\n\n move_to(creep, target)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 21, "score": 204061.05311225622 }, { "content": "pub fn find_repair_target<'a>(room: &'a Room) -> Option<Structure> {\n\n trace!(\"Finding repair target in room {:?}\", room.name());\n\n\n\n let candidates = js! 
{\n\n const room = @{room};\n\n return room.find(FIND_STRUCTURES, {\n\n filter: s => {\n\n switch (s.structureType) {\n\n case STRUCTURE_WALL:\n\n return s.hits < 10*1000;\n\n default:\n\n return s.hits < s.hitsMax;\n\n }\n\n }\n\n });\n\n };\n\n let candidates: Vec<Structure> = candidates\n\n .try_into()\n\n .map_err(|e| {\n\n error!(\"Failed to deserialize repair candidates {:?}\", e);\n\n })\n\n .ok()?;\n\n\n\n candidates\n\n .into_iter()\n\n .filter(|s| s.as_attackable().is_some())\n\n .min_by_key(|s| s.as_attackable().map(|s| s.hits()).unwrap())\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 22, "score": 202017.00886275945 }, { "content": "/// Max number of creeps of a given role in the given room\n\npub fn target_number_of_role_in_room<'a>(role: Role, room: &'a Room, game_state: &GameState) -> i8 {\n\n let level = room.controller().map(|l| l.level()).unwrap_or(0);\n\n let room_pos = WorldPosition::from(room);\n\n let n_flags = game_state\n\n .expansion\n\n .iter()\n\n .filter(|w| w.dist(room_pos) <= 10)\n\n .count()\n\n .min(255) as i8;\n\n let n_sources = room.find(find::SOURCES).len() as i8;\n\n let containers = js! 
{\n\n const room = @{room};\n\n return room.find(FIND_STRUCTURES, {\n\n filter: (s) => s.structureType == STRUCTURE_CONTAINER\n\n });\n\n };\n\n let containers: Vec<StructureContainer> = containers.try_into().unwrap();\n\n let energy_in_containers = containers.iter().map(|c| c.energy()).sum::<u32>();\n\n let n_containers = containers.len() as i8;\n\n let n_constructions = (room.find(find::CONSTRUCTION_SITES).len()) as i8;\n", "file_path": "src/creeps/spawn_info.rs", "rank": 23, "score": 201729.859343573 }, { "content": "// TODO: return an array of all roles to spawn in order of priority\n\n/// Get the next target role in the given room\n\npub fn next_role<'a>(state: &'a mut GameState, room: &'a Room) -> Option<Role> {\n\n let creeps = { state.count_creeps_in_room(room).clone() };\n\n creeps\n\n .into_iter()\n\n .fold(None, |result: Option<Role>, (role, actual)| {\n\n let expected = target_number_of_role_in_room(role, room, state);\n\n if expected <= actual {\n\n return result;\n\n }\n\n result\n\n .map(|result| {\n\n let result_prio = role_priority(room, result);\n\n let role_prio = role_priority(room, role);\n\n if role_prio > result_prio {\n\n role\n\n } else {\n\n result\n\n }\n\n })\n\n .or_else(|| Some(role))\n\n })\n\n}\n\n\n", "file_path": "src/creeps/roles.rs", "rank": 24, "score": 200743.47480860126 }, { "content": "pub fn build_roads<'a>(room: &'a Room, state: &'a mut ConstructionState) -> ExecutionResult {\n\n trace!(\"Building roads in room {}\", room.name());\n\n\n\n can_continue_building(room)?;\n\n\n\n let matrix = state.connections.entry(room.name()).or_default();\n\n\n\n let targets = js! 
{\n\n const room = @{room};\n\n const targets = [\n\n ...room.find(FIND_MY_SPAWNS),\n\n ...room.find(FIND_MY_STRUCTURES, {\n\n filter: (s) => s\n\n && s.structureType != STRUCTURE_ROAD\n\n && s.structureType != STRUCTURE_WALL\n\n && s.structureType != STRUCTURE_RAMPART\n\n }),\n\n ...room.find(FIND_SOURCES)\n\n ];\n\n const result = targets.map((t)=> t && t.pos).filter((p) => p);\n", "file_path": "src/constructions/roads.rs", "rank": 25, "score": 199304.77566783142 }, { "content": "fn try_withdraw<'a, T>(state: &mut CreepState, target: &'a RoomObject) -> ExecutionResult\n\nwhere\n\n T: 'a + Withdrawable + screeps::traits::TryFrom<&'a Reference>,\n\n{\n\n let target = T::try_from(target.as_ref()).map_err(|_| \"Failed to convert target\")?;\n\n withdraw(state, &target)\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 28, "score": 194692.810741722 }, { "content": "pub fn spawn_config_by_role(room: &Room, role: Role) -> SpawnConfig {\n\n SpawnConfig {\n\n basic_body: basic_role_parts(room, role),\n\n body_extension: role_part_scale(room, role),\n\n body_max: role_part_max(room, role),\n\n }\n\n}\n\n\n", "file_path": "src/creeps/spawn_info.rs", "rank": 29, "score": 194259.727685 }, { "content": "fn set_next_room(state: &mut CreepState) -> ExecutionResult {\n\n if state.creep_memory_string(TARGET).is_some() {\n\n Err(\"Already has a target\")?;\n\n }\n\n\n\n let room = state.creep().room();\n\n let gs = state.get_game_state();\n\n\n\n let mut min = 1 << 30;\n\n let mut target_room = WorldPosition::default();\n\n for room in neighbours(&room) {\n\n if let Some(intel) = gs.scout_intel.get(&room) {\n\n if intel.time_of_recording < min {\n\n min = intel.time_of_recording;\n\n target_room = room;\n\n }\n\n } else {\n\n target_room = room;\n\n break;\n\n }\n\n }\n\n\n\n state.creep_memory_set(TARGET, target_room.to_string().as_str());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/scout.rs", "rank": 30, "score": 193486.6907032202 }, { "content": "fn 
find_unload_target<'a>(state: &mut CreepState) -> Option<Reference> {\n\n read_unload_target(state).or_else(|| {\n\n find_container(state).unwrap_or_else(|e| {\n\n debug!(\"Failed to find unload target {:?}\", e);\n\n });\n\n read_unload_target(state)\n\n })\n\n}\n\n\n", "file_path": "src/creeps/harvester.rs", "rank": 31, "score": 192991.53881922583 }, { "content": "fn read_unload_target<'a>(state: &mut CreepState) -> Option<Reference> {\n\n let target = state.creep_memory_string(TARGET);\n\n\n\n if let Some(target) = target {\n\n let target = get_object_erased(target)?;\n\n Some(target.as_ref().clone())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/creeps/harvester.rs", "rank": 32, "score": 192991.53881922583 }, { "content": "fn read_unload_target<'a>(state: &mut CreepState) -> Option<Reference> {\n\n state\n\n .creep_memory_string(TARGET)\n\n .and_then(|target| get_object_erased(target).map(|target| target.as_ref().clone()))\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 33, "score": 192957.67790497217 }, { "content": "fn find_unload_target<'a>(state: &mut CreepState) -> Option<Reference> {\n\n trace!(\"Setting unload target\");\n\n if let Some(target) = read_unload_target(state) {\n\n let notfull = js! 
{\n\n const target = @{&target};\n\n return target.capacity < target.storeCapacity || target.energy < target.energyCapacity;\n\n };\n\n let notfull: bool = notfull.try_into().unwrap_or(false);\n\n if notfull {\n\n return Some(target);\n\n }\n\n }\n\n let tasks = [\n\n Task::new(|state| find_unload_target_by_type(state, \"spawn\"))\n\n .with_name(\"Find unload target by type spawn\"),\n\n Task::new(|state| find_unload_target_by_type(state, \"tower\"))\n\n .with_name(\"Find unload target by type tower\"),\n\n Task::new(|state| find_unload_target_by_type(state, \"extension\"))\n\n .with_name(\"Find unload target by type extension\"),\n\n Task::new(|state| find_storage(state)).with_name(\"Find unload target by type storage\"),\n", "file_path": "src/creeps/gofer.rs", "rank": 34, "score": 192957.67790497217 }, { "content": "/// Find and pick up energy from the ground\n\n/// # Contracts & Side effects\n\n/// Required the `loading` flag to be set to true\n\n/// If the creep is full sets the `loading` flag to false\n\npub fn pickup_energy(state: &mut CreepState) -> ExecutionResult {\n\n if !state.creep_memory_bool(LOADING).unwrap_or(false) {\n\n Err(\"not loading\")?;\n\n }\n\n\n\n if state.creep().carry_total() == state.creep().carry_capacity() {\n\n state.creep_memory_set(LOADING.into(), false);\n\n state.creep_memory_remove(TARGET);\n\n Err(\"full\")?;\n\n }\n\n\n\n let target = state\n\n .creep_memory_string(TARGET)\n\n .and_then(|id| get_object_typed::<Resource>(id).unwrap_or(None))\n\n .or_else(|| {\n\n find_dropped_energy(state.creep()).map(|target| {\n\n let id = target.id();\n\n state.creep_memory_set(TARGET.into(), id);\n\n target\n\n })\n", "file_path": "src/creeps/mod.rs", "rank": 36, "score": 187246.45836449572 }, { "content": "pub fn attack_simple(state: &mut CreepState) -> ExecutionResult {\n\n if let Some(ref target) = find_target(state) {\n\n let result = state.creep().attack(target);\n\n match result {\n\n ReturnCode::Ok => return Ok(()),\n\n 
ReturnCode::NotInRange => return move_to(state.creep(), target),\n\n _ => {\n\n warn!(\n\n \"Creep {} failed to attack {} {:?}\",\n\n state.creep_name().0,\n\n target.name(),\n\n result\n\n );\n\n Err(\"Failed to attack\")?;\n\n }\n\n }\n\n }\n\n Err(\"Can't find target to attack\")?\n\n}\n\n\n", "file_path": "src/creeps/defender.rs", "rank": 37, "score": 187239.95466934214 }, { "content": "pub fn run<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let last_task = state.creep_memory_i64(TASK).unwrap_or(0);\n\n let last_task: WorkerState =\n\n WorkerState::from_u32(last_task as u32).unwrap_or(WorkerState::Idle);\n\n\n\n let mut priorities = [0; 6];\n\n priorities[last_task as usize] += 1;\n\n\n\n let mut tasks = [\n\n Task::new(|state| attempt_build(state))\n\n .with_name(\"Attempt build\")\n\n .with_priority(priorities[WorkerState::Building as usize])\n\n .with_state_save(WorkerState::Building),\n\n Task::new(|state: &mut CreepState| withdraw_energy(state))\n\n .with_name(\"Withdraw energy\")\n\n .with_state_save(WorkerState::WithdrawingEnergy)\n\n .with_priority(priorities[WorkerState::WithdrawingEnergy as usize]),\n\n Task::new(|state| harvest(state))\n\n .with_name(\"Harvest\")\n\n .with_state_save(WorkerState::Harvesting)\n", "file_path": "src/creeps/worker.rs", "rank": 38, "score": 186783.56393627275 }, { "content": "pub fn run<'a>(state: &mut CreepState) -> ExecutionResult {\n\n Task::new(_run)\n\n .with_required_bucket(300)\n\n .with_name(\"LRW\")\n\n .tick(state)\n\n}\n\n\n", "file_path": "src/creeps/lrw.rs", "rank": 39, "score": 186783.56393627275 }, { "content": "pub fn run<'a>(state: &mut CreepState) -> ExecutionResult {\n\n Task::new(_run)\n\n .with_required_bucket(300)\n\n .with_name(\"Conqueror\")\n\n .tick(state)\n\n}\n\n\n", "file_path": "src/creeps/conqueror.rs", "rank": 41, "score": 186783.56393627275 }, { "content": "pub fn run<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let tasks = [\n\n Task::new(|state| attack_simple(state)),\n\n 
Task::new(|state| {\n\n state.creep().say(\"⚔️\", true);\n\n Ok(())\n\n }),\n\n ];\n\n\n\n selector(state, tasks.iter())\n\n}\n\n\n", "file_path": "src/creeps/defender.rs", "rank": 42, "score": 186783.56393627275 }, { "content": "pub fn neighbours(room: &Room) -> ArrayVec<[WorldPosition; 8]> {\n\n let coords = WorldPosition::from(room);\n\n let neighbours = coords\n\n .neighbours_in_vectors()\n\n .iter()\n\n .map(|coords| coords.to_string())\n\n .collect::<ArrayVec<[_; 8]>>();\n\n let names: Vec<&str> = neighbours.iter().map(|n| n.as_str()).collect();\n\n let result = js! {\n\n const room = @{room};\n\n const neighbours = @{names};\n\n // Directions in the same order as in neighbours_in_vectors\n\n // TODO: return the directions too?\n\n const directions = [\n\n FIND_EXIT_TOP,\n\n FIND_EXIT_LEFT,\n\n FIND_EXIT_BOTTOM,\n\n FIND_EXIT_RIGHT,\n\n ];\n\n return neighbours.filter((r,i) => room.findExitTo(r) == directions[i]);\n", "file_path": "src/rooms/mod.rs", "rank": 43, "score": 185335.7704543109 }, { "content": "pub fn attempt_build<'a>(state: &mut CreepState) -> ExecutionResult {\n\n trace!(\"Building\");\n\n\n\n let loading = state.creep_memory_bool(LOADING);\n\n if loading.unwrap_or(false) {\n\n Err(\"loading\")?;\n\n }\n\n\n\n if state.creep().carry_total() == 0 {\n\n state.creep_memory_set(LOADING.into(), true);\n\n Err(\"empty\")?\n\n }\n\n let target = get_build_target(state).ok_or_else(|| format!(\"Failed to find build target\"))?;\n\n let res = state.creep().build(&target);\n\n match res {\n\n ReturnCode::Ok => Ok(()),\n\n ReturnCode::NotInRange => move_to(state.creep(), &target),\n\n _ => {\n\n error!(\"Failed to build target {:?} {:?}\", res, target.id());\n\n state.creep_memory_remove(TARGET);\n\n Err(\"Failed to build target\")?\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/creeps/worker.rs", "rank": 44, "score": 183960.45275482512 }, { "content": "pub fn attempt_repair<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let loading = 
state.creep_memory_bool(LOADING);\n\n if loading.unwrap_or(false) {\n\n return Err(\"loading\".into());\n\n }\n\n let creep = state.creep();\n\n if creep.carry_total() == 0 {\n\n state.creep_memory_set(\"loading\".into(), true);\n\n Err(\"empty\".into())\n\n } else {\n\n let target = find_repair_target(&creep.room()).ok_or_else(|| {\n\n let error = format!(\"Could not find a repair target\");\n\n debug!(\"{}\", error);\n\n error\n\n })?;\n\n repair(creep, &target)\n\n }\n\n}\n\n\n", "file_path": "src/creeps/repairer.rs", "rank": 45, "score": 183960.45275482512 }, { "content": "pub fn sign_controller_stock_msgs(creep: &Creep) -> ExecutionResult {\n\n const MESSAGES: &'static [&'static str] = &[\"Become as gods\", \"This cannot continue\"];\n\n let msg = MESSAGES[game::time() as usize % MESSAGES.len()];\n\n sign_controller(creep, msg)\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 46, "score": 182647.83246525444 }, { "content": "pub fn run(state: &mut GameState) -> ExecutionResult {\n\n let start = game::cpu::get_used();\n\n\n\n screeps::game::creeps::values()\n\n .into_iter()\n\n .for_each(|creep| {\n\n let mut state = CreepState::new(creep, state);\n\n run_creep(&mut state).unwrap_or(())\n\n });\n\n\n\n let end = game::cpu::get_used();\n\n\n\n state.creep_stats.total_execution_time = (end - start) as f32;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 47, "score": 181396.2423329753 }, { "content": "/// Retreive energy from a Container\n\n/// # Contracts & Side effects\n\n/// Required the `loading` flag to be set to true\n\n/// If the creep is full sets the `loading` flag to false\n\npub fn withdraw_energy<'a>(state: &'a mut CreepState) -> ExecutionResult {\n\n trace!(\"Getting energy\");\n\n\n\n let target = {\n\n if !state.creep_memory_bool(LOADING).unwrap_or(false) {\n\n Err(\"not loading\")?;\n\n }\n\n\n\n if state.creep().carry_total() == state.creep().carry_capacity() {\n\n state.creep_memory_set(LOADING.into(), false);\n\n 
state.creep_memory_remove(TARGET);\n\n Err(\"full\")?;\n\n }\n\n\n\n state\n\n .creep_memory_string(TARGET)\n\n .and_then(|id| get_object_erased(id))\n\n .or_else(|| {\n\n find_available_energy(state.creep()).map(|target| {\n\n let id = js! {\n", "file_path": "src/creeps/mod.rs", "rank": 48, "score": 180939.4583206294 }, { "content": "fn repair<'a>(creep: &'a Creep, target: &'a Structure) -> ExecutionResult {\n\n let res = creep.repair(target);\n\n match res {\n\n ReturnCode::Ok => Ok(()),\n\n ReturnCode::NotInRange => move_to(creep, target),\n\n _ => Err(format!(\"Unexpected ReturnCode {:?}\", res))?,\n\n }\n\n}\n\n\n", "file_path": "src/creeps/repairer.rs", "rank": 49, "score": 177757.22839721653 }, { "content": "pub fn build_containers<'a>(room: &'a Room) -> ExecutionResult {\n\n trace!(\"Building containers in room {}\", room.name());\n\n\n\n let spawn = room\n\n .find(find::MY_STRUCTURES)\n\n .into_iter()\n\n .find(|structure| structure.structure_type() == StructureType::Spawn);\n\n if spawn.is_none() {\n\n Err(format!(\n\n \"Skipping container build until a spawn is built in room {}\",\n\n room.name()\n\n ))?;\n\n }\n\n\n\n let sources = room\n\n .find(find::SOURCES)\n\n .into_iter()\n\n .filter(|source| {\n\n let has_construction_site = source\n\n .pos()\n", "file_path": "src/constructions/containers.rs", "rank": 50, "score": 177490.56615473362 }, { "content": "/// The largest a creep of role `role` may be\n\nfn role_part_max(room: &Room, role: Role) -> Option<usize> {\n\n let level = room.controller().map(|c| c.level()).unwrap_or(0);\n\n\n\n let worker_count = {\n\n if level < 5 {\n\n 16\n\n } else if level < 8 {\n\n 24\n\n } else {\n\n 36\n\n }\n\n };\n\n\n\n let result = match role {\n\n Role::Harvester => Some(8),\n\n Role::Lrw | Role::Lrh | Role::Worker | Role::Upgrader => Some(worker_count),\n\n Role::Conqueror => None,\n\n Role::Scout => None,\n\n Role::Gofer => Some(worker_count * 2),\n\n Role::Defender => None,\n\n Role::Unknown => None,\n\n };\n\n 
result.map(|x| x.min(50))\n\n}\n", "file_path": "src/creeps/spawn_info.rs", "rank": 51, "score": 176831.45943944395 }, { "content": "fn find_harvest_target<'a>(state: &mut CreepState) -> Option<Source> {\n\n trace!(\"Finding harvest target\");\n\n\n\n let room = state.creep().room();\n\n let harvester_count = harvester_count(state);\n\n\n\n debug!(\n\n \"harvester count in room {:?} {:#?}\",\n\n room.name(),\n\n harvester_count\n\n );\n\n\n\n let sources = room.find(find::SOURCES);\n\n let mut sources = sources.into_iter();\n\n let first_source = sources.next()?;\n\n let first_dist = first_source.pos().get_range_to(&state.creep().pos());\n\n let first_count = harvester_count\n\n .get(&first_source.id())\n\n .map(|x| *x)\n\n .unwrap_or(0);\n", "file_path": "src/creeps/harvester.rs", "rank": 52, "score": 176765.89478624472 }, { "content": "fn withdraw<'a, T>(state: &mut CreepState, target: &'a T) -> ExecutionResult\n\nwhere\n\n T: Withdrawable,\n\n{\n\n let creep = state.creep();\n\n if creep.pos().is_near_to(target) {\n\n let r = creep.withdraw_all(target, ResourceType::Energy);\n\n if r != ReturnCode::Ok {\n\n debug!(\"couldn't withdraw: {:?}\", r);\n\n Err(\"couldn't withdraw\")?;\n\n }\n\n } else {\n\n move_to(creep, target)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 53, "score": 176434.12432988733 }, { "content": "fn find_target(state: &mut CreepState) -> Option<Creep> {\n\n if let Some(id) = state.creep_memory_string(ATTACK_TARGET) {\n\n if let Ok(Some(creep)) = get_object_typed::<Creep>(&id) {\n\n return Some(creep);\n\n }\n\n }\n\n state\n\n .creep()\n\n .pos()\n\n .find_closest_by_range(find::HOSTILE_CREEPS)\n\n .map(|creep| {\n\n state.creep_memory_set(ATTACK_TARGET, creep.id());\n\n creep\n\n })\n\n}\n\n\n", "file_path": "src/creeps/defender.rs", "rank": 54, "score": 176306.69147501639 }, { "content": "/// Run the creep according to the given role\n\npub fn run_role<'a>(state: &mut CreepState, role: Role) -> ExecutionResult 
{\n\n let result = match role {\n\n Role::Upgrader => upgrader::run(state),\n\n Role::Harvester => harvester::run(state),\n\n Role::Worker => worker::run(state),\n\n Role::Gofer => gofer::run(state),\n\n Role::Conqueror => conqueror::run(state),\n\n Role::Lrh => lrh::run(state),\n\n Role::Lrw => lrw::run(state),\n\n Role::Scout => scout::run(state),\n\n Role::Defender => defender::run(state),\n\n _ => unimplemented!(),\n\n };\n\n\n\n result.map_err(|e| {\n\n warn!(\"Creep {} is idle: {}\", state.creep_name().0, e);\n\n ExecutionError::from(\"Idle\")\n\n })\n\n}\n\n\n", "file_path": "src/creeps/roles.rs", "rank": 55, "score": 172917.14095367485 }, { "content": "pub fn attempt_harvest<'a>(\n\n state: &mut CreepState,\n\n target_memory: Option<&'a str>,\n\n) -> ExecutionResult {\n\n trace!(\"Harvesting\");\n\n\n\n let target_memory = target_memory.unwrap_or(HARVEST_TARGET);\n\n let carry_total = state.creep().carry_total();\n\n let carry_cap = state.creep().carry_capacity();\n\n\n\n if carry_total == carry_cap {\n\n state.creep_memory_remove(target_memory);\n\n Err(\"full\")?;\n\n }\n\n\n\n let source =\n\n harvest_target(state, target_memory).ok_or_else(|| format!(\"No harvest target found\"))?;\n\n\n\n if state.creep().pos().is_near_to(&source) {\n\n let r = state.creep().harvest(&source);\n", "file_path": "src/creeps/harvester.rs", "rank": 56, "score": 170485.2446566599 }, { "content": "fn build_storage(room: &Room, _state: &mut ConstructionState) -> ExecutionResult {\n\n let pos = storage::find_storage_pos(room)?;\n\n\n\n debug!(\"Building storage at {:?}\", pos);\n\n\n\n let pos = pos.into_room_pos(&room.name());\n\n let result = room.create_construction_site(&pos, StructureType::Storage);\n\n match result {\n\n ReturnCode::Ok => Ok(()),\n\n ReturnCode::Full => {\n\n debug!(\"Can't place construction site {:?}\", result);\n\n Err(\"Room is full\")?\n\n }\n\n _ => {\n\n debug!(\"Can't place construction site {:?}\", result);\n\n Err(format!(\"Failed to place 
construction site {:?}\", result))?\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/constructions/mod.rs", "rank": 57, "score": 169958.79361650668 }, { "content": "fn manage_room<'a>(room: &'a Room, state: &mut ConstructionState) -> ExecutionResult {\n\n info!(\"Manage constructionSites of room {:?}\", room.name());\n\n\n\n let my = js! {\n\n const room = @{room};\n\n return room.controller && room.controller.my || false;\n\n };\n\n let my: bool = my.try_into().map_err(|e| {\n\n error!(\"Failed to convert bool, {:?}\", e);\n\n \"Conversion error\"\n\n })?;\n\n if !my {\n\n Err(\"Room is not mine\")?;\n\n }\n\n\n\n build_structures(room, state).unwrap_or_else(|e| warn!(\"Failed build_structures {:?}\", e));\n\n containers::build_containers(room).unwrap_or_else(|e| warn!(\"Failed containers {:?}\", e));\n\n roads::build_roads(room, state).unwrap_or_else(|e| warn!(\"Failed roads {:?}\", e));\n\n build_storage(room, state).unwrap_or_else(|e| warn!(\"Failed storage {:?}\", e));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/constructions/mod.rs", "rank": 58, "score": 169130.56079471658 }, { "content": "fn find_container<'a>(state: &mut CreepState) -> ExecutionResult {\n\n trace!(\"Finding new unload target\");\n\n\n\n let result = js! 
{\n\n let creep = @{state.creep()};\n\n const container = creep.pos.findClosestByRange(FIND_STRUCTURES, {\n\n filter: (i) => i.structureType == STRUCTURE_CONTAINER\n\n && i.store[RESOURCE_ENERGY] < i.storeCapacity\n\n });\n\n return container;\n\n };\n\n\n\n let container: Option<StructureContainer> = result.try_into().unwrap_or(None);\n\n if let Some(container) = container {\n\n state.creep_memory_set(TARGET.into(), container.id());\n\n Ok(())\n\n } else {\n\n Err(\"No container was found\")?\n\n }\n\n}\n\n\n", "file_path": "src/creeps/harvester.rs", "rank": 59, "score": 168325.17840637037 }, { "content": "fn find_storage<'a>(state: &mut CreepState) -> ExecutionResult {\n\n let storage = state\n\n .creep()\n\n .room()\n\n .storage()\n\n .ok_or_else(|| format!(\"No storage in room {:?}\", state.creep().room().name()))?;\n\n if storage.store_total() == storage.store_capacity() {\n\n Err(\"Storage is full\")?;\n\n }\n\n state.creep_memory_set(TARGET.into(), storage.id());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 60, "score": 168290.3094928936 }, { "content": "fn set_target<'a>(state: &mut CreepState) -> ExecutionResult {\n\n if state.creep_memory_string(TARGET_ROOM).is_some() {\n\n trace!(\"has target\");\n\n Err(\"Creep already has a target\")?;\n\n }\n\n let flag = {\n\n state\n\n .get_game_state()\n\n .expansion\n\n .iter()\n\n .next()\n\n .ok_or_else(|| \"can't find a target\")?\n\n .clone()\n\n };\n\n\n\n state.creep_memory_set(TARGET_ROOM.into(), flag.to_string().as_str());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/lrw.rs", "rank": 61, "score": 168112.16073634944 }, { "content": "fn claim_target<'a>(state: &mut CreepState) -> ExecutionResult {\n\n debug!(\"claiming room\");\n\n\n\n let target_room = {\n\n state\n\n .creep_memory_string(CONQUEST_TARGET)\n\n .ok_or_else(|| \"no target set\")?\n\n };\n\n\n\n let creep = state.creep();\n\n\n\n let room_name = state.current_room().to_string();\n\n let room_name = 
room_name.as_str();\n\n\n\n let arrived = room_name == target_room;\n\n\n\n if !arrived {\n\n let target_room = WorldPosition::parse_name(target_room)\n\n .map_err(|e| format!(\"Got an invalid room name as conquest target {:?}\", e))?\n\n .as_room_center();\n", "file_path": "src/creeps/conqueror.rs", "rank": 62, "score": 168112.16073634944 }, { "content": "fn set_target<'a>(state: &mut CreepState) -> ExecutionResult {\n\n if state.creep_memory_string(CONQUEST_TARGET).is_some() {\n\n Err(\"Creep already has a target\")?;\n\n }\n\n\n\n let flag = {\n\n state\n\n .get_game_state()\n\n .expansion\n\n .iter()\n\n .next()\n\n .ok_or_else(|| \"can't find a target\")?\n\n .clone()\n\n };\n\n\n\n state.creep_memory_set(CONQUEST_TARGET.into(), flag.to_string().as_str());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/creeps/conqueror.rs", "rank": 63, "score": 168112.1607363494 }, { "content": "fn build_structures<'a>(room: &'a Room, state: &'a mut ConstructionState) -> ExecutionResult {\n\n let structures = [\n\n StructureType::Tower,\n\n StructureType::Extension,\n\n StructureType::Extension,\n\n StructureType::Spawn,\n\n ];\n\n\n\n let matrix = get_matrix_mut(state, room);\n\n let mut pos = matrix\n\n .find_next_pos(room)\n\n .map_err(|e| format!(\"Failed to get the next position {:?}\", e))?;\n\n\n\n let name = room.name();\n\n\n\n for structure in structures.iter() {\n\n debug!(\"Attempting build at position {:?} in room {}\", pos, &name);\n\n let roompos = pos.try_into_room_pos(&name).ok_or_else(|| {\n\n let err = format!(\"Failed to cast point {:?} to RoomPosition\", pos);\n\n error!(\"{}\", err);\n", "file_path": "src/constructions/mod.rs", "rank": 64, "score": 161516.5309182689 }, { "content": "pub fn find_dropped_energy(creep: &Creep) -> Option<Resource> {\n\n creep\n\n .room()\n\n .find(find::DROPPED_RESOURCES)\n\n .into_iter()\n\n .filter(|resource| resource.resource_type() == ResourceType::Energy)\n\n .max_by_key(|r| r.amount())\n\n}\n\n\n", "file_path": 
"src/creeps/mod.rs", "rank": 65, "score": 158870.46319246595 }, { "content": "pub fn is_free(room: &Room, pos: &RoomPosition) -> bool {\n\n let result = js! {\n\n const pos = @{pos};\n\n const room = @{room};\n\n try {\n\n let objects = room.lookAt(pos);\n\n let invalid = objects.find((o) => {\n\n return (o.type == LOOK_TERRAIN && o.terrain == \"wall\")\n\n || (o.type == LOOK_STRUCTURES && o.structure.structureType != STRUCTURE_ROAD)\n\n || o.type == LOOK_CONSTRUCTION_SITES;\n\n });\n\n return !invalid;\n\n } catch (e) {\n\n return false;\n\n }\n\n };\n\n bool::try_from(result).unwrap_or(false)\n\n}\n\n\n", "file_path": "src/constructions/mod.rs", "rank": 66, "score": 158695.12866628024 }, { "content": "fn get_matrix_mut<'a>(state: &'a mut ConstructionState, room: &Room) -> &'a mut ConstructionMatrix {\n\n let construction_matrices = &mut state.construction_matrices;\n\n let matrix = construction_matrices.entry(room.name()).or_insert_with(|| {\n\n let initial_p = spawns::find_initial_point(room)\n\n .map(Point::from)\n\n .unwrap_or_else(|e| {\n\n debug!(\"Cant find an optimal point {:?}\", e);\n\n let structs = room.find(find::MY_STRUCTURES);\n\n structs\n\n .last()\n\n .map(|s| s.pos())\n\n .map(|p| {\n\n let x = p.x() as i16;\n\n let y = p.y() as i16;\n\n Point(x, y)\n\n })\n\n .unwrap_or(Point(25, 25))\n\n });\n\n ConstructionMatrix::default().with_position(Point::from(initial_p))\n\n });\n\n matrix\n\n}\n\n\n", "file_path": "src/constructions/mod.rs", "rank": 67, "score": 157881.6677910344 }, { "content": "pub fn run<'a>(state: &mut GameState) -> ExecutionResult {\n\n let flags = flags::values();\n\n flags.into_iter().for_each(move |flag| {\n\n let room = WorldPosition::parse_name(&flag.pos().room_name()).unwrap();\n\n flag.remove();\n\n state.expansion.insert(room);\n\n });\n\n Ok(())\n\n}\n\n\n", "file_path": "src/flags.rs", "rank": 68, "score": 156289.15299489652 }, { "content": "fn find_enemy<'a>(room: &'a Room) -> Option<screeps::Creep> {\n\n 
room.find(find::HOSTILE_CREEPS).into_iter().next()\n\n}\n\n\n", "file_path": "src/structures/towers.rs", "rank": 69, "score": 155554.94821572138 }, { "content": "/// Find the optimal point to place the first spawn on\n\npub fn find_initial_point(room: &Room) -> Result<RoomPosition, String> {\n\n if !room\n\n .controller()\n\n .map(|c| {\n\n let result = js! {\n\n return @{c}.my;\n\n };\n\n result.try_into().unwrap_or(false)\n\n })\n\n .unwrap_or(false)\n\n {\n\n Err(\"The room is not mine, skipping spawn placement\")?;\n\n }\n\n let poi = room\n\n .find(find::SOURCES)\n\n .into_iter()\n\n .map(|s| s.pos())\n\n .collect::<Vec<_>>();\n\n if poi.len() < 2 {\n\n Err(\"The room has no sources, no optimal spawn point can be found\")?;\n\n }\n\n let mut it = poi.into_iter();\n\n\n\n let first = it.next().unwrap();\n\n let optimal_point = it.fold(first, |result, p| result.midpoint(&p));\n\n\n\n Ok(optimal_point)\n\n}\n", "file_path": "src/constructions/spawns.rs", "rank": 70, "score": 154698.13164581364 }, { "content": "pub fn run<'a>(state: &mut GameState) -> ExecutionResult {\n\n game::structures::values()\n\n .into_iter()\n\n .filter_map(|s| match s {\n\n Structure::Tower(t) => Some(t),\n\n _ => None,\n\n })\n\n .for_each(move |tower| {\n\n let mut state = WrappedState::new(tower, state);\n\n run_tower(&mut state)\n\n .map_err(move |e| {\n\n debug!(\"Tower in room {:?} is idle, {:?}\", state.item.room().name(), e);\n\n e\n\n })\n\n .unwrap_or(())\n\n });\n\n Ok(())\n\n}\n\n\n", "file_path": "src/structures/towers.rs", "rank": 71, "score": 153983.09915878915 }, { "content": "pub fn run<'a>(state: &mut GameState) -> ExecutionResult {\n\n Task::new(_run).with_required_bucket(5000).tick(state)\n\n}\n\n\n", "file_path": "src/constructions/mod.rs", "rank": 72, "score": 153983.09915878915 }, { "content": "/// Return the BehaviourTree that runs the spawns\n\npub fn run<'a>(state: &mut GameState) -> ExecutionResult {\n\n Task::new(|state| {\n\n const SPAWN_SKIP: u32 = 5;\n\n\n\n 
let time = game::time();\n\n if time % SPAWN_SKIP != 0 {\n\n Err(\"Skip spawns this tick\")?;\n\n }\n\n let rooms = game::rooms::values();\n\n rooms\n\n .into_iter()\n\n .map(|room| room.find(find::MY_SPAWNS))\n\n .filter(|spawns| spawns.len() > 0)\n\n .for_each(move |spawns| {\n\n let index = time as usize % spawns.len();\n\n let spawn = &spawns[index as usize];\n\n run_spawn(state, spawn).unwrap_or(())\n\n });\n\n Ok(())\n\n })\n\n .with_required_bucket(500)\n\n .tick(state)\n\n}\n\n\n", "file_path": "src/structures/spawns.rs", "rank": 73, "score": 153983.09915878915 }, { "content": "pub fn run<'a>(state: &'a mut GameState) -> ExecutionResult {\n\n let tasks = [Task::new(remove_exp_markers)];\n\n selector(state, tasks.iter())\n\n}\n\n\n", "file_path": "src/expansion.rs", "rank": 74, "score": 153261.5786763056 }, { "content": "pub fn find_storage_pos(room: &Room ) -> Result<Point, String> {\n\n let controller = room.controller().ok_or_else(|| \"Room has no controller\")?;\n\n if controller.level() < 4 {\n\n Err(\"Can't build Storage before level 4\")?;\n\n }\n\n\n\n if room.storage().is_some() {\n\n Err(\"Room already has a storage\")?;\n\n }\n\n\n\n let poi = room\n\n .find(find::SOURCES)\n\n .into_iter()\n\n .map(|s| s.pos())\n\n .collect::<Vec<_>>();\n\n\n\n let pos: Point = match poi.len() {\n\n 0 => Err(\"Can't build a storage in a room with no sources\")?,\n\n 1 => {\n\n let p = poi[0].midpoint(&controller.pos());\n", "file_path": "src/constructions/storage.rs", "rank": 75, "score": 151743.585197762 }, { "content": "fn run_creep(state: &mut CreepState) -> ExecutionResult {\n\n debug!(\"Running creep {}\", state.creep_name().0);\n\n\n\n if state.creep().spawning() {\n\n return Ok(());\n\n }\n\n let tasks = [\n\n Task::new(|state: &mut CreepState| {\n\n run_role(state)\n\n .map_err(|e| {\n\n debug!(\"Recording failed run {:?}\", e);\n\n unsafe {\n\n (*state.mut_game_state()).creep_stats.idle_creeps += 1;\n\n }\n\n state.creep().say(\"💤\", false);\n\n e\n\n 
})\n\n .map(|_| {\n\n debug!(\"Recording successful run\");\n\n unsafe {\n", "file_path": "src/creeps/mod.rs", "rank": 76, "score": 148931.0566746464 }, { "content": "fn harvester_count<'a>(state: &mut CreepState) -> HashMap<String, i32> {\n\n let mut result = HashMap::new();\n\n\n\n game::creeps::values().into_iter().for_each(|creep| {\n\n let target = state\n\n .get_game_state()\n\n .creep_memory_string(CreepName(&creep.name()), HARVEST_TARGET);\n\n if let Some(target) = target {\n\n *result.entry(target.to_owned()).or_insert(0) += 1;\n\n }\n\n });\n\n result\n\n}\n\n\n", "file_path": "src/creeps/harvester.rs", "rank": 77, "score": 147266.18680359214 }, { "content": "fn _run(state: &mut CreepState) -> ExecutionResult {\n\n let tasks = [\n\n Task::new(|state: &mut CreepState| {\n\n update_scout_info(state).unwrap_or_else(|e| {\n\n error!(\"Failed to update scout info {:?}\", e);\n\n });\n\n Err(\"continue\")?\n\n })\n\n .with_name(\"Update scout info\"),\n\n Task::new(|state| claim_target(state)).with_name(\"Claim target\"),\n\n Task::new(|state| set_target(state)).with_name(\"Set target\"),\n\n Task::new(|state: &mut CreepState| sign_controller_stock_msgs(state.creep()))\n\n .with_name(\"Set target\"),\n\n ];\n\n\n\n sequence(state, tasks.iter())\n\n}\n\n\n", "file_path": "src/creeps/conqueror.rs", "rank": 78, "score": 146671.69950117424 }, { "content": "fn _run(state: &mut CreepState) -> ExecutionResult {\n\n let tasks = [\n\n Task::new(|state| {\n\n update_scout_info(state).unwrap_or_else(|e| {\n\n error!(\"Failed to update scout info {:?}\", e);\n\n });\n\n Err(\"continue\")?\n\n })\n\n .with_name(\"Update scout info\"),\n\n Task::new(|state| approach_target_room(state)).with_name(\"Approach target room\"),\n\n Task::new(|state| set_target(state)).with_name(\"Set target\"),\n\n Task::new(|state| worker::run(state)).with_name(\"Worker run\"),\n\n ];\n\n\n\n sequence(state, tasks.iter())\n\n}\n\n\n", "file_path": "src/creeps/lrw.rs", "rank": 79, "score": 
146671.69950117424 }, { "content": "/// Intended parts to be appended to 'role_parts'\n\nfn role_part_scale<'a>(_room: &Room, role: Role) -> BodyCollection {\n\n let it = match role {\n\n Role::Harvester => [Part::Work].iter(),\n\n Role::Scout | Role::Conqueror => [].iter(),\n\n Role::Gofer => [Part::Move, Part::Carry, Part::Carry].iter(),\n\n Role::Lrh => [Part::Move, Part::Carry, Part::Work, Part::Move].iter(),\n\n Role::Defender => [Part::Attack, Part::Move].iter(),\n\n _ => [Part::Move, Part::Carry, Part::Work].iter(),\n\n };\n\n it.map(|x| *x).collect()\n\n}\n\n\n", "file_path": "src/creeps/spawn_info.rs", "rank": 80, "score": 144030.24186840493 }, { "content": "/// The minimum parts required by the role\n\nfn basic_role_parts<'a>(_room: &Room, role: Role) -> BodyCollection {\n\n let it = match role {\n\n Role::Harvester => [Part::Move, Part::Work, Part::Carry, Part::Work].iter(),\n\n Role::Conqueror => [Part::Move, Part::Claim].iter(),\n\n Role::Gofer => [Part::Move, Part::Carry].iter(),\n\n Role::Lrh => [Part::Move, Part::Move, Part::Carry, Part::Work].iter(),\n\n Role::Upgrader | Role::Worker => [Part::Move, Part::Carry, Part::Work].iter(),\n\n Role::Lrw => [Part::Move, Part::Move, Part::Carry, Part::Work].iter(),\n\n Role::Scout => [Part::Move].iter(),\n\n Role::Defender => [Part::Move, Part::Attack].iter(),\n\n Role::Unknown => [].iter(),\n\n };\n\n it.map(|x| *x).collect()\n\n}\n\n\n", "file_path": "src/creeps/spawn_info.rs", "rank": 81, "score": 144030.24186840493 }, { "content": "fn assign_role<'a>(state: &'a mut GameState, creep: &'a Creep) -> Option<Role> {\n\n trace!(\"Assigning role to {}\", creep.name());\n\n\n\n if state\n\n .creep_memory_role(CreepName(&creep.name()), CREEP_ROLE)\n\n .is_some()\n\n {\n\n trace!(\"Already has a role\");\n\n None?;\n\n }\n\n\n\n let result = roles::next_role(state, &creep.room()).or_else(|| {\n\n warn!(\"Room is full\");\n\n None\n\n })?;\n\n\n\n let memory = state.creep_memory_entry(CreepName(&creep.name()));\n\n 
memory.insert(CREEP_ROLE.to_string(), (result as i64).into());\n\n Some(result)\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 82, "score": 143436.26506168273 }, { "content": "fn find_container<'a>(state: &mut CreepState) -> Option<StructureContainer> {\n\n read_target_container(state).or_else(|| {\n\n trace!(\"Finding new withdraw target\");\n\n state.creep_memory_remove(TARGET);\n\n let creep = state.creep();\n\n let containers = js! {\n\n let creep = @{creep};\n\n const containers = creep.room.find(FIND_STRUCTURES, {\n\n filter: (i) => i.structureType == STRUCTURE_CONTAINER\n\n && i.store[RESOURCE_ENERGY] > 0\n\n });\n\n return containers;\n\n };\n\n let containers: Vec<StructureContainer> =\n\n containers.try_into().map(|c| Some(c)).unwrap_or(None)?;\n\n\n\n let result = containers\n\n .into_iter()\n\n .max_by_key(|i| i.store_of(ResourceType::Energy));\n\n\n\n result.map(|c| {\n\n state.creep_memory_set(TARGET.into(), c.id());\n\n c\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 83, "score": 141723.8890535978 }, { "content": "fn run_role<'a>(state: &'a mut CreepState) -> ExecutionResult {\n\n let role = state.creep_memory_role(CREEP_ROLE).ok_or_else(|| {\n\n let error = \"failed to read creep role\";\n\n error!(\"{}\", error);\n\n error\n\n })?;\n\n\n\n roles::run_role(state, role)\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 84, "score": 139517.05159518286 }, { "content": "fn get_build_target<'a>(state: &mut CreepState) -> Option<ConstructionSite> {\n\n state\n\n .creep_memory_string(TARGET)\n\n .and_then(|id| get_object_typed(id).unwrap_or(None))\n\n .or_else(|| {\n\n let sites = state.creep().room().find(find::MY_CONSTRUCTION_SITES);\n\n sites\n\n .into_iter()\n\n .min_by_key(|s| s.progress_total() - s.progress())\n\n .ok_or_else(|| debug!(\"Could not find a build target\"))\n\n .map(|site| {\n\n state.creep_memory_set(TARGET.into(), site.id());\n\n site\n\n })\n\n .ok()\n\n })\n\n}\n\n\n", "file_path": 
"src/creeps/worker.rs", "rank": 85, "score": 138855.4053851511 }, { "content": "fn can_continue_building(room: &Room) -> ExecutionResult {\n\n let rcl = room.controller().map(|c| c.level()).unwrap_or(0);\n\n if rcl < 3 {\n\n Err(format!(\n\n \"controller is not advanced enough to warrant road construction in room {}\",\n\n room.name()\n\n ))?;\n\n }\n\n\n\n let has_construction = room\n\n .find(find::MY_CONSTRUCTION_SITES)\n\n .into_iter()\n\n .next()\n\n .is_some();\n\n if has_construction {\n\n Err(format!(\"Room {} has incomplete constructions\", room.name()))?;\n\n }\n\n\n\n let has_tower = room\n\n .find(find::STRUCTURES)\n\n .into_iter()\n\n .any(|s| s.structure_type() == StructureType::Tower);\n\n if !has_tower {\n\n Err(format!(\"Room {} does not have a Tower yet\", room.name()))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/constructions/roads.rs", "rank": 86, "score": 137102.74525486885 }, { "content": "#[derive(Debug, Clone, Copy, FromPrimitive, ToPrimitive)]\n\n#[repr(u8)]\n\nenum GoferState {\n\n Idle = 0,\n\n PickingUpEnergy,\n\n WithdrawingEnergy,\n\n Unloading,\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 87, "score": 135828.55007686105 }, { "content": "fn find_available_energy<'a>(creep: &'a Creep) -> Option<RoomObject> {\n\n trace!(\"Finding new withdraw target\");\n\n let result = js! 
{\n\n const creep = @{creep};\n\n const ts = creep.pos.findClosestByRange(FIND_TOMBSTONES, {\n\n filter: (ts) => ts.creep.my && ts.store[RESOURCE_ENERGY]\n\n });\n\n if (ts) {\n\n return ts;\n\n }\n\n if (creep.room.storage && creep.room.storage.store[RESOURCE_ENERGY] > 0) {\n\n return creep.room.storage;\n\n }\n\n const container = creep.pos.findClosestByRange(FIND_STRUCTURES, {\n\n filter: (i) => i.structureType == STRUCTURE_CONTAINER && i.store[RESOURCE_ENERGY] > 0\n\n });\n\n return container;\n\n };\n\n result.try_into().unwrap_or_else(|_| None)\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 88, "score": 135217.3777904039 }, { "content": "/// Run an iterator of tasks as a Selector\n\npub fn selector<'a, T: 'a + TaskInput, It: Iterator<Item = &'a Task<'a, T>>>(\n\n state: &'a mut T,\n\n tasks: It,\n\n) -> ExecutionResult {\n\n let found = tasks\n\n .map(|node| (node, node.tick(state)))\n\n .find(|(_node, result)| result.is_err());\n\n if let Some(found) = found {\n\n Err(format!(\"A task failed in Selector {:?}\", found.1))?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/bt/control.rs", "rank": 89, "score": 133652.00096773647 }, { "content": "/// Run an iterator of tasks as a Sequence\n\npub fn sequence<'a, T: 'a + TaskInput, It: Iterator<Item = &'a Task<'a, T>>>(\n\n state: &'a mut T,\n\n mut tasks: It,\n\n) -> ExecutionResult {\n\n let found = tasks.any(|node| {\n\n let result = node.tick(state);\n\n debug!(\n\n \"Task result in sequence node: {:?} result: {:?}\",\n\n node, result\n\n );\n\n result.is_ok()\n\n });\n\n if found {\n\n Ok(())\n\n } else {\n\n Err(\"All tasks failed in Sequence!\")?\n\n }\n\n}\n\n\n", "file_path": "src/bt/control.rs", "rank": 90, "score": 133652.00096773647 }, { "content": "fn connect(pos0: &RoomPosition, pos1: &RoomPosition, room: &Room) -> ExecutionResult {\n\n trace!(\n\n \"Connecting {} {} and {} {} in room {}\",\n\n pos0.x(),\n\n pos0.y(),\n\n pos1.x(),\n\n pos1.y(),\n\n room.name()\n\n );\n\n\n\n let path = js! 
{\n\n const room = @{room};\n\n const path = room.findPath(@{pos0}, @{pos1}, {\n\n ignoreCreeps: true,\n\n plainCost: 1,\n\n swampCost: 2,\n\n range: 0,\n\n });\n\n return Object.values(path.map((step) => new RoomPosition( step.x, step.y, room.name )));\n\n };\n", "file_path": "src/constructions/roads.rs", "rank": 91, "score": 129958.1624558935 }, { "content": "pub fn move_to_options<'a, T>(\n\n creep: &'a Creep,\n\n target: &'a T,\n\n options: MoveToOptions,\n\n) -> ExecutionResult\n\nwhere\n\n T: screeps::HasPosition,\n\n{\n\n let reuse_path = options.reuse_path;\n\n let res = js! {\n\n const creep = @{creep};\n\n const target = @{target.pos()};\n\n const reusePath = @{reuse_path};\n\n return creep.moveTo(target, {reusePath: reusePath});\n\n };\n\n let res =\n\n ReturnCode::try_from(res).map_err(|e| format!(\"Failed to convert move result {:?}\", e))?;\n\n match res {\n\n ReturnCode::Ok | ReturnCode::Tired => Ok(()),\n\n _ => {\n\n debug!(\"Move failed {:?}\", res);\n\n Err(\"Move failed\")?\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/creeps/mod.rs", "rank": 93, "score": 125599.64910751896 }, { "content": "fn read_target_container<'a>(state: &CreepState) -> Option<StructureContainer> {\n\n state\n\n .creep_memory_string(TARGET)\n\n .and_then(|id| get_object_typed(id).ok())?\n\n}\n\n\n", "file_path": "src/creeps/gofer.rs", "rank": 94, "score": 122374.20864174451 }, { "content": "fn spawn_creep(state: &mut GameState, spawn: &StructureSpawn, role: Role) -> ExecutionResult {\n\n trace!(\"Spawning creep\");\n\n\n\n let room = spawn.room();\n\n\n\n let spawn_config = spawn_config_by_role(&room, role);\n\n\n\n let mut body = spawn_config\n\n .basic_body\n\n .into_iter()\n\n .collect::<ArrayVec<[Part; 50]>>();\n\n let mut body_len = body.len(); // the index until the body is valid\n\n let max_len = spawn_config.body_max;\n\n\n\n if !spawn_config.body_extension.is_empty() {\n\n loop {\n\n let spawn_options = SpawnOptions::new().dry_run(true);\n\n if body.len() == 50\n\n || 
max_len\n\n .map(|max_len| max_len <= body.len())\n", "file_path": "src/structures/spawns.rs", "rank": 95, "score": 117068.5194043405 }, { "content": "fn _run(_state: &mut GameState) -> ExecutionResult {\n\n let time = screeps::game::time();\n\n let rooms = screeps::game::rooms::values();\n\n let len = rooms.len() as u32;\n\n\n\n if time % (len * 3) > len {\n\n Err(\"Skipping constructions task\")?;\n\n }\n\n\n\n let mut state = ConstructionState::read_from_segment_or_default(CONSTRUCTIONS_SEGMENT);\n\n let _sentinel = MemorySentinel::new(CONSTRUCTIONS_SEGMENT as u8, &state);\n\n\n\n let index = time % len;\n\n let room = &rooms[index as usize];\n\n\n\n manage_room(room, &mut state)\n\n}\n\n\n", "file_path": "src/constructions/mod.rs", "rank": 96, "score": 113193.99195304737 }, { "content": "fn remove_exp_markers(state: &mut GameState) -> ExecutionResult {\n\n let rooms = game::rooms::values();\n\n\n\n let rooms = rooms\n\n .into_iter()\n\n .map(|r| (WorldPosition::from(&r), r))\n\n .collect::<HashMap<_, _>>();\n\n\n\n let mut retain = BTreeSet::new();\n\n\n\n for exp in state.expansion.iter().cloned() {\n\n if let Some(room) = rooms.get(&exp) {\n\n let controller = room.controller();\n\n if controller.is_none() {\n\n continue;\n\n }\n\n let controller = controller.unwrap();\n\n // Help the room until it reaches level 4\n\n if controller.my() && controller.level() >= 4 {\n\n let spawn = room.find(find::MY_SPAWNS).len();\n", "file_path": "src/expansion.rs", "rank": 97, "score": 111670.25059849018 }, { "content": "pub fn attempt_repair<'a>(tower: &'a StructureTower) -> ExecutionResult {\n\n trace!(\"Repairing\");\n\n\n\n if tower.energy() < tower.energy_capacity() * 3 / 4 {\n\n return Err(\"loading\".into());\n\n }\n\n let target = find_repair_target(&tower.room()).ok_or_else(|| {\n\n let error = format!(\"Could not find a repair target\");\n\n debug!(\"{}\", error);\n\n error\n\n })?;\n\n trace!(\"Got repair target {:?}\", target.id());\n\n repair(tower, 
&target)\n\n}\n\n\n", "file_path": "src/structures/towers.rs", "rank": 98, "score": 107822.22110271257 }, { "content": "fn repair<'a>(tower: &'a StructureTower, target: &'a Structure) -> ExecutionResult {\n\n let res = tower.repair(target);\n\n if res == ReturnCode::Ok {\n\n Ok(())\n\n } else {\n\n let error = format!(\"Unexpected ReturnCode {:?}\", res);\n\n Err(error)?\n\n }\n\n}\n\n\n", "file_path": "src/structures/towers.rs", "rank": 99, "score": 107267.84764609739 } ]
Rust
libeir_ir/src/algo/equality.rs
lumen/eir
37e790f388d13a836991f8a6220eb322269d509e
use std::collections::{BTreeMap, VecDeque}; use snafu::Snafu; use crate::Function; use crate::ValueKind; use crate::{Block, Const, PrimOp, Value}; #[derive(Snafu, Debug, PartialEq, Eq)] pub enum EqualityFail { BlockArity { left: Block, right: Block }, BlockOp { left: Block, right: Block }, BlockReadsLength { left: Block, right: Block }, MismatchingValue { left: Value, right: Value }, PrimReadsLength { left: PrimOp, right: PrimOp }, MismatchingConst { left: Const, right: Const }, } struct EqCtx<'a> { lf: &'a Function, rf: &'a Function, to_walk: VecDeque<(Block, Block)>, to_check: Vec<(Value, Value)>, map: BTreeMap<Value, Value>, } impl Function { pub fn graph_eq( &self, lhs_block: Block, rhs: &Function, rhs_block: Block, ) -> Result<(), EqualityFail> { let mut ctx = EqCtx { lf: self, rf: rhs, map: BTreeMap::new(), to_walk: VecDeque::new(), to_check: Vec::new(), }; ctx.to_walk.push_back((lhs_block, rhs_block)); ctx.map .insert(ctx.lf.block_value(lhs_block), ctx.rf.block_value(rhs_block)); while let Some((l_b, r_b)) = ctx.to_walk.pop_front() { let l_block_val = ctx.lf.block_value(l_b); let r_block_val = ctx.rf.block_value(r_b); debug_assert_eq!(ctx.map.get(&l_block_val), Some(&r_block_val)); let l_args = ctx.lf.block_args(l_b); let r_args = ctx.rf.block_args(r_b); if l_args.len() != r_args.len() { return Result::Err(EqualityFail::BlockArity { left: l_b, right: r_b, }); } for (l, r) in l_args.iter().zip(r_args.iter()) { ctx.map.insert(*l, *r); } if !ctx.lf.block_op_eq(l_b, &ctx.rf, r_b) { return Result::Err(EqualityFail::BlockOp { left: l_b, right: r_b, }); } let l_reads = ctx.lf.block_reads(l_b); let r_reads = ctx.rf.block_reads(r_b); if l_reads.len() != r_reads.len() { return Result::Err(EqualityFail::BlockReadsLength { left: l_b, right: r_b, }); } for (l, r) in l_reads.iter().zip(r_reads.iter()) { traverse_value(&mut ctx, *l, *r)?; } } for (l, r) in ctx.to_check.iter() { if ctx.map.get(l) != Some(r) { return Result::Err(EqualityFail::MismatchingValue { left: *l, 
right: *r, }); } } Ok(()) } } fn traverse_value<'a>(ctx: &mut EqCtx<'a>, l: Value, r: Value) -> Result<(), EqualityFail> { if let Some(nr) = ctx.map.get(&l) { if *nr == r { return Ok(()); } else { return Err(EqualityFail::MismatchingValue { left: l, right: r }); } } match (ctx.lf.value_kind(l), ctx.rf.value_kind(r)) { (ValueKind::Block(lb), ValueKind::Block(rb)) => { ctx.map.insert(l, r); ctx.to_walk.push_back((lb, rb)); Ok(()) } (ValueKind::Argument(_, _), ValueKind::Argument(_, _)) => { ctx.to_check.push((l, r)); Ok(()) } (ValueKind::Const(lc), ValueKind::Const(rc)) => { if !ctx.lf.cons().eq_other(lc, ctx.rf.cons(), rc) { return Err(EqualityFail::MismatchingConst { left: lc, right: rc, }); } Ok(()) } (ValueKind::PrimOp(lp), ValueKind::PrimOp(rp)) => { let l_reads = ctx.lf.primop_reads(lp); let r_reads = ctx.rf.primop_reads(rp); if l_reads.len() != r_reads.len() { return Err(EqualityFail::PrimReadsLength { left: lp, right: rp, }); } for (l, r) in l_reads.iter().zip(r_reads.iter()) { traverse_value(ctx, *l, *r)?; } Ok(()) } _ => Err(EqualityFail::MismatchingValue { left: l, right: r }), } } #[cfg(test)] mod tests { use crate::parse_function_unwrap; #[test] fn basic_equality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/1 { entry(%ret, %thr, %arg1): block2(%arg1); block2(%b): block3(%b); block3(%a): %ret(%a); } ", ); let ir2 = parse_function_unwrap( " a'a':a'b'/1 { entry(%ret, %thr, %arg1): block2(%arg1); block3(%a): %ret(%a); block2(%b): block3(%b); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_ok()); } #[test] fn args_length_inequality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(%arg1); } ", ); let ir2 = parse_function_unwrap( " a'foo':a'bar'/1 { entry(%ret, %thr, %arg1): %ret(%arg1); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_err()); } #[test] fn args_read_inequality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, 
%arg1, %arg2): %ret(%arg1); } ", ); let ir2 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(%arg2); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_err()); } #[test] fn args_read_const_equality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(a'a'); } ", ); let ir2 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(a'a'); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_ok()); } #[test] fn args_read_const_inequality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(a'a'); } ", ); let ir2 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(a'b'); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_err()); } #[test] fn args_prim_inequality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/1 { entry(%ret, %thr, %arg1): %fun = a'a':a'a'/1; %fun(%ret, %thr, %arg1); } ", ); let ir2 = parse_function_unwrap( " a'foo':a'bar'/1 { entry(%ret, %thr, %arg1): %fun = a'a':a'b'/1; %fun(%ret, %thr, %arg1); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_err()); } }
use std::collections::{BTreeMap, VecDeque}; use snafu::Snafu; use crate::Function; use crate::ValueKind; use crate::{Block, Const, PrimOp, Value}; #[derive(Snafu, Debug, PartialEq, Eq)] pub enum EqualityFail { BlockArity { left: Block, right: Block }, BlockOp { left: Block, right: Block }, BlockReadsLength { left: Block, right: Block }, MismatchingValue { left: Value, right: Value }, PrimReadsLength { left: PrimOp, right: PrimOp }, MismatchingConst { left: Const, right: Const }, } struct EqCtx<'a> { lf: &'a Function, rf: &'a Function, to_walk: VecDeque<(Block, Block)>, to_check: Vec<(Value, Value)>, map: BTreeMap<Value, Value>, } impl Function { pub fn graph_eq( &self, lhs_block: Block, rhs: &Function, rhs_block: Block, ) -> Result<(), EqualityFail> { let mut ctx = EqCtx { lf: self, rf: rhs, map: BTreeMap::new(), to_walk: VecDeque::new(), to_check: Vec::new(), }; ctx.to_walk.push_back((lhs_block, rhs_block)); ctx.map .insert(ctx.lf.block_value(lhs_block), ctx.rf.block_value(rhs_block)); while let Some((l_b, r_b)) = ctx.to_walk.pop_front() { let l_block_val = ctx.lf.block_value(l_b); let r_block_val = ctx.rf.block_value(r_b); debug_assert_eq!(ctx.map.get(&l_block_val), Some(&r_block_val)); let l_args = ctx.lf.block_args(l_b); let r_args = ctx.rf.block_args(r_b); if l_args.len() != r_args.len() { return Result::Err(EqualityFail::BlockArity { left: l_b, right: r_b, }); } for (l, r) in l_args.iter().zip(r_args.iter()) { ctx.map.insert(*l, *r); } if !ctx.lf.block_op_eq(l_b, &ctx.rf, r_b) { return Result::Err(EqualityFail::BlockOp { left: l_b, right: r_b, }); } let l_reads = ctx.lf.block_reads(l_b); let r_reads = ctx.rf.block_reads(r_b); if l_reads.len() != r_reads.len() { return Result::Err(EqualityFail::BlockReadsLength { left: l_b, right: r_b, }); } for (l, r) in l_reads.iter().zip(r_reads.iter()) { traverse_value(&mut ctx, *l, *r)?; } } for (l, r) in ctx.to_check.iter() { if ctx.map.get(l) != Some(r) { return Result::Err(EqualityFail::MismatchingValue { left: *l, 
right: *r, }); } } Ok(()) } } fn traverse_value<'a>(ctx: &mut EqCtx<'a>, l: Value, r: Value) -> Result<(), EqualityFail> { if let Some(nr) = ctx.map.get(&l) { if *nr == r { return Ok(()); } else { return Err(EqualityFail::MismatchingValue { left: l, right: r }); } } match (ctx.lf.value_kind(l), ctx.rf.value_kind(r)) { (ValueKind::Block(lb), ValueKind::Block(rb)) => { ctx.map.insert(l, r); ctx.to_walk.push_back((lb, rb)); Ok(()) } (ValueKind::Argument(_, _), ValueKind::Argument(_, _)) => { ctx.to_check.push((l, r)); Ok(()) } (ValueKind::Const(lc), ValueKind::Const(rc)) => {
Ok(()) } (ValueKind::PrimOp(lp), ValueKind::PrimOp(rp)) => { let l_reads = ctx.lf.primop_reads(lp); let r_reads = ctx.rf.primop_reads(rp); if l_reads.len() != r_reads.len() { return Err(EqualityFail::PrimReadsLength { left: lp, right: rp, }); } for (l, r) in l_reads.iter().zip(r_reads.iter()) { traverse_value(ctx, *l, *r)?; } Ok(()) } _ => Err(EqualityFail::MismatchingValue { left: l, right: r }), } } #[cfg(test)] mod tests { use crate::parse_function_unwrap; #[test] fn basic_equality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/1 { entry(%ret, %thr, %arg1): block2(%arg1); block2(%b): block3(%b); block3(%a): %ret(%a); } ", ); let ir2 = parse_function_unwrap( " a'a':a'b'/1 { entry(%ret, %thr, %arg1): block2(%arg1); block3(%a): %ret(%a); block2(%b): block3(%b); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_ok()); } #[test] fn args_length_inequality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(%arg1); } ", ); let ir2 = parse_function_unwrap( " a'foo':a'bar'/1 { entry(%ret, %thr, %arg1): %ret(%arg1); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_err()); } #[test] fn args_read_inequality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(%arg1); } ", ); let ir2 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(%arg2); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_err()); } #[test] fn args_read_const_equality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(a'a'); } ", ); let ir2 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(a'a'); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_ok()); } #[test] fn args_read_const_inequality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(a'a'); } ", ); 
let ir2 = parse_function_unwrap( " a'foo':a'bar'/2 { entry(%ret, %thr, %arg1, %arg2): %ret(a'b'); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_err()); } #[test] fn args_prim_inequality() { let ir1 = parse_function_unwrap( " a'foo':a'bar'/1 { entry(%ret, %thr, %arg1): %fun = a'a':a'a'/1; %fun(%ret, %thr, %arg1); } ", ); let ir2 = parse_function_unwrap( " a'foo':a'bar'/1 { entry(%ret, %thr, %arg1): %fun = a'a':a'b'/1; %fun(%ret, %thr, %arg1); } ", ); assert!(ir1 .graph_eq(ir1.block_entry(), &ir2, ir2.block_entry()) .is_err()); } }
if !ctx.lf.cons().eq_other(lc, ctx.rf.cons(), rc) { return Err(EqualityFail::MismatchingConst { left: lc, right: rc, }); }
if_condition
[ { "content": "fn lower_function(ctx: &mut LowerCtx, b: &mut FunctionBuilder, fun: &Function) -> IrBlock {\n\n let entry = b.block_insert_with_span(Some(fun.span()));\n\n\n\n match fun {\n\n Function::Named(_named) => unimplemented!(),\n\n Function::Unnamed(lambda) => {\n\n ctx.fun_num += 1;\n\n let base_fun = &ctx.functions[0];\n\n let new_fun = format!(\"{}-fun-{}\", base_fun, ctx.fun_num);\n\n ctx.functions.push(new_fun);\n\n\n\n lower_function_base(ctx, b, entry, lambda.span, lambda.arity, &lambda.clauses);\n\n\n\n ctx.functions.pop().unwrap();\n\n }\n\n }\n\n\n\n entry\n\n}\n\n\n", "file_path": "libeir_syntax_erl/src/lower/mod.rs", "rank": 1, "score": 327396.7893905393 }, { "content": "pub fn function_map(text: &str) -> (Result<(crate::Function, LowerMap), ()>, Errors) {\n\n function_map_codemap(text, Arc::new(CodeMap::new()))\n\n}\n\n\n", "file_path": "libeir_ir/src/text/parser/mod.rs", "rank": 2, "score": 306530.3519340423 }, { "content": "pub fn print_constants(_ctx: &mut ToEirTextContext, _fun: &Function, _indent: usize, _out: &mut dyn Write) -> std::io::Result<()> {\n\n // TODO\n\n //let mut used_values = HashSet::new();\n\n //fun.used_values(&mut used_values);\n\n\n\n //let mut values: Vec<_> = used_values.iter().cloned().collect();\n\n //values.sort();\n\n\n\n //for value in values.iter() {\n\n // let typ = fun.value(*value);\n\n // match typ {\n\n // ValueType::Constant(cons) => {\n\n // write_indent(out, indent)?;\n\n // write!(out, \"%{} = \", value.index())?;\n\n // cons.to_eir_text(ctx, indent+1, out)?;\n\n // write!(out, \";\\n\")?;\n\n // },\n\n // ValueType::Variable => (),\n\n // }\n\n //}\n", "file_path": "libeir_ir/src/text/printer.old.rs", "rank": 3, "score": 305166.0139850219 }, { "content": "fn lower_top_function(ctx: &mut LowerCtx, b: &mut FunctionBuilder, function: &NamedFunction) {\n\n let entry = b.block_insert();\n\n b.block_set_entry(entry);\n\n\n\n let fun_name = format!(\"{}/{}\", function.name, function.arity);\n\n 
assert!(ctx.functions.len() == 0);\n\n ctx.functions.push(fun_name);\n\n\n\n lower_function_base(\n\n ctx,\n\n b,\n\n entry,\n\n function.span,\n\n function.arity,\n\n &function.clauses,\n\n );\n\n\n\n ctx.functions.pop().unwrap();\n\n assert!(ctx.functions.len() == 0);\n\n}\n", "file_path": "libeir_syntax_erl/src/lower/mod.rs", "rank": 4, "score": 299243.04859074263 }, { "content": "pub fn intern_binary_const(ident: Ident, c: &mut ConstantContainer) -> Result<Const, LowerError> {\n\n let chars = tokenize_string(ident)?;\n\n\n\n let bytes = chars.iter().copied().map(|i| i as u8).collect::<Vec<_>>();\n\n let bin = BinaryTerm(bytes);\n\n Ok(c.from(bin))\n\n}\n\n\n", "file_path": "libeir_syntax_erl/src/lower/expr/literal.rs", "rank": 5, "score": 299057.7394347346 }, { "content": "pub fn intern_string_const(ident: Ident, c: &mut ConstantContainer) -> Result<Const, LowerError> {\n\n let chars = tokenize_string(ident)?;\n\n\n\n let mut cons = c.from(NilTerm);\n\n for elem in chars.iter().rev() {\n\n let val = c.from(*elem);\n\n cons = c.list_cell(val, cons);\n\n }\n\n\n\n Ok(c.from(cons))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use libeir_intern::Ident;\n\n\n\n use super::tokenize_string;\n\n\n\n #[test]\n\n fn string_literal_parse() {\n", "file_path": "libeir_syntax_erl/src/lower/expr/literal.rs", "rank": 6, "score": 299057.7394347346 }, { "content": "pub fn scoped_cell<T: ?Sized, F, R>(value: &mut T, fun: F) -> R\n\nwhere\n\n F: FnOnce(ScopedCell<T>) -> R,\n\n{\n\n let guard = new(value);\n\n\n\n let cell = guard.clone_cell();\n\n fun(cell)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{new, scoped_cell};\n\n\n\n #[test]\n\n fn creation() {\n\n let mut a: u32 = 0;\n\n scoped_cell(&mut a, |_ac| {});\n\n }\n\n\n", "file_path": "util/scoped_cell/src/lib.rs", "rank": 7, "score": 287501.7869993156 }, { "content": "fn format_value(value: Value, fun: &Function, out: &mut dyn Write) -> std::io::Result<()> {\n\n match fun.value_kind(value) {\n\n 
ValueKind::Block(block) => write!(out, \"{}\", block)?,\n\n ValueKind::Const(cons) => {\n\n fun.cons().write(cons, out);\n\n },\n\n _ => write!(out, \"%{}\", value.index())?,\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "libeir_ir/src/text/printer.old.rs", "rank": 8, "score": 284307.1900066528 }, { "content": "pub fn function(text: &str) -> (Result<crate::Function, ()>, Errors) {\n\n match function_map(text) {\n\n (Ok((fun, _)), errors) => (Ok(fun), errors),\n\n (Err(()), errors) => (Err(()), errors),\n\n }\n\n}\n\n\n", "file_path": "libeir_ir/src/text/parser/mod.rs", "rank": 9, "score": 279840.0345302822 }, { "content": "fn map_node(ctx: &mut LowerCtx, b: &mut FunctionBuilder, t: &mut Tree, node: TreeNode) -> TreeNode {\n\n let new = match t.nodes[node].clone() {\n\n TreeNodeKind::Atomic(_, _) => node,\n\n TreeNodeKind::Wildcard(_) => node,\n\n TreeNodeKind::Tuple { span, elems } => {\n\n let mut new_elems = EntityList::new();\n\n for idx in 0..elems.len(&t.node_pool) {\n\n let node = elems.get(idx, &t.node_pool).unwrap();\n\n let new = map_node(ctx, b, t, node);\n\n new_elems.push(new, &mut t.node_pool);\n\n }\n\n t.nodes.push(TreeNodeKind::Tuple {\n\n span: span,\n\n elems: new_elems,\n\n })\n\n }\n\n TreeNodeKind::Cons { span, head, tail } => {\n\n let new_head = map_node(ctx, b, t, head);\n\n let new_tail = map_node(ctx, b, t, tail);\n\n t.nodes.push(TreeNodeKind::Cons {\n", "file_path": "libeir_syntax_erl/src/lower/pattern/tree/merge.rs", "rank": 10, "score": 273559.0789040922 }, { "content": "fn read_condition<R, S>(reader: &mut R) -> Result<VecDeque<Lexed>>\n\nwhere\n\n R: TokenReader<Source = S>,\n\n{\n\n let mut open = 0;\n\n let mut condition = VecDeque::new();\n\n\n\n loop {\n\n match reader.try_read_token()? 
{\n\n None => return Err(PreprocessorError::UnexpectedEOF),\n\n Some(token) => match token {\n\n LexicalToken(_, Token::LParen, _) => {\n\n open = open + 1;\n\n condition.push_back(Ok(token));\n\n }\n\n LexicalToken(_, Token::RParen, _) if open == 0 => {\n\n reader.unread_token(token);\n\n break;\n\n }\n\n LexicalToken(_, Token::RParen, _) => {\n", "file_path": "libeir_syntax_erl/src/preprocessor/directives.rs", "rank": 11, "score": 265646.03576520266 }, { "content": "pub fn calculate_live_values(fun: &Function) -> LiveValues {\n\n let mut forest = SetForest::new();\n\n\n\n let mut live_at: HashMap<Block, Set<Value>> = HashMap::new();\n\n let mut live_in: HashMap<Block, Set<Value>> = HashMap::new();\n\n\n\n // Iterate dataflow until all dependencies have been resolved\n\n loop {\n\n let res = dataflow_pass(fun, &mut forest, &mut live_at, &mut live_in);\n\n if res {\n\n break;\n\n }\n\n }\n\n\n\n // Validate that the live set at entry is empty\n\n {\n\n let entry = fun.block_entry();\n\n assert!(\n\n live_at[&entry].iter(&forest).count() == 0,\n\n \"{:?}\",\n", "file_path": "libeir_ir/src/algo/live.rs", "rank": 12, "score": 261765.96598179982 }, { "content": "pub fn function_map_unwrap(text: &str) -> (crate::Function, LowerMap) {\n\n let codemap = Arc::new(CodeMap::new());\n\n match function_map_codemap(text, codemap.clone()) {\n\n (Ok(fun), errors) => {\n\n errors.print(&codemap);\n\n fun\n\n }\n\n (Err(()), errors) => {\n\n errors.print(&codemap);\n\n panic!();\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use std::sync::Arc;\n\n\n\n use super::function_unwrap;\n", "file_path": "libeir_ir/src/text/parser/mod.rs", "rank": 13, "score": 255479.7425456681 }, { "content": "pub fn function_into_graph_printer<O>(fun: &Function, g: &mut GraphPrinter<O>)\n\nwhere\n\n O: std::fmt::Write,\n\n{\n\n let mut buf = String::new();\n\n\n\n let mut config = pr::FormatConfig {\n\n width: 80,\n\n block_iterator_config: pr::DfsBlockIteratorConfig,\n\n value_formatter: 
pr::StandardValueFormatter,\n\n block_value_layout: pr::ReferencePrimopBlockValueLayout::default(),\n\n };\n\n let mut state = FormatState {\n\n function: fun,\n\n nesting: 0,\n\n };\n\n\n\n let arena = Arena::new();\n\n let mut ctx = FunctionFormatData {\n\n arena: &arena,\n", "file_path": "libeir_ir/src/text/dot_printer.rs", "rank": 14, "score": 253473.97740614536 }, { "content": "pub fn function_map_codemap(\n\n text: &str,\n\n codemap: Arc<CodeMap>,\n\n) -> (Result<(crate::Function, LowerMap), ()>, Errors) {\n\n let mut errors = Errors::new();\n\n\n\n let parser = Parser::new((), codemap);\n\n\n\n let ret = match parser.parse_string(&mut errors, text) {\n\n Ok(named) => {\n\n let named: NamedFunction = named;\n\n\n\n error_tee(&mut errors, |mut errors| {\n\n let mut adapter = errors.make_into_adapter();\n\n\n\n match named.function.lower(&mut adapter, named.name) {\n\n Ok(res) => Ok(res),\n\n Err(()) => Err(()),\n\n }\n\n })\n\n }\n\n Err(()) => Err(()),\n\n };\n\n\n\n (ret, errors)\n\n}\n\n\n", "file_path": "libeir_ir/src/text/parser/mod.rs", "rank": 15, "score": 244114.05722179063 }, { "content": "pub fn format_function_body<B, V, L, S>(\n\n function: &Function,\n\n config: &mut FormatConfig<B, V, L>,\n\n sink: &mut S,\n\n) -> Result<(), DynError>\n\nwhere\n\n B: BlockIteratorConfig,\n\n V: ValueFormatter,\n\n L: BlockValueLayout,\n\n S: BlockFormatSink,\n\n{\n\n let mut state = FormatState {\n\n function,\n\n nesting: 0,\n\n };\n\n format_function_body_state(config, &mut state, sink)\n\n}\n\n\n", "file_path": "libeir_ir/src/text/printer/mod.rs", "rank": 16, "score": 239362.1204577384 }, { "content": "pub fn start_basic_block(module: &Atom, ident: &FunctionIdent, block: LabelN) {\n\n TRACE_COLLECTOR.with(|c| {\n\n let mut c = c.lock().unwrap();\n\n let pid = c.current_pid;\n\n c.events.push(TraceEvent{\n\n pid: pid,\n\n typ: TraceEventType::BasicBlockStart {\n\n module: module.clone(),\n\n ident: ident.clone(),\n\n block: block,\n\n },\n\n });\n\n 
})\n\n}\n\n\n", "file_path": "libeir_interpreter/src/trace/trace.rs", "rank": 17, "score": 234532.35688633908 }, { "content": "fn eval_map(mut fields: Vec<MapField>) -> Result<Vec<MapField>, PreprocessorError> {\n\n let mut result = Vec::new();\n\n\n\n for field in fields.drain(..) {\n\n match field {\n\n MapField::Assoc {\n\n span,\n\n id,\n\n key,\n\n value,\n\n } => result.push(MapField::Assoc {\n\n span,\n\n id,\n\n key: eval(key)?,\n\n value: eval(value)?,\n\n }),\n\n MapField::Exact {\n\n span,\n\n id,\n\n key,\n", "file_path": "libeir_syntax_erl/src/preprocessor/evaluator.rs", "rank": 18, "score": 232670.23835065492 }, { "content": "pub fn enter_function(_ident: &FunctionIdent, _lambda: Option<Block>, _args: &[Rc<Term>]) {}\n", "file_path": "libeir_interpreter/src/trace/dummy.rs", "rank": 19, "score": 227566.28242393764 }, { "content": "pub fn enter_function(ident: &FunctionIdent, lambda: Option<Block>, args: &[Rc<Term>]) {\n\n TRACE_COLLECTOR.with(|c| {\n\n let mut c = c.lock().unwrap();\n\n let pid = c.current_pid;\n\n if !c.pid_stacks.contains_key(&pid) {\n\n c.pid_stacks.insert(pid, Vec::new());\n\n }\n\n {\n\n let stack = c.pid_stacks.get_mut(&pid).unwrap();\n\n stack.push(StackEntry {\n\n pid: pid,\n\n module: module.clone(),\n\n ident: ident.clone(),\n\n args: args.to_vec(),\n\n });\n\n }\n\n c.events.push(TraceEvent {\n\n pid: pid,\n\n typ: TraceEventType::FunctionEnter {\n\n ident: ident.clone(),\n\n args: args.to_vec(),\n\n },\n\n });\n\n })\n\n}\n\n\n", "file_path": "libeir_interpreter/src/trace/trace.rs", "rank": 20, "score": 227566.28242393764 }, { "content": "pub fn function_to_dot(fun: &Function) -> String {\n\n let mut g = GraphPrinter::new();\n\n function_into_graph_printer(fun, &mut g);\n\n g.finish().unwrap()\n\n}\n", "file_path": "libeir_ir/src/text/dot_printer.rs", "rank": 21, "score": 222175.64651092474 }, { "content": "pub fn exit_function(ident: &FunctionIdent, ret: Option<&CallReturn>) {\n\n TRACE_COLLECTOR.with(|c| {\n\n let mut c = 
c.lock().unwrap();\n\n let pid = c.current_pid;\n\n {\n\n let stack = c.pid_stacks.get_mut(&pid).unwrap();\n\n let removed = stack.pop().unwrap();\n\n assert!(&removed.module == module);\n\n assert!(&removed.ident == ident);\n\n }\n\n c.events.push(TraceEvent {\n\n pid: pid,\n\n typ: TraceEventType::FunctionExit {\n\n ret: ret.cloned(),\n\n },\n\n });\n\n })\n\n}\n\n\n", "file_path": "libeir_interpreter/src/trace/trace.rs", "rank": 23, "score": 218983.74357423626 }, { "content": "pub fn match_op(\n\n exec: &mut CallExecutor,\n\n fun: &ErlangFunction,\n\n branches: &[MatchKind],\n\n block: Block,\n\n) -> TermCall {\n\n let reads = fun.fun.block_reads(block);\n\n\n\n let branches_elems = Term::as_value_list(&exec.make_term(fun, reads[0]));\n\n\n\n let unpack_term = exec.make_term(fun, reads[1]);\n\n\n\n for (idx, kind) in branches.iter().enumerate() {\n\n let branch_args = Term::as_value_list(&exec.make_term(fun, reads[idx + 2]));\n\n\n\n match kind {\n\n MatchKind::Value => {\n\n assert!(branch_args.len() == 1);\n\n\n\n if unpack_term.erl_exact_eq(&*branch_args[0]) {\n", "file_path": "libeir_interpreter/src/process/match.rs", "rank": 24, "score": 218208.8091383432 }, { "content": "pub fn analyze(fun: &Function) -> LowerData {\n\n let live = fun.live_values();\n\n let func_tree = fun.func_tree(&live, true);\n\n\n\n LowerData { live, func_tree }\n\n}\n", "file_path": "libeir_lowerutils/src/lib.rs", "rank": 25, "score": 214875.67336374815 }, { "content": "pub fn register(dialect: &mut Dialect) {\n\n dialect.register_op::<ReceiveStart>();\n\n dialect.register_op_branches_impl(&ReceiveStart);\n\n\n\n dialect.register_op::<ReceiveWait>();\n\n dialect.register_op_branches_impl(&ReceiveWait);\n\n\n\n dialect.register_op::<ReceiveDone>();\n\n dialect.register_op_branches_impl(&ReceiveDone);\n\n}\n", "file_path": "libeir_ir/src/operation/receive.rs", "rank": 26, "score": 214676.2072559012 }, { "content": "pub fn register(dialect: &mut Dialect) {\n\n 
dialect.register_op::<Case>();\n\n dialect.register_op_branches_impl(&Case {\n\n inner: Box::new(Inner {\n\n container: PatternContainer::new(),\n\n clauses: Vec::new(),\n\n }),\n\n });\n\n}\n", "file_path": "libeir_ir/src/operation/case.rs", "rank": 27, "score": 214676.2072559012 }, { "content": "pub fn function_unwrap(text: &str) -> crate::Function {\n\n let codemap = Arc::new(CodeMap::new());\n\n match function_codemap(text, codemap.clone()) {\n\n (Ok(fun), errors) => {\n\n errors.print(&codemap);\n\n fun\n\n }\n\n (Err(()), errors) => {\n\n errors.print(&codemap);\n\n panic!();\n\n }\n\n }\n\n}\n\n\n", "file_path": "libeir_ir/src/text/parser/mod.rs", "rank": 28, "score": 214187.7611901131 }, { "content": "pub trait ContainerDebug<C>: Debug {\n\n fn fmt(&self, container: &C, f: &mut Formatter) -> Result;\n\n}\n\n\n\npub struct ContainerDebugAdapter<'a, C, V> {\n\n pub container: &'a C,\n\n pub value: &'a V,\n\n}\n\nimpl<C, V> Debug for ContainerDebugAdapter<'_, C, V>\n\nwhere\n\n V: ContainerDebug<C>,\n\n{\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n ContainerDebug::fmt(self.value, self.container, f)\n\n }\n\n}\n\n\n\ndefault impl<C, V> ContainerDebug<C> for V {\n\n fn fmt(&self, _con: &C, f: &mut Formatter) -> Result {\n\n std::fmt::Debug::fmt(self, f)\n", "file_path": "libeir_ir/src/function/format.rs", "rank": 29, "score": 211600.09092272422 }, { "content": "pub fn register(dialect: &mut Dialect) {\n\n dialect.register_op::<BinaryConstructStart>();\n\n dialect.register_op_branches_impl(&BinaryConstructStart);\n\n\n\n dialect.register_op::<BinaryConstructPush>();\n\n dialect.register_op_branches_impl(&BinaryConstructPush::default());\n\n\n\n dialect.register_op::<BinaryConstructFinish>();\n\n dialect.register_op_branches_impl(&BinaryConstructFinish);\n\n}\n", "file_path": "libeir_ir/src/operation/binary_construct.rs", "rank": 30, "score": 211538.39233212455 }, { "content": "/// The guard must not be dropped while a borrow of a related cell is in\n\n/// 
progress. If this happens, the whole process will be aborted.\n\npub fn new<'a, T: ?Sized + 'a>(value: &'a mut T) -> ScopedCellCreatorGuard<'a, T> {\n\n // Because we are using the value reference in a `PhantomData`, the\n\n // reference will be concidered used, while not actually existing in a\n\n // usable form until this guard stuct is dropped.\n\n //\n\n // This should make things sound when we create a mutable reference from\n\n // the pointer we have stored in `ScopedCellInner`, because that can only\n\n // be done while the actual value reference is tied up in the PhantomData\n\n // of this guard.\n\n\n\n unsafe {\n\n let inner_layout = Layout::new::<ScopedCellInner<T>>();\n\n let inner_ptr_u8 = alloc(inner_layout);\n\n\n\n let inner_ptr = NonNull::new(inner_ptr_u8 as *mut _).unwrap();\n\n std::ptr::write(\n\n inner_ptr.as_ptr(),\n\n ScopedCellInner {\n\n references: 1,\n\n active: false,\n", "file_path": "util/scoped_cell/src/lib.rs", "rank": 31, "score": 205497.34522064764 }, { "content": "pub fn module_codemap(text: &str, codemap: Arc<CodeMap>) -> (Result<crate::Module, ()>, Errors) {\n\n let mut errors = Errors::new();\n\n\n\n let parser = Parser::new((), codemap);\n\n\n\n let res = match parser.parse_string(&mut errors, text) {\n\n Ok(module) => {\n\n let module: super::ast::Module = module;\n\n\n\n error_tee(&mut errors, |mut errors| {\n\n let mut adapter = errors.make_into_adapter();\n\n\n\n match module.lower(&mut adapter) {\n\n Ok(module) => Ok(module),\n\n Err(()) => Err(()),\n\n }\n\n })\n\n }\n\n Err(()) => Err(()),\n\n };\n\n\n\n (res, errors)\n\n}\n\n\n", "file_path": "libeir_ir/src/text/parser/mod.rs", "rank": 32, "score": 204763.74866333144 }, { "content": "pub fn make_maps() -> NativeModule {\n\n let mut module = NativeModule::new(Symbol::intern(\"maps\"));\n\n module.add_fun(Symbol::intern(\"new\"), 0, Box::new(new_0));\n\n module\n\n}\n", "file_path": "libeir_interpreter/src/erl_lib/maps.rs", "rank": 33, "score": 204029.5966107556 }, { 
"content": "pub fn end_basic_block() {\n\n TRACE_COLLECTOR.with(|c| {\n\n let mut c = c.lock().unwrap();\n\n let pid = c.current_pid;\n\n c.events.push(TraceEvent{\n\n pid: pid,\n\n typ: TraceEventType::BasicBlockEnd,\n\n });\n\n })\n\n}\n\n\n", "file_path": "libeir_interpreter/src/trace/trace.rs", "rank": 34, "score": 202395.3716501016 }, { "content": "pub fn function_codemap(\n\n text: &str,\n\n codemap: Arc<CodeMap>,\n\n) -> (Result<crate::Function, ()>, Errors) {\n\n match function_map_codemap(text, codemap) {\n\n (Ok((fun, _)), errors) => (Ok(fun), errors),\n\n (Err(()), errors) => (Err(()), errors),\n\n }\n\n}\n\n\n", "file_path": "libeir_ir/src/text/parser/mod.rs", "rank": 35, "score": 202151.3247351367 }, { "content": "pub trait ErlEq<Rhs = Self> {\n\n fn erl_eq(&self, other: &Rhs) -> bool;\n\n}\n\n\n", "file_path": "libeir_interpreter/src/term.rs", "rank": 36, "score": 201982.35810314104 }, { "content": "pub fn format_module<B, V, L, S>(\n\n module: &Module,\n\n config: &mut FormatConfig<B, V, L>,\n\n sink: &mut S,\n\n) -> Result<(), DynError>\n\nwhere\n\n B: BlockIteratorConfig,\n\n V: ValueFormatter,\n\n L: BlockValueLayout,\n\n S: BlockFormatSink,\n\n{\n\n sink.write_str(&format!(\"{} {{\\n\", module.name().name.as_str().get()));\n\n\n\n let num_functions = module.function_iter().count();\n\n for (i, fun) in module.function_iter().enumerate() {\n\n let function = fun.function();\n\n let ident = function.ident();\n\n sink.write_str(&format!(\" {}/{} {{\\n\", &ident.name, ident.arity));\n\n let mut state = FormatState {\n\n function,\n", "file_path": "libeir_ir/src/text/printer/mod.rs", "rank": 37, "score": 200976.47132282527 }, { "content": "pub trait ErlExactEq<Rhs = Self> {\n\n fn erl_exact_eq(&self, other: &Rhs) -> bool;\n\n}\n\n\n", "file_path": "libeir_interpreter/src/term.rs", "rank": 38, "score": 198664.96931610786 }, { "content": "fn format_value_list(values: &[Value], fun: &Function,\n\n out: &mut dyn Write) -> std::io::Result<()> {\n\n for 
(idx, value) in values.iter().enumerate() {\n\n if idx != 0 {\n\n write!(out, \", \")?;\n\n }\n\n format_value(*value, fun, out)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "libeir_ir/src/text/printer.old.rs", "rank": 39, "score": 197496.14570888883 }, { "content": "pub fn error_tee<'a, E, W, F, R>(\n\n receiver: &'a mut (dyn ErrorReceiver<E = E, W = W> + 'a),\n\n fun: F,\n\n) -> R\n\nwhere\n\n F: FnOnce(ErrorReceiverTee<E, W>) -> R,\n\n{\n\n scoped_cell(receiver, |cell| {\n\n let tee = ErrorReceiverTee { cell };\n\n fun(tee)\n\n })\n\n}\n\n\n\npub struct ErrorReceiverTee<'a, E, W> {\n\n cell: ScopedCell<dyn ErrorReceiver<E = E, W = W> + 'a>,\n\n}\n\nimpl<'a, E, W> Clone for ErrorReceiverTee<'a, E, W> {\n\n fn clone(&self) -> Self {\n\n ErrorReceiverTee {\n\n cell: self.cell.clone(),\n", "file_path": "util/libeir_util_parse/src/errors.rs", "rank": 40, "score": 196360.91640250603 }, { "content": "/// This evaluator is used for performing simple reductions\n\n/// during preprocessing, namely for evaluating conditionals\n\n/// in -if/-elseif directives.\n\n///\n\n/// As a result, the output of this function is _not_ a primitive\n\n/// value, but rather an Expr which has been reduced to its simplest\n\n/// form (e.g. a BinaryOp that can be evaluated at compile-time would\n\n/// be converted into the corresponding literal representation of the\n\n/// result of that op)\n\n///\n\n/// Exprs which are not able to be evaluated at compile-time will be\n\n/// treated as errors. 
In particular the following constructs are supported,\n\n/// and you can consider everything else as invalid unless explicitly noted:\n\n///\n\n/// - Math on constants or expressions which evaluate to constants\n\n/// - Bit shift operations on constants or expressions which evaluate to constants\n\n/// - Comparisons on constants or expressions which evaluate to constants\n\n/// - The use of `++` and `--` on constant lists, or expressions which evaluate to constant lists\n\npub fn eval(expr: Expr) -> Result<Expr, PreprocessorError> {\n\n let result = match expr {\n\n // Nothing to be done here\n\n Expr::Var(_) => expr,\n\n Expr::Literal(_) => expr,\n\n Expr::Nil(_) => expr,\n\n Expr::FunctionName(_) => expr,\n\n Expr::RecordIndex(_) => expr,\n\n\n\n // Recursively evaluate subexpressions\n\n Expr::Cons(Cons {\n\n span,\n\n id,\n\n head,\n\n tail,\n\n }) => Expr::Cons(Cons {\n\n span,\n\n id,\n\n head: Box::new(eval(*head)?),\n\n tail: Box::new(eval(*tail)?),\n", "file_path": "libeir_syntax_erl/src/preprocessor/evaluator.rs", "rank": 41, "score": 193491.44653110328 }, { "content": "pub fn run_ct_suite(vm: &mut VMState, module: Ident) {\n\n let spec = get_suite_spec(vm, module);\n\n\n\n let config = Term::Nil;\n\n\n\n for entry in spec.entries.iter() {\n\n match entry {\n\n SuiteEntrySpec::Group { tests, .. 
} => {\n\n for test in tests {\n\n let fun = FunctionIdent {\n\n module,\n\n name: Ident::with_empty_span(*test),\n\n arity: 1,\n\n };\n\n let res = vm.call(&fun, &[config.clone()]);\n\n res.unwrap();\n\n }\n\n }\n\n SuiteEntrySpec::Test(sym) => {\n\n let fun = FunctionIdent {\n", "file_path": "libeir_tests/src/ct_runner.rs", "rank": 42, "score": 190502.59928308043 }, { "content": "pub fn module(text: &str) -> (Result<crate::Module, ()>, Errors) {\n\n module_codemap(text, Arc::new(CodeMap::new()))\n\n}\n\n\n", "file_path": "libeir_ir/src/text/parser/mod.rs", "rank": 43, "score": 189402.79455658945 }, { "content": "fn new_0(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 0);\n\n NativeReturn::Return {\n\n term: Term::Map(MapTerm::new()).into(),\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/maps.rs", "rank": 44, "score": 186116.82410108251 }, { "content": "fn is_map(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 1);\n\n let a1 = &*args[0];\n\n match a1 {\n\n Term::Map(_) => NativeReturn::Return {\n\n term: Term::new_bool(true).into(),\n\n },\n\n _ => NativeReturn::Return {\n\n term: Term::new_bool(false).into(),\n\n },\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 45, "score": 186116.82410108251 }, { "content": "fn is_function(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 1 || args.len() == 2);\n\n\n\n let arity_ref = if args.len() == 2 {\n\n if let Some(int) = args[1].as_i64() {\n\n Some(int)\n\n } else {\n\n panic!()\n\n }\n\n } else {\n\n None\n\n };\n\n\n\n if let Term::CapturedFunction { ident, .. } = &*args[0] {\n\n let res = arity_ref.map(|a| a == ident.arity as i64).unwrap_or(true);\n\n NativeReturn::Return {\n\n term: Term::new_bool(res).into(),\n\n }\n\n } else if let Term::BoundLambda { ident, .. 
} = &*args[0] {\n\n let res = arity_ref.map(|a| a == ident.arity as i64).unwrap_or(true);\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 46, "score": 185950.4121870959 }, { "content": "pub fn lower<S>(input: S, config: ParseConfig) -> Result<Module, ()>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let mut errors: Errors<ErlangError, ErlangError> = Errors::new();\n\n let codemap = Arc::new(CodeMap::new());\n\n let eir_res = error_tee(&mut errors, |mut errors| {\n\n let parser = Parser::new(config, codemap.clone());\n\n let ast = parser.parse_string(&mut errors.make_into_adapter(), input)?;\n\n let eir = lower_module(&mut errors.make_into_adapter(), codemap.clone(), &ast)?;\n\n Ok(eir)\n\n });\n\n\n\n errors.print(&codemap);\n\n\n\n eir_res\n\n}\n\n\n", "file_path": "libeir_tests/src/lib.rs", "rank": 47, "score": 185773.63058690514 }, { "content": "fn get_value_list<'a>(fun: &'a Function, value: Value) -> Option<&'a [Value]> {\n\n if let Some(prim) = fun.value_primop(value) {\n\n match fun.primop_kind(prim) {\n\n crate::PrimOpKind::ValueList => return Some(fun.primop_reads(prim)),\n\n _ => (),\n\n }\n\n }\n\n None\n\n}\n\n\n\nimpl Function {\n\n pub fn validate(&self, errors: &mut Vec<ValidationError>) {\n\n let block_graph = self.block_graph();\n\n let doms = petgraph::algo::dominators::simple_fast(&block_graph, self.block_entry());\n\n\n\n // Validate internal graph invariants\n\n self.graph_validate_global();\n\n\n\n self.validate_entry_invariants(errors);\n\n self.validate_blocks(errors);\n", "file_path": "libeir_ir/src/algo/validate.rs", "rank": 48, "score": 185763.27937514242 }, { "content": "pub fn warning_args<F>(_text: String, _make_args: F) where F: FnOnce() -> HashMap<String, ::serde_json::Value> {}\n", "file_path": "libeir_interpreter/src/trace/dummy.rs", "rank": 49, "score": 184987.5732335494 }, { "content": "pub fn warning_args<F>(text: String, make_args: F) where F: FnOnce() -> HashMap<String, ::serde_json::Value> {\n\n 
TRACE_COLLECTOR.with(|c| {\n\n let mut c = c.lock().unwrap();\n\n let pid = c.current_pid;\n\n c.events.push(TraceEvent {\n\n pid: pid,\n\n typ: TraceEventType::Warning {\n\n text: text,\n\n args: (make_args)()\n\n }\n\n });\n\n })\n\n}\n\n\n", "file_path": "libeir_interpreter/src/trace/trace.rs", "rank": 50, "score": 184987.5732335494 }, { "content": "fn get_value_list<'a>(fun: &'a Function, value: Value) -> Option<&'a [Value]> {\n\n if let Some(prim) = fun.value_primop(value) {\n\n match fun.primop_kind(prim) {\n\n crate::PrimOpKind::ValueList => return Some(fun.primop_reads(prim)),\n\n _ => (),\n\n }\n\n }\n\n None\n\n}\n\n\n\npub struct FormatConfig<B, V, L>\n\nwhere\n\n B: BlockIteratorConfig,\n\n V: ValueFormatter,\n\n L: BlockValueLayout,\n\n{\n\n pub width: usize,\n\n\n\n /// Encapsulates the iteration order for blocks within a function.\n\n pub block_iterator_config: B,\n", "file_path": "libeir_ir/src/text/printer/mod.rs", "rank": 51, "score": 183455.35495791672 }, { "content": "fn get_value_list<'a>(fun: &'a Function, value: Value) -> Option<&'a [Value]> {\n\n if let Some(prim) = fun.value_primop(value) {\n\n match fun.primop_kind(prim) {\n\n crate::PrimOpKind::ValueList =>\n\n return Some(fun.primop_reads(prim)),\n\n _ => (),\n\n }\n\n }\n\n None\n\n}\n\n\n\nimpl ToEirTextFun for Block {\n\n fn to_eir_text_fun(&self, ctx: &mut ToEirTextContext, fun: &Function,\n\n indent: usize, out: &mut dyn Write)\n\n -> std::io::Result<()>\n\n {\n\n\n\n write_indent(out, indent)?;\n\n write!(out, \"{}(\", self)?;\n\n format_value_list(fun.block_args(*self), fun, out)?;\n", "file_path": "libeir_ir/src/text/printer.old.rs", "rank": 52, "score": 183455.35495791672 }, { "content": "fn erl_self(_vm: &VMState, proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 0);\n\n NativeReturn::Return {\n\n term: Term::Pid(proc.pid).into(),\n\n }\n\n}\n\n\n\n//fn process_flag(vm: &VMState, proc: &mut ProcessContext, args: &[Rc<Term>]) -> 
NativeReturn {\n\n// assert!(args.len() == 2);\n\n// if args[0].erl_eq(&Term::new_atom(\"trap_exit\")) {\n\n// let mut mailboxes = vm.mailboxes.borrow_mut();\n\n// let mailbox = &mut mailboxes.get_mut(&proc.pid).unwrap();\n\n// let old_trap_exits = mailbox.get_trap_exits();\n\n// mailbox.set_trap_exits(args[1].as_boolean().unwrap());\n\n// NativeReturn::Return { term: Term::new_bool(old_trap_exits).into() }\n\n// } else {\n\n// unimplemented!()\n\n// }\n\n//}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 53, "score": 183323.59840670525 }, { "content": "fn exact_not_eq(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 2);\n\n NativeReturn::Return {\n\n term: Term::new_bool(!args[0].erl_exact_eq(&*args[1])).into(),\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 54, "score": 183320.09970582352 }, { "content": "fn exact_eq(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 2);\n\n NativeReturn::Return {\n\n term: Term::new_bool(args[0].erl_exact_eq(&*args[1])).into(),\n\n }\n\n}\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 55, "score": 183320.09970582352 }, { "content": "fn map_size(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 1);\n\n if let Some(map) = args[0].as_map() {\n\n NativeReturn::Return {\n\n term: Term::new_usize(map.len()).into(),\n\n }\n\n } else {\n\n unimplemented!()\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 56, "score": 183261.94455522008 }, { "content": "pub fn parse<'input>(text: &'input str) -> Result<Annotated<Module>,\n\n ::lalrpop_util::ParseError<usize, Tok<'input>, ()>> {\n\n\n\n let tokenizer = Tokenizer::new(text);\n\n let parser = grammar::AnnotatedModuleParser::new();\n\n parser.parse(text, tokenizer)\n\n}\n", "file_path": 
"libeir_syntax_core/src/parser/mod.rs", "rank": 57, "score": 183151.78674166047 }, { "content": "pub fn copy<S, D, T>(from: S, mut to: D)\n\nwhere\n\n S: BitRead<T = T>,\n\n D: BitWrite<T = T>,\n\n T: BitTransport,\n\n{\n\n to.write(from)\n\n}\n\n\n\npub struct CarrierWordIter<'a, I, T>\n\nwhere\n\n I: BitRead<T = T>,\n\n T: BitTransport,\n\n{\n\n inner: &'a I,\n\n _transport: PhantomData<T>,\n\n idx: usize,\n\n rem: usize,\n\n}\n\nimpl<'a, I, T> Iterator for CarrierWordIter<'a, I, T>\n", "file_path": "util/libeir_util_binary/src/lib.rs", "rank": 58, "score": 182211.32342667918 }, { "content": "pub fn tokenize_string(ident: Ident) -> Result<Vec<u64>, LowerError> {\n\n let string = ident.name.as_str().get();\n\n\n\n // http://erlang.org/doc/reference_manual/data_types.html#escape-sequences\n\n\n\n #[derive(Copy, Clone, Debug)]\n\n enum StringState {\n\n Norm,\n\n Escape {\n\n start: usize,\n\n },\n\n Oct {\n\n start: usize,\n\n digit_start: usize,\n\n num: usize,\n\n },\n\n HexStart {\n\n start: usize,\n\n },\n\n HexN {\n", "file_path": "libeir_syntax_erl/src/lower/expr/literal.rs", "rank": 59, "score": 181858.71883840568 }, { "content": "pub fn specifier_from_parsed(parsed: &[BitType]) -> Result<BinaryEntrySpecifier, LowerError> {\n\n let mut typ = None;\n\n let mut signed = None;\n\n let mut endianness = None;\n\n let mut unit = None;\n\n\n\n for entry in parsed {\n\n match entry {\n\n // Types\n\n BitType::Name(_id, _span, ident) if ident.as_str() == \"integer\" => {\n\n try_specifier!(typ, entry, TypeName::Integer)\n\n }\n\n BitType::Name(_id, _span, ident) if ident.as_str() == \"float\" => {\n\n try_specifier!(typ, entry, TypeName::Float)\n\n }\n\n BitType::Name(_id, _span, ident) if ident.as_str() == \"binary\" => {\n\n try_specifier!(typ, entry, TypeName::Bytes)\n\n }\n\n BitType::Name(_id, _span, ident) if ident.as_str() == \"bytes\" => {\n\n try_specifier!(typ, entry, TypeName::Bytes)\n", "file_path": "libeir_syntax_erl/src/lower/expr/binary.rs", "rank": 
60, "score": 181398.02683805636 }, { "content": "pub trait IntoValue {\n\n fn into_value<'a>(self, b: &mut FunctionBuilder<'a>) -> Value;\n\n fn get_value(self, fun: &Function) -> Option<Value>;\n\n}\n\nimpl IntoValue for Value {\n\n fn into_value<'a>(self, _b: &mut FunctionBuilder<'a>) -> Value {\n\n self\n\n }\n\n fn get_value(self, _fun: &Function) -> Option<Value> {\n\n Some(self)\n\n }\n\n}\n\nimpl IntoValue for Block {\n\n fn into_value<'a>(self, b: &mut FunctionBuilder<'a>) -> Value {\n\n b.fun.values.push(ValueKind::Block(self))\n\n }\n\n fn get_value(self, fun: &Function) -> Option<Value> {\n\n fun.values.get(ValueKind::Block(self))\n\n }\n\n}\n", "file_path": "libeir_ir/src/function/builder/mod.rs", "rank": 61, "score": 178792.5594954167 }, { "content": "fn eval_list(mut exprs: Vec<Expr>) -> Result<Vec<Expr>, PreprocessorError> {\n\n let mut result = Vec::new();\n\n\n\n for expr in exprs.drain(..) {\n\n result.push(eval(expr)?);\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "libeir_syntax_erl/src/preprocessor/evaluator.rs", "rank": 62, "score": 178514.7283798053 }, { "content": "fn write_indent(out: &mut dyn Write, indent: usize) -> std::io::Result<()> {\n\n for _ in 0..indent {\n\n write!(out, \" \")?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl ToEirText for FunctionIdent {\n\n fn to_eir_text(&self, _ctx: &mut ToEirTextContext, indent: usize, out: &mut dyn Write) -> std::io::Result<()> {\n\n write_indent(out, indent)?;\n\n write!(out, \"{}:{}/{}\",\n\n self.module, self.name, self.arity)?;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "libeir_ir/src/text/printer.old.rs", "rank": 63, "score": 177661.0328318145 }, { "content": "pub fn to_decision_tree<P>(pattern: &mut P) -> cfg::PatternCfg<P>\n\nwhere\n\n P: PatternProvider,\n\n{\n\n let mut context = MatchCompileContext::new(pattern);\n\n\n\n let root: matrix::MatchMatrix<P> = (*context.root_matrix()).clone();\n\n\n\n let root_cfg = context.cfg.get_entry();\n\n let wildcard = 
context.pattern.get_wildcard();\n\n\n\n matrix_to_decision_tree(\n\n root_cfg,\n\n &mut context,\n\n Some(wildcard),\n\n &root,\n\n root.variables.clone(),\n\n 0,\n\n );\n\n\n\n let cfg = context.cfg;\n\n assert!(!::petgraph::algo::is_cyclic_directed(&cfg.graph));\n\n cfg\n\n}\n", "file_path": "util/libeir_util_pattern_compiler/src/lib.rs", "rank": 64, "score": 175598.4711747404 }, { "content": "fn format_function_body_state<B, V, L, S>(\n\n config: &mut FormatConfig<B, V, L>,\n\n state: &mut FormatState,\n\n sink: &mut S,\n\n) -> Result<(), DynError>\n\nwhere\n\n B: BlockIteratorConfig,\n\n V: ValueFormatter,\n\n L: BlockValueLayout,\n\n S: BlockFormatSink,\n\n{\n\n let function = state.function;\n\n let mut block_iter = config.block_iterator_config.new(function);\n\n\n\n let arena = Arena::new();\n\n let mut ctx = FunctionFormatData {\n\n arena: &arena,\n\n buf: String::new(),\n\n value_buf: Vec::new(),\n\n config: PhantomData,\n", "file_path": "libeir_ir/src/text/printer/mod.rs", "rank": 65, "score": 174545.37983365764 }, { "content": "fn eval_record(mut fields: Vec<RecordField>) -> Result<Vec<RecordField>, PreprocessorError> {\n\n let mut result = Vec::new();\n\n\n\n for field in fields.drain(..) {\n\n let new_field = match field {\n\n RecordField {\n\n span,\n\n id,\n\n name,\n\n value: Some(value),\n\n ty,\n\n } => RecordField {\n\n span,\n\n id,\n\n name,\n\n value: Some(eval(value)?),\n\n ty,\n\n },\n\n RecordField {\n\n span,\n", "file_path": "libeir_syntax_erl/src/preprocessor/evaluator.rs", "rank": 66, "score": 173976.8632258418 }, { "content": "pub trait BlockValueLayout {\n\n /// Lays out the root scope for the module. This is called once\n\n /// at the beginning of processing a module.\n\n fn layout_module(&mut self, fun: &Function);\n\n /// Lays out the given block. 
This will be called once for each block.\n\n fn layout(&mut self, fun: &Function, block: Block);\n\n\n\n /// Values for the current layout.\n\n fn values(&self) -> &[Value];\n\n\n\n /// Queries whether the given value should be laid out inline, or if\n\n /// it should be referenced by value.\n\n fn should_layout(&self, value: Value, within: Option<Value>) -> bool;\n\n}\n\n\n\npub struct ReferencePrimopBlockValueLayout {\n\n values: Vec<Value>,\n\n values_set: BTreeMap<Value, usize>,\n\n}\n\nimpl Default for ReferencePrimopBlockValueLayout {\n", "file_path": "libeir_ir/src/text/printer/mod.rs", "rank": 67, "score": 172246.1280522262 }, { "content": "pub fn can_subsitute(graph: &ChainGraph, fun: &Function, chain: Chain) -> bool {\n\n let target = graph.target_block;\n\n let target_reads = fun.block_reads(target);\n\n let chain_data = &graph.chains[chain];\n\n\n\n if fun.block_kind(target).unwrap().is_call() && chain_data.args.len() == target_reads.len() - 1\n\n {\n\n chain_data\n\n .args\n\n .iter()\n\n .zip(target_reads.iter().skip(1))\n\n .map(|(ch, val)| -> Option<()> {\n\n let ch = (*ch)?;\n\n\n\n let node;\n\n if let Some(to) = graph.get_uniform(*val) {\n\n node = to;\n\n } else {\n\n let start_node = graph.get_root(*val)?;\n\n node = graph.follow_chain(start_node, chain);\n", "file_path": "libeir_passes/src/simplify_cfg/chain_graph/synthesis/single.rs", "rank": 68, "score": 171392.7500200923 }, { "content": "pub fn write_dot(module: &Module, ident: Option<FunctionIdent>) {\n\n if let Some(ident) = ident {\n\n let idx = module.ident_index(&ident).unwrap();\n\n let fun_def = &module[idx];\n\n let fun = fun_def.function();\n\n\n\n let dot = libeir_ir::text::dot_printer::function_to_dot(fun);\n\n print!(\"{}\", dot);\n\n } else {\n\n unimplemented!()\n\n }\n\n}\n", "file_path": "libeir_tests/src/lib.rs", "rank": 69, "score": 170130.6001806843 }, { "content": "struct PromoteCtx<'a, 'b> {\n\n ctx: &'a mut LowerCtx<'b>,\n\n binds: HashMap<Ident, TreeNode>,\n\n 
binds_scope: HashMapStack<Ident, TreeNode>,\n\n shadow: bool,\n\n}\n\nimpl<'a, 'b> PromoteCtx<'a, 'b> {\n\n fn resolve_or_bind(\n\n &mut self,\n\n hier: bool,\n\n ident: Ident,\n\n node: TreeNode,\n\n ) -> Option<Either<TreeNode, IrValue>> {\n\n if hier {\n\n self.binds_scope.insert(ident, node);\n\n }\n\n if self.shadow {\n\n if let Some(bound_node) = self.binds.get(&ident) {\n\n Some(Either::Left(*bound_node))\n\n } else {\n", "file_path": "libeir_syntax_erl/src/lower/pattern/tree/promote_values.rs", "rank": 70, "score": 167020.07356124627 }, { "content": "pub fn integer_to_carrier(mut int: BigInt, bits: usize, endian: Endian) -> BitSlice<Vec<u8>> {\n\n let negative = int < 0;\n\n if negative {\n\n int += 1;\n\n }\n\n\n\n let keep_bytes = (bits + 7) / 8;\n\n let aux_bits = bits % 8;\n\n\n\n let (_sign, mut digits) = match endian {\n\n Endian::Big => int.to_bytes_be(),\n\n Endian::Little => int.to_bytes_le(),\n\n };\n\n\n\n match endian {\n\n Endian::Big => {\n\n let mut new = Vec::new();\n\n\n\n if keep_bytes > digits.len() {\n\n new.resize(keep_bytes - digits.len(), 0);\n", "file_path": "util/libeir_util_binary/src/integer.rs", "rank": 71, "score": 160381.86998955382 }, { "content": "fn make_frontend(codemap: Arc<CodeMap>, matches: &ArgMatches) -> AnyFrontend {\n\n match value_t!(matches, \"IN_FORMAT\", InputType).unwrap() {\n\n InputType::Erl => make_erlang_frontend(codemap, matches).into(),\n\n InputType::Abstr => AbstrErlangFrontend::new(codemap).into(),\n\n InputType::Eir => EirFrontend::new(codemap).into(),\n\n }\n\n}\n\n\n", "file_path": "tools/src/compile.rs", "rank": 72, "score": 158369.96812492 }, { "content": "pub fn rewrite(\n\n b: &mut FunctionBuilder,\n\n map: &mut BTreeMap<Value, Value>,\n\n target: Block,\n\n graph: &ChainGraph,\n\n synthesis: &Synthesis,\n\n) {\n\n let segment_set_pool = synthesis.segment_set_pool.as_ref().unwrap();\n\n let segments_back = synthesis.segments_back.as_ref().unwrap();\n\n\n\n trace!(\"REWRITE {}\", target);\n\n 
//println!(\"{:#?}\", synthesis);\n\n\n\n //assert!(synthesis.segments[synthesis.order[0]].kind.is_out());\n\n //assert!(\n\n // synthesis\n\n // .order\n\n // .iter()\n\n // .map(|s| &synthesis.segments[*s])\n\n // .filter(|s| s.kind.is_out())\n", "file_path": "libeir_passes/src/simplify_cfg/rewrite.rs", "rank": 73, "score": 158299.38998131454 }, { "content": "pub trait PatternProvider: Debug {\n\n /// A reference to a unique node in the pattern graph.\n\n /// Every `PatternNodeKey` should belong to ONE and ONLY one\n\n /// `PatternNodeKind`.\n\n type PatternNodeKey: Copy + Hash + Debug + PartialEq + Eq;\n\n\n\n /// The type of pattern node.\n\n type PatternNodeKind: Copy + Hash + Debug + PartialEq + Eq;\n\n\n\n /// A variable in the output CFG.\n\n /// The provider is responsible for creating these as specializations\n\n /// are performed by `expand_clause_nodes`.\n\n type CfgVariable: Copy + Hash + Debug + PartialEq + Eq;\n\n\n\n const WILDCARD: Self::PatternNodeKind;\n\n\n\n fn get_root(&self) -> ExpandedClauseNodes<Self::CfgVariable, Self::PatternNodeKey>;\n\n\n\n /// Used to determine if the given `key` should be included in the\n\n /// specialization on `kind`.\n", "file_path": "util/libeir_util_pattern_compiler/src/pattern.rs", "rank": 74, "score": 156371.9943833265 }, { "content": "pub fn gen_vm_id() {\n\n let counter = VM_ID_COUNTER.lock().unwrap();\n\n VM_ID.with(|v| {\n\n let mut vm_id = v.write().unwrap();\n\n *vm_id = Some(*counter);\n\n });\n\n}\n\n\n", "file_path": "libeir_interpreter/src/trace/trace.rs", "rank": 75, "score": 156003.41854318135 }, { "content": "pub trait ErlOrd<Rhs = Self> {\n\n fn erl_ord(&self, other: &Rhs) -> ::std::cmp::Ordering;\n\n}\n\n\n\nimpl ErlEq for f64 {\n\n fn erl_eq(&self, other: &f64) -> bool {\n\n (*self) == (*other)\n\n }\n\n}\n\n\n\nimpl ErlEq for Term {\n\n fn erl_eq(&self, other: &Term) -> bool {\n\n match (self, other) {\n\n (Term::BoundLambda { .. }, _) => unreachable!(),\n\n (_, Term::BoundLambda { .. 
}) => unreachable!(),\n\n (Term::ValueList(_), _) => unimplemented!(),\n\n (_, Term::ValueList(_)) => unimplemented!(),\n\n\n\n (Term::Nil, Term::Nil) => true,\n\n (Term::ListCell(h1, t1), Term::ListCell(h2, t2)) => h1.erl_eq(h2) && t1.erl_eq(t2),\n", "file_path": "libeir_interpreter/src/term.rs", "rank": 76, "score": 155553.60741186593 }, { "content": "fn format_pattern(_ctx: &mut ToEirTextContext, pat: &PatternContainer, _indent: usize,\n\n annotated_nodes: &HashSet<PatternNode>,\n\n node: PatternNode, out: &mut dyn Write) -> std::io::Result<()> {\n\n if annotated_nodes.contains(&node) {\n\n write!(out, \"n{} @ \", node.index())?;\n\n }\n\n match pat.node_kind(node) {\n\n PatternNodeKind::Wildcard => write!(out, \"_\")?,\n\n _ => write!(out, \"?\")?,\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "libeir_ir/src/text/printer.old.rs", "rank": 77, "score": 155500.49745900038 }, { "content": "pub fn lower_cfg(\n\n bump: &Bump,\n\n b: &mut FunctionBuilder,\n\n pat: &PatternContainer,\n\n provider: &ErlangPatternProvider,\n\n cfg: &PatternCfg<ErlangPatternProvider>,\n\n clauses: &[PatternClause],\n\n destinations: &DecisionTreeDestinations,\n\n) -> Block {\n\n assert!(destinations.guards.len() == destinations.bodies.len());\n\n\n\n let entry_kind = &cfg.graph[cfg.entry];\n\n assert!(*entry_kind == CfgNodeKind::Root);\n\n\n\n let mut ctx = LowerCtx {\n\n provider,\n\n mapping: BFnvHashMap::with_hasher_in(Default::default(), &bump),\n\n destinations,\n\n };\n\n\n", "file_path": "libeir_passes/src/compile_pattern/lower_cfg.rs", "rank": 78, "score": 153816.94348727496 }, { "content": "pub trait Parse<T = Self> {\n\n type Parser;\n\n type Error;\n\n type Config;\n\n type Token;\n\n\n\n fn file_map_error(err: SourceError) -> Self::Error;\n\n\n\n /// Initializes a token stream for the underlying parser and invokes parse_tokens\n\n fn parse<'a, S>(\n\n parser: &Parser<Self::Config>,\n\n errors: &'a mut (dyn ErrorReceiver<E = Self::Error, W = Self::Error> + 'a),\n\n source: 
S,\n\n ) -> Result<T, ()>\n\n where\n\n S: Source;\n\n\n\n /// Implemented by each parser, which should parse the token stream and produce a T\n\n fn parse_tokens<'a, S>(\n\n errors: &'a mut (dyn ErrorReceiver<E = Self::Error, W = Self::Error> + 'a),\n\n tokens: S,\n\n ) -> Result<T, ()>\n\n where\n\n S: IntoIterator<Item = Self::Token>;\n\n}\n", "file_path": "util/libeir_util_parse/src/parser.rs", "rank": 79, "score": 153343.32739534433 }, { "content": "fn make_erlang_frontend(codemap: Arc<CodeMap>, matches: &ArgMatches) -> ErlangFrontend {\n\n use libeir_syntax_erl::ParseConfig;\n\n\n\n let mut config = ParseConfig::default();\n\n\n\n if let Some(includes) = matches.values_of(\"INCLUDE_PATHS\") {\n\n for include in includes {\n\n config.include_paths.push_front(PathBuf::from(include));\n\n }\n\n }\n\n if let Some(includes) = matches.values_of(\"CODE_PATHS\") {\n\n for include in includes {\n\n config.code_paths.push_front(PathBuf::from(include));\n\n }\n\n }\n\n\n\n ErlangFrontend::new(config, codemap)\n\n}\n\n\n", "file_path": "tools/src/compile.rs", "rank": 80, "score": 153220.36037464216 }, { "content": "fn delete(_vm: &VMState, _proc: &mut ProcessContext, args: &[Term]) -> CallReturn {\n\n assert!(args.len() == 1);\n\n // TODO\n\n let n = vec![Term::new_atom(\"error\"), Term::new_atom(\"enoent\")];\n\n CallReturn::Return { term: Term::Tuple(n) }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/file.rs", "rank": 81, "score": 153155.68733281977 }, { "content": "fn getenv(_vm: &VMState, _proc: &mut ProcessContext, args: &[Term]) -> CallReturn {\n\n if args.len() == 1 {\n\n CallReturn::Return { term: Term::new_bool(false) }\n\n } else {\n\n CallReturn::Throw\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/os.rs", "rank": 82, "score": 153155.68733281977 }, { "content": " function as parse_function, function_map as parse_function_map,\n\n function_map_unwrap as parse_function_map_unwrap, function_unwrap as parse_function_unwrap,\n\n module 
as parse_module, module_unwrap as parse_module_unwrap,\n\n};\n\n\n\npub mod ast;\n", "file_path": "libeir_ir/src/text/mod.rs", "rank": 83, "score": 153056.08969055142 }, { "content": "fn not(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 1);\n\n if let Some(b) = args[0].as_boolean() {\n\n NativeReturn::Return {\n\n term: Term::new_bool(!b).into(),\n\n }\n\n } else {\n\n panic!()\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 84, "score": 151085.50566052174 }, { "content": "fn or(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 2);\n\n if let (Some(a1), Some(a2)) = (args[0].as_boolean(), args[1].as_boolean()) {\n\n NativeReturn::Return {\n\n term: Term::new_bool(a1 || a2).into(),\n\n }\n\n } else {\n\n panic!()\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 85, "score": 151085.50566052174 }, { "content": "fn and(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 2);\n\n if let (Some(a1), Some(a2)) = (args[0].as_boolean(), args[1].as_boolean()) {\n\n NativeReturn::Return {\n\n term: Term::new_bool(a1 && a2).into(),\n\n }\n\n } else {\n\n panic!()\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 86, "score": 151085.50566052174 }, { "content": "fn os_type(_vm: &VMState, _proc: &mut ProcessContext, args: &[Term]) -> CallReturn {\n\n assert!(args.len() == 0);\n\n // TODO\n\n let family = Term::new_atom(\"unix\");\n\n let name = Term::new_atom(\"linux\");\n\n CallReturn::Return { term: Term::Tuple(vec![family, name]) }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/os.rs", "rank": 87, "score": 150932.49945963357 }, { "content": "pub fn lower_module<'a>(\n\n errors: &'a mut (dyn ErrorReceiver<E = LowerError, W = LowerError> + 'a),\n\n codemap: Arc<CodeMap>,\n\n module: &Module,\n\n) -> 
Result<IrModule, ()> {\n\n // TODO sort functions for more deterministic compilation\n\n\n\n let mut ir_module = IrModule::new_with_span(module.name, module.span);\n\n\n\n let mut ctx = LowerCtx {\n\n codemap,\n\n module,\n\n\n\n scope: scope::ScopeTracker::new(),\n\n exc_stack: ExceptionHandlerStack::new(),\n\n\n\n sentinel_value: None,\n\n\n\n errors,\n\n\n", "file_path": "libeir_syntax_erl/src/lower/mod.rs", "rank": 88, "score": 150766.5426911971 }, { "content": "pub fn constant_to_doc<'a>(\n\n arena: &'a Arena<'a>,\n\n container: &ConstantContainer,\n\n constant: Const,\n\n) -> RefDoc<'a, ()> {\n\n constant_to_doc_state(arena, container, constant, ConstantState::Normal)\n\n}\n\n\n", "file_path": "libeir_ir/src/text/printer/constant.rs", "rank": 89, "score": 150766.5426911971 }, { "content": "pub fn get_pid() -> Pid {\n\n TRACE_COLLECTOR.with(|c| {\n\n let c = c.lock().unwrap();\n\n c.current_pid\n\n })\n\n}\n\n\n", "file_path": "libeir_interpreter/src/trace/trace.rs", "rank": 90, "score": 150766.5426911971 }, { "content": "/// This implements `Send` and `Sync` unconditionally.\n\n/// (the trait itself doesn't need to have these bounds and the\n\n/// resources are already guaranteed to fulfill it).\n\nstruct Invariant<T: ?Sized>(*mut T);\n\n\n\nunsafe impl<T> Send for Invariant<T> where T: ?Sized {}\n\n\n\nunsafe impl<T> Sync for Invariant<T> where T: ?Sized {}\n\n\n", "file_path": "util/meta_table/src/lib.rs", "rank": 91, "score": 149161.24281929655 }, { "content": "//fn member_list(item: &Term, list: &Term) -> NativeReturn {\n\n// if let Term::Nil = list {\n\n// NativeReturn::Return { term: Term::new_bool(false).into() }\n\n// } else if let Term::List(ref head, ref tail) = list {\n\n// for l_item in head {\n\n// if item.erl_exact_eq(l_item) {\n\n// return NativeReturn::Return { term: Term::new_bool(true).into() };\n\n// }\n\n// }\n\n// member_list(item, tail)\n\n// } else {\n\n// NativeReturn::Throw\n\n// }\n\n//}\n\n//\n\n//fn member(_vm: &VMState, _proc: 
&mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n// assert!(args.len() == 2);\n\n// member_list(&args[0], &args[1])\n\n//}\n\n//\n\nfn reverse_2(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 2);\n\n\n\n let (mut head, tail) = Term::as_inproper_list(&args[0]);\n\n assert!(tail.erl_eq(&Term::Nil));\n\n\n\n head.reverse();\n\n NativeReturn::Return {\n\n term: Term::slice_to_list(&head, args[1].clone()),\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/lists.rs", "rank": 92, "score": 148882.7435185795 }, { "content": "fn put(_vm: &VMState, proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 2);\n\n if let Some(entry) = proc.dict.iter_mut().find(|e| e.0.erl_exact_eq(&args[0])) {\n\n let old = entry.1.clone();\n\n entry.1 = args[1].clone();\n\n NativeReturn::Return { term: old }\n\n } else {\n\n proc.dict.push((args[0].clone(), args[1].clone()));\n\n NativeReturn::Return {\n\n term: Term::new_atom(\"undefined\").into(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 93, "score": 148862.31778733555 }, { "content": "fn setelement(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 3);\n\n let idx = if let Some(num) = args[0].as_usize() {\n\n num\n\n } else {\n\n panic!()\n\n };\n\n let value = args[2].clone();\n\n if let Term::Tuple(vals) = &*args[1] {\n\n if idx == 0 || idx > vals.len() {\n\n panic!()\n\n } else {\n\n let mut vals = vals.clone();\n\n vals[idx - 1] = value;\n\n NativeReturn::Return {\n\n term: Term::Tuple(vals).into(),\n\n }\n\n }\n\n } else {\n\n panic!()\n\n }\n\n}\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 94, "score": 148862.31778733555 }, { "content": "fn erase(_vm: &VMState, proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 1);\n\n let idx = proc\n\n .dict\n\n 
.iter()\n\n .enumerate()\n\n .find(|e| (e.1).0.erl_exact_eq(&args[0]))\n\n .map(|(idx, _)| idx);\n\n if let Some(entry) = idx {\n\n let (_key, val) = proc.dict.remove(entry);\n\n NativeReturn::Return { term: val }\n\n } else {\n\n NativeReturn::Return {\n\n term: Term::new_atom(\"undefined\").into(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 95, "score": 148862.31778733555 }, { "content": "fn element(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 2);\n\n let idx = if let Some(num) = args[0].as_usize() {\n\n num\n\n } else {\n\n panic!()\n\n };\n\n if let Term::Tuple(vals) = &*args[1] {\n\n if idx == 0 || idx > vals.len() {\n\n panic!()\n\n } else {\n\n NativeReturn::Return {\n\n term: vals[idx - 1].clone(),\n\n }\n\n }\n\n } else {\n\n panic!()\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 96, "score": 148862.31778733555 }, { "content": "fn less_than(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 2);\n\n let a1 = &*args[0];\n\n let a2 = &*args[1];\n\n let ord = a1.erl_ord(a2);\n\n NativeReturn::Return {\n\n term: Term::new_bool(ord == std::cmp::Ordering::Less).into(),\n\n }\n\n}\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 97, "score": 148862.31778733555 }, { "content": "fn get(_vm: &VMState, proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn {\n\n assert!(args.len() == 1);\n\n if let Some(entry) = proc.dict.iter().find(|e| e.0.erl_exact_eq(&args[0])) {\n\n NativeReturn::Return {\n\n term: entry.1.clone(),\n\n }\n\n } else {\n\n NativeReturn::Return {\n\n term: Term::new_atom(\"undefined\").into(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 98, "score": 148862.31778733555 }, { "content": "fn greater_than(_vm: &VMState, _proc: &mut ProcessContext, args: &[Rc<Term>]) -> NativeReturn 
{\n\n assert!(args.len() == 2);\n\n let a1 = &*args[0];\n\n let a2 = &*args[1];\n\n let ord = a1.erl_ord(a2);\n\n NativeReturn::Return {\n\n term: Term::new_bool(ord == std::cmp::Ordering::Greater).into(),\n\n }\n\n}\n\n\n", "file_path": "libeir_interpreter/src/erl_lib/erlang.rs", "rank": 99, "score": 148862.31778733555 } ]
Rust
src/firmware/types.rs
tfanelli-rh/sev
e0e17aac9249b00b0cb3e24a2780ca814d229a11
use crate::certs::sev; use crate::Version; use std::marker::PhantomData; pub struct PlatformReset; bitflags::bitflags! { #[derive(Default)] pub struct PlatformStatusFlags: u32 { const OWNED = 1 << 0; const ENCRYPTED_STATE = 1 << 8; } } #[derive(Default)] #[repr(C, packed)] pub struct PlatformStatus { pub version: Version, pub state: u8, pub flags: PlatformStatusFlags, pub build: u8, pub guest_count: u32, } pub struct PekGen; #[repr(C, packed)] pub struct PekCsr<'a> { addr: u64, len: u32, _phantom: PhantomData<&'a ()>, } impl<'a> PekCsr<'a> { pub fn new(cert: &'a mut sev::Certificate) -> Self { Self { addr: cert as *mut _ as _, len: std::mem::size_of_val(cert) as _, _phantom: PhantomData, } } } #[repr(C, packed)] pub struct PekCertImport<'a> { pek_addr: u64, pek_len: u32, oca_addr: u64, oca_len: u32, _phantom: PhantomData<&'a ()>, } impl<'a> PekCertImport<'a> { pub fn new(pek: &'a sev::Certificate, oca: &'a sev::Certificate) -> Self { Self { pek_addr: pek as *const _ as _, pek_len: std::mem::size_of_val(pek) as _, oca_addr: oca as *const _ as _, oca_len: std::mem::size_of_val(oca) as _, _phantom: PhantomData, } } } pub struct PdhGen; #[repr(C, packed)] pub struct PdhCertExport<'a> { pdh_addr: u64, pdh_len: u32, certs_addr: u64, certs_len: u32, _phantom: PhantomData<&'a ()>, } impl<'a> PdhCertExport<'a> { pub fn new(pdh: &'a mut sev::Certificate, certs: &'a mut [sev::Certificate; 3]) -> Self { Self { pdh_addr: pdh as *mut _ as _, pdh_len: std::mem::size_of_val(pdh) as _, certs_addr: certs.as_mut_ptr() as _, certs_len: std::mem::size_of_val(certs) as _, _phantom: PhantomData, } } } #[repr(C, packed)] pub struct GetId<'a> { id_addr: u64, id_len: u32, _phantom: PhantomData<&'a ()>, } impl<'a> GetId<'a> { pub fn new(id: &'a mut [u8; 64]) -> Self { Self { id_addr: id.as_mut_ptr() as _, id_len: id.len() as _, _phantom: PhantomData, } } pub fn as_slice(&self) -> &[u8] { unsafe { std::slice::from_raw_parts(self.id_addr as *const u8, self.id_len as _) } } } #[derive(Default)] 
#[repr(C, packed)] pub struct SnpPlatformStatus { pub version: Version, pub state: u8, pub build_id: u32, pub guest_count: u32, pub tcb_version: u64, }
use crate::certs::sev; use crate::Version; use std::marker::PhantomData; pub struct PlatformReset; bitflags::bitflags! { #[derive(Default)] pub struct PlatformStatusFlags: u32 { const OWNED = 1 << 0; const ENCRYPTED_STATE = 1 << 8; } } #[derive(Default)] #[repr(C, packed)] pub struct PlatformStatus { pub version: Version, pub state: u8, pub flags: PlatformStatusFlags, pub build: u8, pub guest_count: u32, } pub struct PekGen; #[repr(C, packed)] pub struct PekCsr<'a> { addr: u64, len: u32, _phantom: PhantomData<&'a ()>, } impl<'a> PekCsr<'a> { pub fn new(cert: &'a mut sev::Certificate)
*const u8, self.id_len as _) } } } #[derive(Default)] #[repr(C, packed)] pub struct SnpPlatformStatus { pub version: Version, pub state: u8, pub build_id: u32, pub guest_count: u32, pub tcb_version: u64, }
-> Self { Self { addr: cert as *mut _ as _, len: std::mem::size_of_val(cert) as _, _phantom: PhantomData, } } } #[repr(C, packed)] pub struct PekCertImport<'a> { pek_addr: u64, pek_len: u32, oca_addr: u64, oca_len: u32, _phantom: PhantomData<&'a ()>, } impl<'a> PekCertImport<'a> { pub fn new(pek: &'a sev::Certificate, oca: &'a sev::Certificate) -> Self { Self { pek_addr: pek as *const _ as _, pek_len: std::mem::size_of_val(pek) as _, oca_addr: oca as *const _ as _, oca_len: std::mem::size_of_val(oca) as _, _phantom: PhantomData, } } } pub struct PdhGen; #[repr(C, packed)] pub struct PdhCertExport<'a> { pdh_addr: u64, pdh_len: u32, certs_addr: u64, certs_len: u32, _phantom: PhantomData<&'a ()>, } impl<'a> PdhCertExport<'a> { pub fn new(pdh: &'a mut sev::Certificate, certs: &'a mut [sev::Certificate; 3]) -> Self { Self { pdh_addr: pdh as *mut _ as _, pdh_len: std::mem::size_of_val(pdh) as _, certs_addr: certs.as_mut_ptr() as _, certs_len: std::mem::size_of_val(certs) as _, _phantom: PhantomData, } } } #[repr(C, packed)] pub struct GetId<'a> { id_addr: u64, id_len: u32, _phantom: PhantomData<&'a ()>, } impl<'a> GetId<'a> { pub fn new(id: &'a mut [u8; 64]) -> Self { Self { id_addr: id.as_mut_ptr() as _, id_len: id.len() as _, _phantom: PhantomData, } } pub fn as_slice(&self) -> &[u8] { unsafe { std::slice::from_raw_parts(self.id_addr as
random
[]
Rust
apps/fifteen_min/src/bus.rs
lucasccdias/abstreet
cf88a2a13396d1872f5165f54189c753b9686d21
use abstutil::prettyprint_usize; use geom::Duration; use map_gui::tools::{InputWaypoints, WaypointID}; use map_model::connectivity::WalkingOptions; use synthpop::{TripEndpoint, TripMode}; use widgetry::mapspace::{ObjectID, World, WorldOutcome}; use widgetry::{ Color, EventCtx, GfxCtx, HorizontalAlignment, Key, Line, Outcome, Panel, State, Text, Transition, VerticalAlignment, Widget, }; use crate::isochrone::{Isochrone, MovementOptions, Options}; use crate::App; pub struct BusExperiment { panel: Panel, waypoints: InputWaypoints, world: World<ID>, } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] enum ID { Waypoint(WaypointID), BusRoute(usize), } impl ObjectID for ID {} impl BusExperiment { pub fn new_state(ctx: &mut EventCtx, app: &App) -> Box<dyn State<App>> { let mut state = BusExperiment { panel: Panel::empty(ctx), waypoints: InputWaypoints::new(app), world: World::unbounded(), }; state.recalculate_everything(ctx, app); Box::new(state) } fn recalculate_everything(&mut self, ctx: &mut EventCtx, app: &App) { let map = &app.map; let mut world = World::bounded(map.get_bounds()); self.waypoints .rebuild_world(ctx, &mut world, ID::Waypoint, 1); for (idx, pair) in self.waypoints.get_waypoints().windows(2).enumerate() { if let Some(path) = TripEndpoint::path_req(pair[0], pair[1], TripMode::Drive, map) .and_then(|req| map.pathfind(req).ok()) { let duration = path.estimate_duration(map, None); if let Ok(hitbox) = path.trace_v2(map) { world .add(ID::BusRoute(idx)) .hitbox(hitbox) .zorder(0) .draw_color(self.waypoints.get_waypoint_color(idx)) .hover_alpha(0.8) .tooltip(Text::from(Line(format!("Freeflow time is {duration}")))) .build(ctx); } } } let stops = self .waypoints .get_waypoints() .into_iter() .filter_map(|endpt| match endpt { TripEndpoint::Building(b) => Some(b), _ => None, }) .collect::<Vec<_>>(); let isochrone = Isochrone::new( ctx, app, stops, Options { movement: MovementOptions::Walking(WalkingOptions::default()), thresholds: vec![(Duration::minutes(15), 
Color::grey(0.3).alpha(0.5))], /*thresholds: vec![ (Duration::minutes(5), Color::grey(0.3).alpha(0.5)), (Duration::minutes(10), Color::grey(0.3).alpha(0.3)), (Duration::minutes(15), Color::grey(0.3).alpha(0.2)), ],*/ }, ); world.draw_master_batch_built(isochrone.draw); world.initialize_hover(ctx); world.rebuilt_during_drag(&self.world); self.world = world; self.panel = Panel::new_builder(Widget::col(vec![ map_gui::tools::app_header(ctx, app, "Bus planner"), ctx.style() .btn_back("15-minute neighborhoods") .hotkey(Key::Escape) .build_def(ctx), Text::from_multiline(vec![ Line("Within a 15 min walk of all stops:"), Line(format!( "Population: {}", prettyprint_usize(isochrone.population) )), Line(format!( "Shops: {}", prettyprint_usize( isochrone .amenities_reachable .borrow() .values() .map(|x| x.len()) .sum() ) )), ]) .into_widget(ctx), self.waypoints.get_panel_widget(ctx), ])) .aligned(HorizontalAlignment::Left, VerticalAlignment::Top) .ignore_initial_events() .build(ctx); } } impl State<App> for BusExperiment { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition<App> { let panel_outcome = self.panel.event(ctx); if let Outcome::Clicked(ref x) = panel_outcome { if x == "15-minute neighborhoods" { return Transition::Pop; } } let world_outcome = self.world.event(ctx); let world_outcome_for_waypoints = world_outcome .maybe_map_id(|id| match id { ID::Waypoint(id) => Some(id), _ => None, }) .unwrap_or(WorldOutcome::Nothing); if self .waypoints .event(app, panel_outcome, world_outcome_for_waypoints) { self.recalculate_everything(ctx, app); } Transition::Keep } fn draw(&self, g: &mut GfxCtx, _: &App) { self.panel.draw(g); self.world.draw(g); } }
use abstutil::prettyprint_usize; use geom::Duration; use map_gui::tools::{InputWaypoints, WaypointID}; use map_model::connectivity::WalkingOptions; use synthpop::{TripEndpoint, TripMode}; use widgetry::mapspace::{ObjectID, World, WorldOutcome}; use widgetry::{ Color, EventCtx, GfxCtx, HorizontalAlignment, Key, Line, Outcome, Panel, State, Text, Transition, VerticalAlignment, Widget, }; use crate::isochrone::{Isochrone, MovementOptions, Options}; use crate::App; pub struct BusExperiment { panel: Panel, waypoints: InputWaypoints, world: World<ID>, } #[derive(Clone, Copy, Debug, Partia
lAlignment::Left, VerticalAlignment::Top) .ignore_initial_events() .build(ctx); } } impl State<App> for BusExperiment { fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition<App> { let panel_outcome = self.panel.event(ctx); if let Outcome::Clicked(ref x) = panel_outcome { if x == "15-minute neighborhoods" { return Transition::Pop; } } let world_outcome = self.world.event(ctx); let world_outcome_for_waypoints = world_outcome .maybe_map_id(|id| match id { ID::Waypoint(id) => Some(id), _ => None, }) .unwrap_or(WorldOutcome::Nothing); if self .waypoints .event(app, panel_outcome, world_outcome_for_waypoints) { self.recalculate_everything(ctx, app); } Transition::Keep } fn draw(&self, g: &mut GfxCtx, _: &App) { self.panel.draw(g); self.world.draw(g); } }
lEq, Eq, Hash)] enum ID { Waypoint(WaypointID), BusRoute(usize), } impl ObjectID for ID {} impl BusExperiment { pub fn new_state(ctx: &mut EventCtx, app: &App) -> Box<dyn State<App>> { let mut state = BusExperiment { panel: Panel::empty(ctx), waypoints: InputWaypoints::new(app), world: World::unbounded(), }; state.recalculate_everything(ctx, app); Box::new(state) } fn recalculate_everything(&mut self, ctx: &mut EventCtx, app: &App) { let map = &app.map; let mut world = World::bounded(map.get_bounds()); self.waypoints .rebuild_world(ctx, &mut world, ID::Waypoint, 1); for (idx, pair) in self.waypoints.get_waypoints().windows(2).enumerate() { if let Some(path) = TripEndpoint::path_req(pair[0], pair[1], TripMode::Drive, map) .and_then(|req| map.pathfind(req).ok()) { let duration = path.estimate_duration(map, None); if let Ok(hitbox) = path.trace_v2(map) { world .add(ID::BusRoute(idx)) .hitbox(hitbox) .zorder(0) .draw_color(self.waypoints.get_waypoint_color(idx)) .hover_alpha(0.8) .tooltip(Text::from(Line(format!("Freeflow time is {duration}")))) .build(ctx); } } } let stops = self .waypoints .get_waypoints() .into_iter() .filter_map(|endpt| match endpt { TripEndpoint::Building(b) => Some(b), _ => None, }) .collect::<Vec<_>>(); let isochrone = Isochrone::new( ctx, app, stops, Options { movement: MovementOptions::Walking(WalkingOptions::default()), thresholds: vec![(Duration::minutes(15), Color::grey(0.3).alpha(0.5))], /*thresholds: vec![ (Duration::minutes(5), Color::grey(0.3).alpha(0.5)), (Duration::minutes(10), Color::grey(0.3).alpha(0.3)), (Duration::minutes(15), Color::grey(0.3).alpha(0.2)), ],*/ }, ); world.draw_master_batch_built(isochrone.draw); world.initialize_hover(ctx); world.rebuilt_during_drag(&self.world); self.world = world; self.panel = Panel::new_builder(Widget::col(vec![ map_gui::tools::app_header(ctx, app, "Bus planner"), ctx.style() .btn_back("15-minute neighborhoods") .hotkey(Key::Escape) .build_def(ctx), Text::from_multiline(vec![ Line("Within a 15 
min walk of all stops:"), Line(format!( "Population: {}", prettyprint_usize(isochrone.population) )), Line(format!( "Shops: {}", prettyprint_usize( isochrone .amenities_reachable .borrow() .values() .map(|x| x.len()) .sum() ) )), ]) .into_widget(ctx), self.waypoints.get_panel_widget(ctx), ])) .aligned(Horizonta
random
[ { "content": "pub fn custom_bar(ctx: &mut EventCtx, filled_color: Color, pct_full: f64, txt: Text) -> Widget {\n\n let total_width = 300.0;\n\n let height = 32.0;\n\n let radius = 4.0;\n\n\n\n let mut batch = GeomBatch::new();\n\n // Background\n\n batch.push(\n\n Color::hex(\"#666666\"),\n\n Polygon::rounded_rectangle(total_width, height, radius),\n\n );\n\n // Foreground\n\n if let Some(poly) = Polygon::maybe_rounded_rectangle(pct_full * total_width, height, radius) {\n\n batch.push(filled_color, poly);\n\n }\n\n // Text\n\n let label = txt.render_autocropped(ctx);\n\n let dims = label.get_dims();\n\n batch.append(label.translate(10.0, height / 2.0 - dims.height / 2.0));\n\n batch.into_widget(ctx)\n\n}\n\n\n", "file_path": "apps/santa/src/meters.rs", "rank": 0, "score": 321522.63587878697 }, { "content": "#[allow(non_snake_case)]\n\npub fn Line<S: Into<String>>(text: S) -> TextSpan {\n\n TextSpan {\n\n text: text.into(),\n\n fg_color: None,\n\n outline_color: None,\n\n size: DEFAULT_FONT_SIZE,\n\n font: DEFAULT_FONT,\n\n underlined: false,\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Text {\n\n // The bg_color will cover the entire block, but some lines can have extra highlighting.\n\n lines: Vec<(Option<Color>, Vec<TextSpan>)>,\n\n // TODO Stop using this as much as possible.\n\n bg_color: Option<Color>,\n\n}\n\n\n\nimpl From<TextSpan> for Text {\n", "file_path": "widgetry/src/text.rs", "rank": 1, "score": 313925.8886511948 }, { "content": "/// Objects in a `World` are uniquely identified by this caller-specified type\n\npub trait ObjectID: Clone + Copy + Debug + Eq + Hash {}\n\n\n\n/// This provides a builder API for adding objects to a `World`.\n\npub struct ObjectBuilder<'a, ID: ObjectID> {\n\n world: &'a mut World<ID>,\n\n\n\n id: ID,\n\n hitbox: Option<Polygon>,\n\n zorder: usize,\n\n draw_normal: Option<ToggleZoomedBuilder>,\n\n draw_hover: Option<ToggleZoomedBuilder>,\n\n tooltip: Option<Text>,\n\n clickable: bool,\n\n draggable: bool,\n\n 
keybindings: Vec<(MultiKey, &'static str)>,\n\n}\n\n\n\nimpl<'a, ID: ObjectID> ObjectBuilder<'a, ID> {\n\n /// Specifies the geometry of the object. Required.\n\n pub fn hitbox(mut self, polygon: Polygon) -> Self {\n", "file_path": "widgetry/src/mapspace/world.rs", "rank": 2, "score": 303023.73172114696 }, { "content": "pub fn left_panel_builder(ctx: &EventCtx, top_panel: &Panel, contents: Widget) -> PanelBuilder {\n\n let top_height = top_panel.panel_dims().height;\n\n Panel::new_builder(\n\n contents.corner_rounding(CornerRounding::CornerRadii(CornerRadii {\n\n top_left: 0.0,\n\n bottom_left: 0.0,\n\n bottom_right: 0.0,\n\n top_right: 0.0,\n\n })),\n\n )\n\n .aligned(\n\n HorizontalAlignment::Percent(0.0),\n\n VerticalAlignment::Below(top_height),\n\n )\n\n .dims_height(PanelDims::ExactPixels(\n\n ctx.canvas.window_height - top_height,\n\n ))\n\n}\n\n\n", "file_path": "apps/ltn/src/common/mod.rs", "rank": 3, "score": 280365.3724671572 }, { "content": "pub fn lctrl(key: Key) -> MultiKey {\n\n MultiKey::LCtrl(key)\n\n}\n\n\n", "file_path": "widgetry/src/event.rs", "rank": 4, "score": 258013.2257737854 }, { "content": "pub fn percentage_bar(ctx: &EventCtx, txt: Text, pct_green: f64) -> Widget {\n\n let bad_color = Color::RED;\n\n let good_color = Color::GREEN;\n\n\n\n let total_width = 450.0;\n\n let height = 32.0;\n\n let radius = 4.0;\n\n\n\n let mut batch = GeomBatch::new();\n\n // Background\n\n batch.push(\n\n bad_color,\n\n Polygon::rounded_rectangle(total_width, height, radius),\n\n );\n\n // Foreground\n\n if let Some(poly) = Polygon::maybe_rounded_rectangle(pct_green * total_width, height, radius) {\n\n batch.push(good_color, poly);\n\n }\n\n // Text\n\n let label = txt.render_autocropped(ctx);\n\n let dims = label.get_dims();\n\n batch.append(label.translate(10.0, height / 2.0 - dims.height / 2.0));\n\n batch.into_widget(ctx)\n\n}\n\n\n", "file_path": "map_gui/src/tools/ui.rs", "rank": 5, "score": 257692.32212130487 }, { "content": "fn make_controls(ctx: 
&mut EventCtx, app: &App, opts: &Options, legend: Option<Widget>) -> Panel {\n\n let (total_ppl, ppl_in_bldg, ppl_off_map) = app.primary.sim.num_ppl();\n\n\n\n let mut col = vec![\n\n header(\n\n ctx,\n\n &format!(\"Population: {}\", prettyprint_usize(total_ppl)),\n\n ),\n\n Widget::row(vec![\n\n Widget::row(vec![\n\n Image::from_path(\"system/assets/tools/home.svg\").into_widget(ctx),\n\n Line(prettyprint_usize(ppl_in_bldg))\n\n .small()\n\n .into_widget(ctx),\n\n ]),\n\n Line(format!(\"Off-map: {}\", prettyprint_usize(ppl_off_map)))\n\n .small()\n\n .into_widget(ctx),\n\n ])\n\n .centered(),\n", "file_path": "apps/game/src/layer/population.rs", "rank": 6, "score": 255360.36964755895 }, { "content": "fn make_controls(ctx: &mut EventCtx, app: &App, opts: &Options, legend: Option<Widget>) -> Panel {\n\n let model = app.primary.sim.get_pandemic_model().unwrap();\n\n let pct = 100.0 / (model.count_total() as f64);\n\n\n\n let mut col = vec![\n\n header(ctx, \"Pandemic model\"),\n\n Text::from_multiline(vec![\n\n Line(format!(\n\n \"{} Sane ({:.1}%)\",\n\n prettyprint_usize(model.count_sane()),\n\n (model.count_sane() as f64) * pct\n\n )),\n\n Line(format!(\n\n \"{} Exposed ({:.1}%)\",\n\n prettyprint_usize(model.count_exposed()),\n\n (model.count_exposed() as f64) * pct\n\n )),\n\n Line(format!(\n\n \"{} Infected ({:.1}%)\",\n\n prettyprint_usize(model.count_infected()),\n", "file_path": "apps/game/src/layer/pandemic.rs", "rank": 7, "score": 255360.36964755892 }, { "content": "pub fn make_bar(ctx: &mut EventCtx, filled_color: Color, value: usize, max: usize) -> Widget {\n\n let pct_full = if max == 0 {\n\n 0.0\n\n } else {\n\n (value as f64) / (max as f64)\n\n };\n\n let txt = Text::from(format!(\n\n \"{} / {}\",\n\n prettyprint_usize(value),\n\n prettyprint_usize(max)\n\n ));\n\n custom_bar(ctx, filled_color, pct_full, txt)\n\n}\n", "file_path": "apps/santa/src/meters.rs", "rank": 8, "score": 254079.0154842489 }, { "content": "#[derive(Clone, Debug, 
Default)]\n\nstruct ButtonStateStyle<'a, 'c> {\n\n image: Option<Image<'a, 'c>>,\n\n label: Option<Label>,\n\n outline: Option<OutlineStyle>,\n\n bg_color: Option<Color>,\n\n custom_batch: Option<GeomBatch>,\n\n}\n\n\n\n// can we take 'b out? and make the func that uses it generic?\n\nimpl<'b, 'a: 'b, 'c> ButtonBuilder<'a, 'c> {\n\n pub fn new() -> Self {\n\n ButtonBuilder {\n\n padding: EdgeInsets {\n\n top: 8.0,\n\n bottom: 8.0,\n\n left: 16.0,\n\n right: 16.0,\n\n },\n\n stack_spacing: 10.0,\n\n ..Default::default()\n", "file_path": "widgetry/src/widgets/button.rs", "rank": 9, "score": 251049.99100126157 }, { "content": "pub fn angle_from_arrow_keys(ctx: &EventCtx) -> Option<Angle> {\n\n let mut x: f64 = 0.0;\n\n let mut y: f64 = 0.0;\n\n if ctx.is_key_down(Key::LeftArrow) || ctx.is_key_down(Key::A) {\n\n x -= 1.0;\n\n }\n\n if ctx.is_key_down(Key::RightArrow) || ctx.is_key_down(Key::D) {\n\n x += 1.0;\n\n }\n\n if ctx.is_key_down(Key::UpArrow) || ctx.is_key_down(Key::W) {\n\n y -= 1.0;\n\n }\n\n if ctx.is_key_down(Key::DownArrow) || ctx.is_key_down(Key::S) {\n\n y += 1.0;\n\n }\n\n\n\n if x == 0.0 && y == 0.0 {\n\n return None;\n\n }\n\n Some(Angle::new_rads(y.atan2(x)))\n\n}\n", "file_path": "apps/santa/src/controls.rs", "rank": 10, "score": 248213.62063762365 }, { "content": "pub fn hotkeys(keys: Vec<Key>) -> MultiKey {\n\n MultiKey::Any(keys)\n\n}\n\n\n\nimpl std::convert::From<Key> for Option<MultiKey> {\n\n fn from(key: Key) -> Option<MultiKey> {\n\n Some(MultiKey::Normal(key))\n\n }\n\n}\n\n\n\nimpl std::convert::From<Key> for MultiKey {\n\n fn from(key: Key) -> MultiKey {\n\n MultiKey::Normal(key)\n\n }\n\n}\n", "file_path": "widgetry/src/event.rs", "rank": 11, "score": 240314.5230229474 }, { "content": "/// A temporary state of an application. 
There's a stack of these, with the most recent being the\n\n/// active one.\n\npub trait State<A>: downcast_rs::Downcast {\n\n /// Respond to a UI event, such as input or time passing.\n\n fn event(&mut self, ctx: &mut EventCtx, shared_app_state: &mut A) -> Transition<A>;\n\n /// Draw\n\n fn draw(&self, g: &mut GfxCtx, shared_app_state: &A);\n\n\n\n /// Specifies what to draw before draw()\n\n fn draw_baselayer(&self) -> DrawBaselayer {\n\n DrawBaselayer::DefaultDraw\n\n }\n\n\n\n /// Before this state is popped or replaced, call this.\n\n fn on_destroy(&mut self, _: &mut EventCtx, _: &mut A) {}\n\n // We don't need an on_enter -- the constructor for the state can just do it.\n\n}\n\n\n\ndowncast_rs::impl_downcast!(State<A>);\n\n\n\n/// When a state responds to an event, it can specify some way to manipulate the stack of states.\n\npub enum Transition<A> {\n", "file_path": "widgetry/src/app_state.rs", "rank": 12, "score": 240283.93080623302 }, { "content": "pub fn debug(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::LaneDebug(id)),\n\n debug_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "apps/game/src/info/lane.rs", "rank": 13, "score": 239099.37659016845 }, { "content": "struct Hovering<X: Axis<X>, Y: Axis<Y>> {\n\n hits: Vec<(X, Y)>,\n\n tooltip: Text,\n\n draw_cursor: Drawable,\n\n}\n", "file_path": "widgetry/src/widgets/line_plot.rs", "rank": 14, "score": 220694.22592761216 }, { "content": "// TODO Kinda misnomer\n\npub fn tool_panel(ctx: &mut EventCtx) -> Panel {\n\n Panel::new_builder(Widget::row(vec![\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/home.svg\")\n\n .hotkey(Key::Escape)\n\n .build_widget(ctx, \"back\"),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/settings.svg\")\n\n .build_widget(ctx, \"settings\"),\n\n ]))\n\n .aligned(HorizontalAlignment::Left, VerticalAlignment::BottomAboveOSD)\n\n 
.build(ctx)\n\n}\n\n\n", "file_path": "apps/game/src/common/mod.rs", "rank": 15, "score": 217247.03293201962 }, { "content": "pub trait TextExt {\n\n fn text_widget(self, ctx: &EventCtx) -> Widget;\n\n fn batch_text(self, ctx: &EventCtx) -> Widget;\n\n}\n\n\n\nimpl TextExt for &str {\n\n fn text_widget(self, ctx: &EventCtx) -> Widget {\n\n Line(self).into_widget(ctx)\n\n }\n\n fn batch_text(self, ctx: &EventCtx) -> Widget {\n\n Line(self).batch(ctx)\n\n }\n\n}\n\n\n\nimpl TextExt for String {\n\n fn text_widget(self, ctx: &EventCtx) -> Widget {\n\n Line(self).into_widget(ctx)\n\n }\n\n fn batch_text(self, ctx: &EventCtx) -> Widget {\n\n Line(self).batch(ctx)\n", "file_path": "widgetry/src/text.rs", "rank": 16, "score": 217120.72329819982 }, { "content": "struct Waypoint {\n\n at: TripEndpoint,\n\n label: String,\n\n center: Pt2D,\n\n}\n\n\n\nimpl InputWaypoints {\n\n /// Allows any number of waypoints\n\n pub fn new(app: &dyn AppLike) -> InputWaypoints {\n\n let map = app.map();\n\n let mut snap_to_endpts = FindClosest::new(map.get_bounds());\n\n for i in map.all_intersections() {\n\n if i.is_border() {\n\n snap_to_endpts.add(TripEndpoint::Border(i.id), i.polygon.points());\n\n }\n\n }\n\n for b in map.all_buildings() {\n\n snap_to_endpts.add(TripEndpoint::Building(b.id), b.polygon.points());\n\n }\n\n\n", "file_path": "map_gui/src/tools/waypoints.rs", "rank": 17, "score": 216645.9675789563 }, { "content": "pub fn route(ctx: &mut EventCtx, app: &App, details: &mut Details, id: TransitRouteID) -> Widget {\n\n let header = {\n\n let map = &app.primary.map;\n\n let route = map.get_tr(id);\n\n\n\n Widget::row(vec![\n\n Line(format!(\"Route {}\", route.short_name))\n\n .small_heading()\n\n .into_widget(ctx),\n\n header_btns(ctx),\n\n ])\n\n };\n\n\n\n Widget::custom_col(vec![\n\n header,\n\n route_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "apps/game/src/info/transit.rs", "rank": 18, "score": 216515.01449244414 }, { "content": "pub fn 
stop(ctx: &mut EventCtx, app: &App, details: &mut Details, id: TransitStopID) -> Widget {\n\n let header = Widget::row(vec![\n\n Line(\"Bus stop\").small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n\n\n Widget::custom_col(vec![header, stop_body(ctx, app, details, id).tab_body(ctx)])\n\n}\n\n\n", "file_path": "apps/game/src/info/transit.rs", "rank": 19, "score": 216515.01449244414 }, { "content": "#[derive(Deserialize)]\n\nstruct InnerNodeMap<T: Copy + Ord + Debug> {\n\n id_to_node: Vec<T>,\n\n}\n\n\n", "file_path": "map_model/src/pathfind/node_map.rs", "rank": 20, "score": 215187.87087628176 }, { "content": "fn inner_warp_to_id(ctx: &mut EventCtx, app: &mut App, line: &str) -> Option<Transition> {\n\n if line.is_empty() {\n\n return None;\n\n }\n\n if line == \"j\" {\n\n if let Some((pt, zoom)) = app.primary.last_warped_from {\n\n return Some(Transition::Replace(Warping::new_state(\n\n ctx,\n\n pt,\n\n Some(zoom),\n\n None,\n\n &mut app.primary,\n\n )));\n\n }\n\n return None;\n\n }\n\n\n\n let id = match (&line[1..line.len()]).parse::<usize>() {\n\n Ok(idx) => match line.chars().next().unwrap() {\n\n 'r' => {\n", "file_path": "apps/game/src/common/warp.rs", "rank": 21, "score": 213888.8682990063 }, { "content": "pub trait Axis<T>: 'static + Copy + std::cmp::Ord + Default {\n\n // percent is [0.0, 1.0]\n\n fn from_percent(&self, percent: f64) -> T;\n\n fn to_percent(self, max: T) -> f64;\n\n fn prettyprint(self, unit_fmt: &UnitFmt) -> String;\n\n // For order of magnitude calculations\n\n fn to_f64(self) -> f64;\n\n fn from_f64(&self, x: f64) -> T;\n\n fn zero() -> T;\n\n}\n\n\n\nimpl Axis<usize> for usize {\n\n fn from_percent(&self, percent: f64) -> usize {\n\n ((*self as f64) * percent) as usize\n\n }\n\n fn to_percent(self, max: usize) -> f64 {\n\n if max == 0 {\n\n 0.0\n\n } else {\n\n (self as f64) / (max as f64)\n", "file_path": "widgetry/src/widgets/plots.rs", "rank": 22, "score": 213080.73038027482 }, { "content": "#[derive(Clone, Debug, 
Default)]\n\nstruct Label {\n\n text: Option<String>,\n\n color: Option<Color>,\n\n styled_text: Option<Text>,\n\n font_size: Option<usize>,\n\n font: Option<Font>,\n\n}\n", "file_path": "widgetry/src/widgets/button.rs", "rank": 23, "score": 211717.0366695127 }, { "content": "struct Tab {\n\n tab_id: String,\n\n bar_item: ButtonBuilder<'static, 'static>,\n\n content: Widget,\n\n}\n\n\n\nimpl Tab {\n\n fn new(tab_id: String, bar_item: ButtonBuilder<'static, 'static>, content: Widget) -> Self {\n\n Self {\n\n tab_id,\n\n bar_item,\n\n content,\n\n }\n\n }\n\n\n\n fn build_bar_item_widget(&self, ctx: &EventCtx, active: bool) -> Widget {\n\n self.bar_item\n\n .clone()\n\n .corner_rounding(CornerRadii {\n\n top_left: DEFAULT_CORNER_RADIUS,\n", "file_path": "widgetry/src/widgets/tabs.rs", "rank": 24, "score": 211710.62253583595 }, { "content": "/// Many states fit a pattern of managing a single panel, handling mouseover events, and other\n\n/// interactions on the map. Implementing this instead of `State` reduces some boilerplate.\n\npub trait SimpleState<A> {\n\n /// Called when something on the panel has been clicked. 
Since the action is just a string,\n\n /// the fallback case can just use `unreachable!()`.\n\n fn on_click(\n\n &mut self,\n\n ctx: &mut EventCtx,\n\n app: &mut A,\n\n action: &str,\n\n panel: &mut Panel,\n\n ) -> Transition<A>;\n\n /// Called when something on the panel has been clicked.\n\n fn on_click_custom(\n\n &mut self,\n\n _ctx: &mut EventCtx,\n\n _app: &mut A,\n\n _action: Box<dyn CloneableAny>,\n\n _panel: &mut Panel,\n\n ) -> Transition<A> {\n\n Transition::Keep\n\n }\n", "file_path": "widgetry/src/app_state.rs", "rank": 25, "score": 211230.39965312625 }, { "content": "pub fn area(ctx: &EventCtx, app: &App, _: &mut Details, id: AreaID) -> Widget {\n\n let header = Widget::row(vec![\n\n Line(id.to_string()).small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n\n\n Widget::custom_col(vec![header, area_body(ctx, app, id).tab_body(ctx)])\n\n}\n\n\n", "file_path": "apps/game/src/info/debug.rs", "rank": 26, "score": 211212.9024798363 }, { "content": "/// Any data that should last the entire lifetime of the application should be stored in the struct\n\n/// implementing this trait.\n\npub trait SharedAppState {\n\n /// Before `State::event` is called, call this.\n\n fn before_event(&mut self) {}\n\n /// When DrawBaselayer::DefaultDraw is called, run this.\n\n fn draw_default(&self, _: &mut GfxCtx) {}\n\n\n\n /// Will be called if `State::event` or `State::draw` panics.\n\n fn dump_before_abort(&self, _: &Canvas) {}\n\n /// Called before a normal exit, like window close\n\n fn before_quit(&self, _: &Canvas) {}\n\n\n\n /// If widgetry determines the video card is low on memory, this may be called. 
The application\n\n /// should make its best effort to delete any unused Drawables.\n\n fn free_memory(&mut self) {}\n\n}\n\n\n\npub(crate) struct App<A: SharedAppState> {\n\n /// A stack of states\n\n pub(crate) states: Vec<Box<dyn State<A>>>,\n\n pub(crate) shared_app_state: A,\n", "file_path": "widgetry/src/app_state.rs", "rank": 27, "score": 209879.43802708905 }, { "content": "/// A button to return to the title screen\n\npub fn home_btn(ctx: &EventCtx) -> Widget {\n\n ctx.style()\n\n .btn_plain\n\n .btn()\n\n .image_path(\"system/assets/pregame/logo.svg\")\n\n .image_dims(50.0)\n\n .build_widget(ctx, \"Home\")\n\n}\n\n\n", "file_path": "map_gui/src/tools/mod.rs", "rank": 28, "score": 209383.78520064312 }, { "content": "#[derive(PartialEq)]\n\nenum State {\n\n Initial {\n\n hovering: Option<usize>,\n\n selected: Option<usize>,\n\n },\n\n Idle {\n\n hovering: Option<usize>,\n\n selected: Option<usize>,\n\n },\n\n Dragging {\n\n orig_idx: usize,\n\n drag_from: ScreenPt,\n\n cursor_at: ScreenPt,\n\n new_idx: usize,\n\n },\n\n}\n\n\n\nimpl<T: 'static + Copy + PartialEq> DragDrop<T> {\n\n /// This widget emits several events.\n\n ///\n", "file_path": "widgetry/src/widgets/drag_drop.rs", "rank": 29, "score": 208488.1799809461 }, { "content": "struct LayoutStyle {\n\n bg_color: Option<Color>,\n\n // (thickness, color)\n\n outline: Option<(f64, Color)>,\n\n corner_rounding: CornerRounding,\n\n style: Style,\n\n}\n\n\n\n// Layouting\n\n// TODO Maybe I just want margin, not padding. And maybe more granular controls per side. 
And to\n\n// apply margin to everything in a row or column.\n\n// TODO Row and columns feel backwards when using them.\n\nimpl Widget {\n\n pub fn centered(mut self) -> Widget {\n\n self.layout.style.align_items = AlignItems::Center;\n\n self.layout.style.justify_content = JustifyContent::SpaceAround;\n\n self\n\n }\n\n\n\n pub fn centered_horiz(self) -> Widget {\n", "file_path": "widgetry/src/widgets/mod.rs", "rank": 30, "score": 207492.88546431874 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\nstruct State {\n\n id: IntersectionID,\n\n // The in-progress turns which any potential new turns must not conflict with\n\n accepted: BTreeSet<Request>,\n\n // Track when a request is first made and if it's \"urgent\" (because the agent is overflowing a\n\n // short queue)\n\n #[serde(\n\n serialize_with = \"serialize_btreemap\",\n\n deserialize_with = \"deserialize_btreemap\"\n\n )]\n\n waiting: BTreeMap<Request, (Time, bool)>,\n\n // When a vehicle begins an uber-turn, reserve the future turns to ensure they're able to\n\n // complete the entire sequence. 
This is especially necessary since groups of traffic signals\n\n // are not yet configured as one.\n\n reserved: BTreeSet<Request>,\n\n // In some cases, a turn completing at one intersection may affect agents waiting to start an\n\n // uber-turn at nearby intersections.\n\n uber_turn_neighbors: Vec<IntersectionID>,\n\n\n\n signal: Option<SignalState>,\n\n}\n\n\n", "file_path": "sim/src/mechanics/intersection.rs", "rank": 31, "score": 206451.3419101577 }, { "content": "pub fn app_top_panel(ctx: &mut EventCtx, app: &App) -> Panel {\n\n Panel::new_builder(\n\n Widget::row(vec![\n\n map_gui::tools::home_btn(ctx),\n\n Line(\"Low traffic neighborhoods\")\n\n .small_heading()\n\n .into_widget(ctx)\n\n .centered_vert(),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/info.svg\")\n\n .build_widget(ctx, \"about this tool\")\n\n .centered_vert(),\n\n map_gui::tools::change_map_btn(ctx, app).centered_vert(),\n\n Widget::row(vec![\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/search.svg\")\n\n .hotkey(lctrl(Key::F))\n\n .build_widget(ctx, \"search\")\n", "file_path": "apps/ltn/src/common/mod.rs", "rank": 32, "score": 203047.30359692575 }, { "content": "struct Column<A, T> {\n\n name: String,\n\n render: Box<dyn Fn(&EventCtx, &A, &T) -> GeomBatch>,\n\n col: Col<T>,\n\n}\n\n\n\npub struct Filter<A, T, F> {\n\n pub state: F,\n\n pub to_controls: Box<dyn Fn(&mut EventCtx, &A, &F) -> Widget>,\n\n pub from_controls: Box<dyn Fn(&Panel) -> F>,\n\n pub apply: Box<dyn Fn(&F, &T, &A) -> bool>,\n\n}\n\n\n\nimpl<A, T, F> Table<A, T, F> {\n\n pub fn new(\n\n id: impl Into<String>,\n\n data: Vec<T>,\n\n label_per_row: Box<dyn Fn(&T) -> String>,\n\n default_sort_by: &str,\n\n filter: Filter<A, T, F>,\n", "file_path": "widgetry/src/widgets/table.rs", "rank": 33, "score": 202730.47942761634 }, { "content": "pub fn bus_status(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Widget {\n\n Widget::custom_col(vec![\n\n bus_header(ctx, app, details, 
id, Tab::TransitVehicleStatus(id)),\n\n bus_status_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "apps/game/src/info/transit.rs", "rank": 34, "score": 201500.60174742283 }, { "content": "pub trait SpinnerValue:\n\n Copy\n\n + PartialOrd\n\n + std::fmt::Display\n\n + std::ops::Add<Output = Self>\n\n + std::ops::AddAssign\n\n + std::ops::Sub<Output = Self>\n\n + std::ops::SubAssign\n\nwhere\n\n Self: std::marker::Sized,\n\n{\n\n}\n\n\n\nimpl<T> SpinnerValue for T where\n\n T: Copy\n\n + PartialOrd\n\n + std::fmt::Display\n\n + std::ops::Add<Output = Self>\n\n + std::ops::AddAssign\n\n + std::ops::Sub<Output = Self>\n", "file_path": "widgetry/src/widgets/spinner.rs", "rank": 35, "score": 199811.6432828236 }, { "content": "/// Create a new widget by implementing this trait. You can instantiate your widget by calling\n\n/// `Widget::new(Box::new(instance of your new widget))`, which gives you the usual style options.\n\npub trait WidgetImpl: downcast_rs::Downcast {\n\n /// What width and height does the widget occupy? If this changes, be sure to set\n\n /// `redo_layout` to true in `event`.\n\n fn get_dims(&self) -> ScreenDims;\n\n /// Your widget's top left corner should be here. Handle mouse events and draw appropriately.\n\n fn set_pos(&mut self, top_left: ScreenPt);\n\n /// Your chance to react to an event. Any side effects outside of this widget are communicated\n\n /// through the output.\n\n fn event(&mut self, ctx: &mut EventCtx, output: &mut WidgetOutput);\n\n /// Draw the widget. Be sure to draw relative to the top-left specified by `set_pos`.\n\n fn draw(&self, g: &mut GfxCtx);\n\n /// If a new Panel is being created to replace an older one, all widgets have the chance to\n\n /// preserve state from the previous version.\n\n fn can_restore(&self) -> bool {\n\n false\n\n }\n\n /// Restore state from the previous version of this widget, with the same ID. 
Implementors must\n\n /// downcast.\n\n fn restore(&mut self, _: &mut EventCtx, _prev: &dyn WidgetImpl) {\n\n unreachable!()\n", "file_path": "widgetry/src/widgets/mod.rs", "rank": 36, "score": 198611.2219530504 }, { "content": "struct PanelState<'a> {\n\n building_counts: Vec<(&'a str, u32)>,\n\n max_count: usize,\n\n total_trips: usize,\n\n}\n\n\n", "file_path": "apps/game/src/sandbox/dashboards/commuter.rs", "rank": 37, "score": 198237.97458197415 }, { "content": "struct Object<ID: ObjectID> {\n\n _id: ID,\n\n quadtree_id: ItemId,\n\n hitbox: Polygon,\n\n zorder: usize,\n\n draw_normal: ToggleZoomed,\n\n draw_hover: Option<ToggleZoomed>,\n\n tooltip: Option<Text>,\n\n clickable: bool,\n\n draggable: bool,\n\n // TODO How should we communicate these keypresses are possible? Something standard, like\n\n // button tooltips?\n\n keybindings: Vec<(MultiKey, &'static str)>,\n\n}\n\n\n\nimpl<ID: ObjectID> World<ID> {\n\n /// Creates an empty `World`, whose objects can exist anywhere from (0, 0) to the max f64.\n\n pub fn unbounded() -> World<ID> {\n\n World {\n\n objects: HashMap::new(),\n", "file_path": "widgetry/src/mapspace/world.rs", "rank": 38, "score": 195255.9215071075 }, { "content": "/// If true, the neighborhood has changed and the caller should recalculate stuff, including the\n\n/// panel\n\npub fn handle_world_outcome(\n\n ctx: &mut EventCtx,\n\n app: &mut App,\n\n outcome: WorldOutcome<FilterableObj>,\n\n) -> bool {\n\n let map = &app.map;\n\n match outcome {\n\n WorldOutcome::ClickedObject(FilterableObj::InteriorRoad(r)) => {\n\n let road = map.get_r(r);\n\n // Filtering on a road that's already marked bike-only doesn't make sense\n\n if !PathConstraints::Car.can_use_road(road, map) {\n\n return true;\n\n }\n\n\n\n app.session.modal_filters.before_edit();\n\n if app.session.modal_filters.roads.remove(&r).is_none() {\n\n // Place the filter on the part of the road that was clicked\n\n // These calls shouldn't fail -- since we clicked a road, the cursor 
must be in\n\n // map-space. And project_pt returns a point that's guaranteed to be on the\n\n // polyline.\n", "file_path": "apps/ltn/src/per_neighborhood.rs", "rank": 39, "score": 193480.98032584018 }, { "content": "/// If the sim has highlighted people, then fade all others out.\n\nfn grey_out_unhighlighted_people(color: Color, person: &Option<PersonID>, sim: &Sim) -> Color {\n\n if let Some(ref highlighted) = sim.get_highlighted_people() {\n\n if person\n\n .as_ref()\n\n .map(|p| !highlighted.contains(p))\n\n .unwrap_or(false)\n\n {\n\n return color.tint(0.5);\n\n }\n\n }\n\n color\n\n}\n", "file_path": "map_gui/src/render/mod.rs", "rank": 40, "score": 191506.2680286842 }, { "content": "/// Creates the top row for any layer panel.\n\npub fn header(ctx: &mut EventCtx, name: &str) -> Widget {\n\n Widget::row(vec![\n\n Image::from_path(\"system/assets/tools/layers.svg\")\n\n .into_widget(ctx)\n\n .centered_vert(),\n\n name.text_widget(ctx).centered_vert(),\n\n ctx.style().btn_close_widget(ctx),\n\n ])\n\n}\n\n\n\npub const PANEL_PLACEMENT: (HorizontalAlignment, VerticalAlignment) = (\n\n HorizontalAlignment::Percent(0.02),\n\n VerticalAlignment::Percent(0.2),\n\n);\n", "file_path": "apps/game/src/layer/mod.rs", "rank": 41, "score": 191443.80305171467 }, { "content": "struct Card<T: PartialEq> {\n\n value: T,\n\n dims: ScreenDims,\n\n default_batch: GeomBatch,\n\n hovering_batch: GeomBatch,\n\n selected_batch: GeomBatch,\n\n}\n\n\n", "file_path": "widgetry/src/widgets/drag_drop.rs", "rank": 42, "score": 191286.01730387117 }, { "content": "/// Adjust the path to start on the polygon's border, not center.\n\npub fn trim_path(poly: &Polygon, path: Line) -> Line {\n\n for line in poly.points().windows(2) {\n\n if let Ok(l1) = Line::new(line[0], line[1]) {\n\n if let Some(hit) = l1.intersection(&path) {\n\n if let Ok(l2) = Line::new(hit, path.pt2()) {\n\n return l2;\n\n }\n\n }\n\n }\n\n }\n\n // Just give up\n\n path\n\n}\n", "file_path": "map_model/src/make/mod.rs", 
"rank": 43, "score": 188444.61039393634 }, { "content": "/// A button to change maps, with default keybindings\n\npub fn change_map_btn(ctx: &EventCtx, app: &dyn AppLike) -> Widget {\n\n ctx.style()\n\n .btn_popup_icon_text(\n\n \"system/assets/tools/map.svg\",\n\n nice_map_name(app.map().get_name()),\n\n )\n\n .hotkey(lctrl(Key::L))\n\n .build_widget(ctx, \"change map\")\n\n}\n\n\n", "file_path": "map_gui/src/tools/mod.rs", "rank": 44, "score": 186114.34898716243 }, { "content": "fn make_panel(ctx: &mut EventCtx) -> Panel {\n\n Panel::new_builder(Widget::col(vec![\n\n Widget::row(vec![\n\n Line(\"Blockfinder\").small_heading().into_widget(ctx),\n\n ctx.style().btn_close_widget(ctx),\n\n ]),\n\n \"Click a block to examine.\".text_widget(ctx),\n\n \"Press space to mark/unmark for merging\".text_widget(ctx),\n\n ctx.style()\n\n .btn_outline\n\n .text(\"Merge\")\n\n .hotkey(Key::M)\n\n .build_def(ctx),\n\n ctx.style()\n\n .btn_outline\n\n .text(\"Collapse dead-ends\")\n\n .hotkey(Key::D)\n\n .build_def(ctx),\n\n ctx.style()\n\n .btn_outline\n", "file_path": "apps/game/src/debug/blockfinder.rs", "rank": 45, "score": 186111.2236985793 }, { "content": "// TODO This is A/B Street specific\n\npub fn loading_tips() -> Text {\n\n Text::from_multiline(vec![\n\n Line(\"Have you tried...\"),\n\n Line(\"\"),\n\n Line(\"- simulating cities in Britain, Taiwan, Poland, and more?\"),\n\n Line(\"- the 15-minute neighborhood tool?\"),\n\n Line(\"- exploring all of the map layers?\"),\n\n Line(\"- playing 15-minute Santa, our arcade game spin-off?\"),\n\n ])\n\n}\n\n\n", "file_path": "map_gui/src/tools/mod.rs", "rank": 46, "score": 185202.42151366768 }, { "content": "/// A standard way to group a home button back to the title screen, the title of the current app,\n\n/// and a button to change maps. 
Callers must handle the `change map` and `home` click events.\n\npub fn app_header(ctx: &EventCtx, app: &dyn AppLike, title: &str) -> Widget {\n\n Widget::col(vec![\n\n Widget::row(vec![\n\n home_btn(ctx),\n\n Line(title).small_heading().into_widget(ctx).centered_vert(),\n\n ]),\n\n change_map_btn(ctx, app),\n\n ])\n\n}\n\n\n", "file_path": "map_gui/src/tools/mod.rs", "rank": 47, "score": 176869.81279287767 }, { "content": "fn options_from_controls(panel: &Panel) -> MovementOptions {\n\n if panel.is_checked(\"walking / biking\") {\n\n MovementOptions::Walking(WalkingOptions {\n\n allow_shoulders: panel\n\n .maybe_is_checked(\"Allow walking on the shoulder of the road without a sidewalk\")\n\n .unwrap_or(true),\n\n walking_speed: panel\n\n .maybe_dropdown_value(\"speed\")\n\n .unwrap_or_else(WalkingOptions::default_speed),\n\n })\n\n } else {\n\n MovementOptions::Biking\n\n }\n\n}\n\n\n", "file_path": "apps/fifteen_min/src/viewer.rs", "rank": 48, "score": 175848.12060041775 }, { "content": "fn options_to_controls(ctx: &mut EventCtx, opts: &Options) -> Widget {\n\n let mut rows = vec![Toggle::choice(\n\n ctx,\n\n \"walking / biking\",\n\n \"walking\",\n\n \"biking\",\n\n None,\n\n match opts.movement {\n\n MovementOptions::Walking(_) => true,\n\n MovementOptions::Biking => false,\n\n },\n\n )];\n\n match opts.movement {\n\n MovementOptions::Walking(ref opts) => {\n\n rows.push(Toggle::switch(\n\n ctx,\n\n \"Allow walking on the shoulder of the road without a sidewalk\",\n\n None,\n\n opts.allow_shoulders,\n\n ));\n", "file_path": "apps/fifteen_min/src/viewer.rs", "rank": 49, "score": 175438.64895215654 }, { "content": "pub trait TripManagementState<A: AppLike + 'static>: State<A> {\n\n fn mut_files(&mut self) -> &mut TripManagement<A, Self>\n\n where\n\n Self: Sized;\n\n fn app_session_current_trip_name(app: &mut A) -> &mut Option<String>\n\n where\n\n Self: Sized;\n\n fn sync_from_file_management(&mut self, ctx: &mut EventCtx, app: &mut 
A);\n\n}\n\n\n\n#[derive(Clone, PartialEq, Serialize, Deserialize)]\n\npub struct NamedTrip {\n\n name: String,\n\n pub waypoints: Vec<TripEndpoint>,\n\n}\n\n\n", "file_path": "map_gui/src/tools/trip_files.rs", "rank": 50, "score": 174910.3937006655 }, { "content": "pub fn color_for_agent_type(app: &App, a: AgentType) -> Color {\n\n match a {\n\n AgentType::Pedestrian => app.cs.unzoomed_pedestrian,\n\n AgentType::Bike => app.cs.unzoomed_bike,\n\n AgentType::Bus | AgentType::Train => app.cs.unzoomed_bus,\n\n AgentType::TransitRider => app.cs.bus_trip,\n\n AgentType::Car => app.cs.unzoomed_car,\n\n }\n\n}\n\n\n", "file_path": "apps/game/src/common/mod.rs", "rank": 51, "score": 174849.10191036866 }, { "content": "pub fn color_for_trip_phase(app: &App, tpt: TripPhaseType) -> Color {\n\n match tpt {\n\n TripPhaseType::Driving => app.cs.unzoomed_car,\n\n TripPhaseType::Walking => app.cs.unzoomed_pedestrian,\n\n TripPhaseType::Biking => app.cs.bike_trip,\n\n TripPhaseType::Parking => app.cs.parking_trip,\n\n TripPhaseType::WaitingForBus(_, _) => app.cs.bus_layer,\n\n TripPhaseType::RidingBus(_, _, _) => app.cs.bus_trip,\n\n TripPhaseType::Cancelled | TripPhaseType::Finished => unreachable!(),\n\n TripPhaseType::DelayedStart => Color::YELLOW,\n\n }\n\n}\n\n\n", "file_path": "apps/game/src/common/mod.rs", "rank": 52, "score": 171410.9494515384 }, { "content": "fn make_select_panel(ctx: &mut EventCtx, selector: &RoadSelector) -> Panel {\n\n Panel::new_builder(Widget::col(vec![\n\n Widget::row(vec![\n\n Line(\"Select many roads\").small_heading().into_widget(ctx),\n\n ctx.style().btn_close_widget(ctx),\n\n ]),\n\n selector.make_controls(ctx),\n\n ]))\n\n .aligned(HorizontalAlignment::Center, VerticalAlignment::Top)\n\n .build(ctx)\n\n}\n\n\n\nimpl State<App> for BulkSelect {\n\n fn event(&mut self, ctx: &mut EventCtx, app: &mut App) -> Transition {\n\n match self.panel.event(ctx) {\n\n Outcome::Clicked(x) => match x.as_ref() {\n\n \"close\" => {\n\n return 
Transition::Pop;\n\n }\n\n x => {\n", "file_path": "apps/game/src/debug/select_roads.rs", "rank": 53, "score": 170769.51372903492 }, { "content": "pub fn color_for_mode(app: &dyn AppLike, m: TripMode) -> Color {\n\n match m {\n\n TripMode::Walk => app.cs().unzoomed_pedestrian,\n\n TripMode::Bike => app.cs().unzoomed_bike,\n\n TripMode::Transit => app.cs().unzoomed_bus,\n\n TripMode::Drive => app.cs().unzoomed_car,\n\n }\n\n}\n\n\n", "file_path": "map_gui/src/tools/ui.rs", "rank": 54, "score": 170245.02585371205 }, { "content": "pub fn find_next_file(orig: String) -> Option<String> {\n\n let files = list_dir(parent_path(&orig));\n\n files.into_iter().find(|f| *f > orig)\n\n}\n\n\n", "file_path": "abstio/src/io.rs", "rank": 55, "score": 169355.80170551618 }, { "content": "/// Keeps file extensions\n\npub fn find_prev_file(orig: String) -> Option<String> {\n\n let mut files = list_dir(parent_path(&orig));\n\n files.reverse();\n\n files.into_iter().find(|f| *f < orig)\n\n}\n\n\n", "file_path": "abstio/src/io.rs", "rank": 56, "score": 169355.80170551618 }, { "content": "pub fn make_legend<X: Axis<X>, Y: Axis<Y>>(\n\n ctx: &EventCtx,\n\n series: &[Series<X, Y>],\n\n opts: &PlotOptions<X, Y>,\n\n) -> Widget {\n\n let mut row = Vec::new();\n\n let mut seen = HashSet::new();\n\n for s in series {\n\n if seen.contains(&s.label) {\n\n continue;\n\n }\n\n seen.insert(s.label.clone());\n\n if opts.filterable {\n\n row.push(Toggle::colored_checkbox(\n\n ctx,\n\n &s.label,\n\n s.color,\n\n !opts.disabled.contains(&s.label),\n\n ));\n\n } else {\n", "file_path": "widgetry/src/widgets/plots.rs", "rank": 57, "score": 168870.38588204666 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::LaneInfo(id)),\n\n info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "apps/game/src/info/lane.rs", "rank": 58, "score": 168735.88270966467 }, { "content": "pub 
fn crowd(ctx: &EventCtx, app: &App, details: &mut Details, members: &[PedestrianID]) -> Widget {\n\n let header = Widget::custom_col(vec![\n\n Line(\"Pedestrian crowd\").small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n Widget::custom_col(vec![\n\n header,\n\n crowd_body(ctx, app, details, members).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "apps/game/src/info/person.rs", "rank": 59, "score": 168735.88270966464 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: IntersectionID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::IntersectionInfo(id)),\n\n info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "apps/game/src/info/intersection.rs", "rank": 60, "score": 168735.88270966464 }, { "content": "use std::cell::RefCell;\n\nuse std::collections::HashSet;\n\nuse std::rc::Rc;\n\n\n\nuse stretch::geometry::Size;\n\nuse stretch::node::{Node, Stretch};\n\nuse stretch::number::Number;\n\nuse stretch::style::{Dimension, Style};\n\n\n\nuse geom::Polygon;\n\n\n\nuse crate::widgets::slider;\n\nuse crate::widgets::spinner::SpinnerValue;\n\nuse crate::widgets::Container;\n\nuse crate::{\n\n Autocomplete, Button, Color, Dropdown, EventCtx, GfxCtx, HorizontalAlignment, Menu, Outcome,\n\n PersistentSplit, ScreenDims, ScreenPt, ScreenRectangle, Slider, Spinner, Stash, TextBox,\n\n Toggle, VerticalAlignment, Widget, WidgetImpl, WidgetOutput,\n\n};\n\n\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 61, "score": 168520.42468881654 }, { "content": "pub struct Panel {\n\n top_level: Widget,\n\n // (layout, root_dims)\n\n cached_flexbox: Option<(Stretch, Vec<Node>, ScreenDims)>,\n\n horiz: HorizontalAlignment,\n\n vert: VerticalAlignment,\n\n dims_x: PanelDims,\n\n dims_y: PanelDims,\n\n\n\n scrollable_x: bool,\n\n scrollable_y: bool,\n\n contents_dims: ScreenDims,\n\n container_dims: ScreenDims,\n\n clip_rect: Option<ScreenRectangle>,\n\n}\n\n\n\nimpl Panel {\n\n pub fn 
new_builder(top_level: Widget) -> PanelBuilder {\n\n PanelBuilder {\n\n top_level,\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 62, "score": 168518.45034058258 }, { "content": " pub fn dims_width(mut self, dims: PanelDims) -> PanelBuilder {\n\n self.dims_x = dims;\n\n self\n\n }\n\n\n\n pub fn dims_height(mut self, dims: PanelDims) -> PanelBuilder {\n\n self.dims_y = dims;\n\n self\n\n }\n\n\n\n // TODO Change all callers\n\n pub fn exact_size_percent(self, x: usize, y: usize) -> PanelBuilder {\n\n self.dims_width(PanelDims::ExactPercent((x as f64) / 100.0))\n\n .dims_height(PanelDims::ExactPercent((y as f64) / 100.0))\n\n }\n\n\n\n /// When a panel is built, a fake, \"no-op\" mouseover event is immediately fired, to let all\n\n /// widgets initially pick up the position of the mouse. Normally this event should only\n\n /// produce `Outcome::Nothing`, since other outcomes will be lost -- there's no way for the\n\n /// caller to see that first outcome.\n\n ///\n\n /// If a caller expects this first mouseover to possibly produce an outcome, they can call this\n\n /// and avoid the assertion.\n\n pub fn ignore_initial_events(mut self) -> PanelBuilder {\n\n self.ignore_initial_events = true;\n\n self\n\n }\n\n}\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 63, "score": 168517.59500937784 }, { "content": "\n\n /// Removes a widget from the panel. 
Does not recalculate layout!\n\n pub fn take(&mut self, id: &str) -> Widget {\n\n self.top_level.take(id).unwrap()\n\n }\n\n\n\n pub fn clicked_outside(&self, ctx: &mut EventCtx) -> bool {\n\n // TODO No great way to populate OSD from here with \"click to cancel\"\n\n !self.top_level.rect.contains(ctx.canvas.get_cursor()) && ctx.normal_left_click()\n\n }\n\n\n\n pub fn currently_hovering(&self) -> Option<&String> {\n\n self.top_level.currently_hovering()\n\n }\n\n}\n\n\n\npub struct PanelBuilder {\n\n top_level: Widget,\n\n horiz: HorizontalAlignment,\n\n vert: VerticalAlignment,\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 64, "score": 168516.89356802913 }, { "content": " &self,\n\n name: I,\n\n ) -> Option<T> {\n\n let name = name.as_ref();\n\n if self.has_widget(name) {\n\n Some(self.find::<Dropdown<T>>(name).current_value())\n\n } else {\n\n None\n\n }\n\n }\n\n pub fn persistent_split_value<T: 'static + PartialEq + Clone>(&self, name: &str) -> T {\n\n self.find::<PersistentSplit<T>>(name).current_value()\n\n }\n\n\n\n /// Consumes the autocomplete widget. 
It's fine if the panel survives past this event; the\n\n /// autocomplete just needs to be interacted with again to produce more values.\n\n pub fn autocomplete_done<T: 'static + Clone>(&mut self, name: &str) -> Option<Vec<T>> {\n\n self.find_mut::<Autocomplete<T>>(name).take_final_value()\n\n }\n\n\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 65, "score": 168514.82481211552 }, { "content": "\n\n pub fn get_all_click_actions(&self) -> HashSet<String> {\n\n let mut actions = HashSet::new();\n\n self.top_level.get_all_click_actions(&mut actions);\n\n actions\n\n }\n\n\n\n pub fn restore(&mut self, ctx: &mut EventCtx, prev: &Panel) {\n\n self.set_scroll_offset(ctx, prev.scroll_offset());\n\n\n\n self.top_level.restore(ctx, prev);\n\n\n\n // Since we just moved things around, let all widgets respond to the mouse being somewhere\n\n ctx.no_op_event(true, |ctx| {\n\n assert!(matches!(self.event(ctx), Outcome::Nothing))\n\n });\n\n }\n\n\n\n pub fn restore_scroll(&mut self, ctx: &mut EventCtx, prev: &Panel) {\n\n self.set_scroll_offset(ctx, prev.scroll_offset());\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 66, "score": 168513.37960777394 }, { "content": " /// Grab a stashed value, with the ability to pass it around and modify it.\n\n pub fn stash<T: 'static>(&self, name: &str) -> Rc<RefCell<T>> {\n\n self.find::<Stash<T>>(name).get_value()\n\n }\n\n\n\n /// Grab a stashed value and clone it.\n\n pub fn clone_stashed<T: 'static + Clone>(&self, name: &str) -> T {\n\n self.find::<Stash<T>>(name).get_value().borrow().clone()\n\n }\n\n\n\n pub fn is_button_enabled(&self, name: &str) -> bool {\n\n self.find::<Button>(name).is_enabled()\n\n }\n\n\n\n pub fn maybe_find_widget(&self, name: &str) -> Option<&Widget> {\n\n self.top_level.find(name)\n\n }\n\n\n\n pub fn maybe_find<T: WidgetImpl>(&self, name: &str) -> Option<&T> {\n\n self.maybe_find_widget(name).map(|w| {\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 67, "score": 
168512.4454859675 }, { "content": " panel.event(ctx);\n\n } else {\n\n assert!(matches!(panel.event(ctx), Outcome::Nothing))\n\n }\n\n });\n\n panel\n\n }\n\n\n\n pub fn aligned(mut self, horiz: HorizontalAlignment, vert: VerticalAlignment) -> PanelBuilder {\n\n self.horiz = horiz;\n\n self.vert = vert;\n\n self\n\n }\n\n\n\n pub fn aligned_pair(mut self, pair: (HorizontalAlignment, VerticalAlignment)) -> PanelBuilder {\n\n self.horiz = pair.0;\n\n self.vert = pair.1;\n\n self\n\n }\n\n\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 68, "score": 168512.10494838556 }, { "content": " }\n\n\n\n /// All margins/padding/etc from the previous widget are retained. The ID is set on the new\n\n /// widget; no need to do that yourself.\n\n pub fn replace(&mut self, ctx: &mut EventCtx, id: &str, mut new: Widget) {\n\n if let Some(ref new_id) = new.id {\n\n assert_eq!(id, new_id);\n\n }\n\n new = new.named(id);\n\n let old = self\n\n .top_level\n\n .find_mut(id)\n\n .unwrap_or_else(|| panic!(\"Panel doesn't have {}\", id));\n\n new.layout.style = old.layout.style;\n\n *old = new;\n\n self.recompute_layout(ctx, true);\n\n // TODO Since we just moved things around, let all widgets respond to the mouse being\n\n // somewhere? 
Maybe always do this in recompute_layout?\n\n //ctx.no_op_event(true, |ctx| assert_eq!(self.event(ctx), Outcome::Nothing));\n\n }\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 69, "score": 168511.8946910092 }, { "content": " dims_x: PanelDims,\n\n dims_y: PanelDims,\n\n ignore_initial_events: bool,\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub enum PanelDims {\n\n MaxPercent(f64),\n\n ExactPercent(f64),\n\n ExactPixels(f64),\n\n}\n\n\n\nimpl PanelBuilder {\n\n pub fn build(mut self, ctx: &mut EventCtx) -> Panel {\n\n self.top_level = self.top_level.padding(16).bg(ctx.style.panel_bg);\n\n self.build_custom(ctx)\n\n }\n\n\n\n pub fn build_custom(self, ctx: &mut EventCtx) -> Panel {\n\n let ignore_initial_events = self.ignore_initial_events;\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 70, "score": 168511.82074369743 }, { "content": " pub fn text_box(&self, name: &str) -> String {\n\n self.find::<TextBox>(name).get_line()\n\n }\n\n\n\n pub fn spinner<T: 'static + SpinnerValue>(&self, name: &str) -> T {\n\n self.find::<Spinner<T>>(name).current\n\n }\n\n pub fn modify_spinner<T: 'static + SpinnerValue>(\n\n &mut self,\n\n ctx: &EventCtx,\n\n name: &str,\n\n delta: T,\n\n ) {\n\n self.find_mut::<Spinner<T>>(name).modify(ctx, delta)\n\n }\n\n\n\n pub fn dropdown_value<T: 'static + PartialEq + Clone, I: AsRef<str>>(&self, name: I) -> T {\n\n self.find::<Dropdown<T>>(name.as_ref()).current_value()\n\n }\n\n pub fn maybe_dropdown_value<T: 'static + PartialEq + Clone, I: AsRef<str>>(\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 71, "score": 168509.9010286558 }, { "content": " }\n\n\n\n pub fn take_menu_choice<T: 'static>(&mut self, name: &str) -> T {\n\n self.find_mut::<Menu<T>>(name).take_current_choice()\n\n }\n\n\n\n pub fn is_checked(&self, name: &str) -> bool {\n\n self.find::<Toggle>(name).enabled\n\n }\n\n pub fn maybe_is_checked(&self, name: &str) -> Option<bool> {\n\n if self.has_widget(name) {\n\n 
Some(self.find::<Toggle>(name).enabled)\n\n } else {\n\n None\n\n }\n\n }\n\n pub fn set_checked(&mut self, name: &str, on_off: bool) {\n\n self.find_mut::<Toggle>(name).enabled = on_off\n\n }\n\n\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 72, "score": 168509.69394057366 }, { "content": " } else {\n\n panic!(\"Can't find widget {}\", name);\n\n }\n\n }\n\n\n\n /// Swap the inner content of a `container` widget with `new_inner_content`.\n\n pub(crate) fn swap_inner_content(\n\n &mut self,\n\n ctx: &EventCtx,\n\n container_name: &str,\n\n new_inner_content: &mut Widget,\n\n ) {\n\n let old_container: &mut Container = self.find_mut(container_name);\n\n assert_eq!(\n\n old_container.members.len(),\n\n 1,\n\n \"method only intended to be used for containers created with `Widget::container`\"\n\n );\n\n std::mem::swap(&mut old_container.members[0], new_inner_content);\n\n self.recompute_layout(ctx, true);\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 73, "score": 168509.00385241708 }, { "content": " }\n\n\n\n pub fn draw(&self, g: &mut GfxCtx) {\n\n if let Some(ref rect) = self.clip_rect {\n\n g.enable_clipping(rect.clone());\n\n g.canvas.mark_covered_area(rect.clone());\n\n } else {\n\n g.canvas.mark_covered_area(self.top_level.rect.clone());\n\n }\n\n\n\n // Debugging\n\n if false {\n\n g.fork_screenspace();\n\n g.draw_polygon(Color::RED.alpha(0.5), self.top_level.rect.to_polygon());\n\n\n\n let top_left = g\n\n .canvas\n\n .align_window(self.container_dims, self.horiz, self.vert);\n\n g.draw_polygon(\n\n Color::BLUE.alpha(0.5),\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 74, "score": 168508.76670958 }, { "content": " horiz: HorizontalAlignment::Center,\n\n vert: VerticalAlignment::Center,\n\n dims_x: PanelDims::MaxPercent(1.0),\n\n dims_y: PanelDims::MaxPercent(1.0),\n\n ignore_initial_events: false,\n\n }\n\n }\n\n\n\n /// Returns an empty panel. 
`event` and `draw` will have no effect.\n\n pub fn empty(ctx: &mut EventCtx) -> Panel {\n\n Panel::new_builder(Widget::col(vec![])).build_custom(ctx)\n\n }\n\n\n\n fn update_container_dims_for_canvas_dims(&mut self, canvas_dims: ScreenDims) {\n\n let width = match self.dims_x {\n\n PanelDims::MaxPercent(pct) => self.contents_dims.width.min(pct * canvas_dims.width),\n\n PanelDims::ExactPercent(pct) => pct * canvas_dims.width,\n\n PanelDims::ExactPixels(x) => x,\n\n };\n\n let height = match self.dims_y {\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 75, "score": 168508.6808054429 }, { "content": " // 2. panels don't responsively change `contents_dims`\n\n //\n\n // - `panel.top_level.rect`, used here to set content_dims, is set by `recompute_layout`.\n\n // - the output of `recompute_layout` depends on `container_dims`\n\n // - `container_dims`, in the case of `MaxPercent`, depend on `content_dims`\n\n //\n\n // TODO: to support Panel's that can resize their `contents_dims`, we'll need to detangle\n\n // this dependency. 
This might entail decomposing the flexbox calculation to layout first\n\n // the inner content, and then potentially a second pass to layout any x/y scrollbars.\n\n panel.recompute_layout(ctx, false);\n\n panel.contents_dims =\n\n ScreenDims::new(panel.top_level.rect.width(), panel.top_level.rect.height());\n\n panel.update_container_dims_for_canvas_dims(ctx.canvas.get_window_dims());\n\n panel.recompute_layout(ctx, false);\n\n\n\n // Just trigger error if a button is double-defined\n\n panel.get_all_click_actions();\n\n // Let all widgets initially respond to the mouse being somewhere\n\n ctx.no_op_event(true, |ctx| {\n\n if ignore_initial_events {\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 76, "score": 168507.85812204028 }, { "content": " if let Some(x) = w.widget.downcast_ref::<T>() {\n\n x\n\n } else {\n\n panic!(\"Found widget {}, but wrong type\", name);\n\n }\n\n })\n\n }\n\n\n\n pub fn find<T: WidgetImpl>(&self, name: &str) -> &T {\n\n self.maybe_find(name)\n\n .unwrap_or_else(|| panic!(\"Can't find widget {}\", name))\n\n }\n\n\n\n pub fn find_mut<T: WidgetImpl>(&mut self, name: &str) -> &mut T {\n\n if let Some(w) = self.top_level.find_mut(name) {\n\n if let Some(x) = w.widget.downcast_mut::<T>() {\n\n x\n\n } else {\n\n panic!(\"Found widget {}, but wrong type\", name);\n\n }\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 77, "score": 168506.99919010804 }, { "content": " }\n\n\n\n pub fn rect_of(&self, name: &str) -> &ScreenRectangle {\n\n &self.top_level.find(name).unwrap().rect\n\n }\n\n // TODO Deprecate\n\n pub fn center_of(&self, name: &str) -> ScreenPt {\n\n self.rect_of(name).center()\n\n }\n\n pub fn center_of_panel(&self) -> ScreenPt {\n\n self.top_level.rect.center()\n\n }\n\n pub fn panel_dims(&self) -> ScreenDims {\n\n self.top_level.rect.dims()\n\n }\n\n\n\n pub fn align(&mut self, horiz: HorizontalAlignment, vert: VerticalAlignment) {\n\n self.horiz = horiz;\n\n self.vert = vert;\n\n // TODO Recompute layout and 
fire no_op_event?\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 78, "score": 168506.3923858941 }, { "content": " self.recompute_layout(ctx, false);\n\n }\n\n\n\n let before = self.scroll_offset();\n\n let mut output = WidgetOutput::new();\n\n self.top_level.widget.event(ctx, &mut output);\n\n\n\n if output.redo_layout {\n\n self.recompute_layout(ctx, true);\n\n } else if self.scroll_offset() != before {\n\n self.recompute_layout_if_needed(ctx, true);\n\n }\n\n\n\n // Remember this for the next event\n\n if let Outcome::Focused(ref id) = output.outcome {\n\n assert!(ctx.next_focus_owned_by.is_none());\n\n ctx.next_focus_owned_by = Some(id.clone());\n\n }\n\n\n\n output.outcome\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 79, "score": 168506.19612639365 }, { "content": " PanelDims::MaxPercent(pct) => self.contents_dims.height.min(pct * canvas_dims.height),\n\n PanelDims::ExactPercent(pct) => pct * canvas_dims.height,\n\n PanelDims::ExactPixels(x) => x,\n\n };\n\n self.container_dims = ScreenDims::new(width, height);\n\n }\n\n\n\n fn recompute_scrollbar_layout(&mut self, ctx: &EventCtx) {\n\n let old_scrollable_x = self.scrollable_x;\n\n let old_scrollable_y = self.scrollable_y;\n\n let old_scroll_offset = self.scroll_offset();\n\n let mut was_dragging_x = false;\n\n let mut was_dragging_y = false;\n\n\n\n self.scrollable_x = self.contents_dims.width > self.container_dims.width;\n\n self.scrollable_y = self.contents_dims.height > self.container_dims.height;\n\n\n\n // Unwrap the main widget from any scrollable containers if necessary.\n\n if old_scrollable_y {\n\n let container = self.top_level.widget.downcast_mut::<Container>().unwrap();\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 80, "score": 168505.0053960916 }, { "content": " }\n\n\n\n pub fn scroll_to_member(&mut self, ctx: &EventCtx, name: String) {\n\n if let Some(w) = self.top_level.find(&name) {\n\n let y1 = w.rect.y1;\n\n self.set_scroll_offset(ctx, (0.0, y1));\n\n } 
else {\n\n panic!(\"Can't scroll_to_member of unknown {}\", name);\n\n }\n\n }\n\n\n\n pub fn has_widget(&self, name: &str) -> bool {\n\n self.top_level.find(name).is_some()\n\n }\n\n\n\n pub fn slider(&self, name: &str) -> &Slider {\n\n self.find(name)\n\n }\n\n pub fn slider_mut(&mut self, name: &str) -> &mut Slider {\n\n self.find_mut(name)\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 81, "score": 168504.98366785297 }, { "content": " self.slider_mut(\"vert scrollbar\").set_percent(ctx, 0.0);\n\n } else {\n\n self.slider_mut(\"vert scrollbar\")\n\n .set_percent(ctx, offset.1.clamp(0.0, max) / max);\n\n }\n\n }\n\n changed\n\n }\n\n\n\n fn set_scroll_offset(&mut self, ctx: &EventCtx, offset: (f64, f64)) {\n\n if self.update_scroll_sliders(ctx, offset) {\n\n self.recompute_layout_if_needed(ctx, false);\n\n }\n\n }\n\n\n\n pub fn event(&mut self, ctx: &mut EventCtx) -> Outcome {\n\n if (self.scrollable_x || self.scrollable_y)\n\n && ctx\n\n .canvas\n\n .get_cursor_in_screen_space()\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 82, "score": 168502.65322501582 }, { "content": " panel.top_level.layout.style.min_size.width =\n\n Dimension::Points((pct * ctx.canvas.window_width) as f32);\n\n }\n\n PanelDims::ExactPixels(x) => {\n\n panel.top_level.layout.style.min_size.width = Dimension::Points(x as f32);\n\n }\n\n }\n\n match self.dims_y {\n\n PanelDims::MaxPercent(_) => {}\n\n PanelDims::ExactPercent(pct) => {\n\n panel.top_level.layout.style.min_size.height =\n\n Dimension::Points((pct * ctx.canvas.window_height) as f32);\n\n }\n\n PanelDims::ExactPixels(x) => {\n\n panel.top_level.layout.style.min_size.height = Dimension::Points(x as f32);\n\n }\n\n }\n\n\n\n // There is a dependency cycle in our layout logic. As a consequence:\n\n // 1. 
we have to call `recompute_layout` twice here\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 83, "score": 168500.9624329859 }, { "content": " let mut panel = Panel {\n\n top_level: self.top_level,\n\n\n\n horiz: self.horiz,\n\n vert: self.vert,\n\n dims_x: self.dims_x,\n\n dims_y: self.dims_y,\n\n\n\n scrollable_x: false,\n\n scrollable_y: false,\n\n contents_dims: ScreenDims::new(0.0, 0.0),\n\n container_dims: ScreenDims::new(0.0, 0.0),\n\n clip_rect: None,\n\n cached_flexbox: None,\n\n };\n\n match self.dims_x {\n\n PanelDims::MaxPercent(_) => {}\n\n PanelDims::ExactPercent(pct) => {\n\n // Don't set size, because then scrolling breaks -- the actual size has to be based\n\n // on the contents.\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 84, "score": 168500.5180337752 }, { "content": " if was_dragging_x {\n\n slider.widget.downcast_mut::<Slider>().unwrap().dragging = true;\n\n }\n\n\n\n let old_top_level = std::mem::replace(&mut self.top_level, Widget::nothing());\n\n self.top_level = Widget::custom_col(vec![old_top_level, slider]);\n\n }\n\n\n\n if self.scrollable_y {\n\n let mut slider = Slider::vertical_scrollbar(\n\n ctx,\n\n self.container_dims.height,\n\n self.container_dims.height\n\n * (self.container_dims.height / self.contents_dims.height),\n\n 0.0,\n\n )\n\n .named(\"vert scrollbar\")\n\n .abs(top_left.x + self.container_dims.width, top_left.y);\n\n if was_dragging_y {\n\n slider.widget.downcast_mut::<Slider>().unwrap().dragging = true;\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 85, "score": 168500.3301608312 }, { "content": " was_dragging_y = container.members[1]\n\n .widget\n\n .downcast_ref::<Slider>()\n\n .unwrap()\n\n .dragging;\n\n self.top_level = container.members.remove(0);\n\n }\n\n\n\n if old_scrollable_x {\n\n let container = self.top_level.widget.downcast_mut::<Container>().unwrap();\n\n was_dragging_x = container.members[1]\n\n .widget\n\n .downcast_ref::<Slider>()\n\n .unwrap()\n\n .dragging;\n\n 
self.top_level = container.members.remove(0);\n\n }\n\n\n\n let mut container_dims = self.container_dims;\n\n // TODO Handle making room for a horizontal scrollbar on the bottom. The equivalent change\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 86, "score": 168500.19158281107 }, { "content": " }\n\n\n\n let old_top_level = std::mem::replace(&mut self.top_level, Widget::nothing());\n\n self.top_level = Widget::custom_row(vec![old_top_level, slider]);\n\n }\n\n\n\n self.update_scroll_sliders(ctx, old_scroll_offset);\n\n\n\n self.clip_rect = if self.scrollable_x || self.scrollable_y {\n\n Some(ScreenRectangle::top_left(top_left, self.container_dims))\n\n } else {\n\n None\n\n };\n\n }\n\n\n\n // TODO: this method potentially gets called multiple times in a render pass as an\n\n // optimization, we could replace all the current call sites with a \"dirty\" flag, e.g.\n\n // `set_needs_layout()` and then call `layout_if_needed()` once at the last possible moment\n\n fn recompute_layout(&mut self, ctx: &EventCtx, recompute_bg: bool) {\n\n self.invalidate_flexbox();\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 87, "score": 168499.05752469497 }, { "content": " // to container_dims.height doesn't work as expected.\n\n if self.scrollable_y {\n\n container_dims.width += slider::SCROLLBAR_BG_WIDTH;\n\n }\n\n let top_left = ctx\n\n .canvas\n\n .align_window(container_dims, self.horiz, self.vert);\n\n\n\n // Wrap the main widget in scrollable containers if necessary.\n\n if self.scrollable_x {\n\n let mut slider = Slider::horizontal_scrollbar(\n\n ctx,\n\n self.container_dims.width,\n\n self.container_dims.width * (self.container_dims.width / self.contents_dims.width),\n\n 0.0,\n\n )\n\n .named(\"horiz scrollbar\")\n\n .abs(top_left.x, top_left.y + self.container_dims.height);\n\n // We constantly destroy and recreate the scrollbar slider while dragging it. 
Preserve\n\n // the dragging property, so we can keep dragging it.\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 88, "score": 168497.72852958797 }, { "content": " nodes.reverse();\n\n\n\n // TODO Express more simply. Constraining this seems useless.\n\n let container_size = Size {\n\n width: Number::Undefined,\n\n height: Number::Undefined,\n\n };\n\n stretch.compute_layout(root, container_size).unwrap();\n\n\n\n // TODO I'm so confused why these 2 are acting differently. :(\n\n let effective_dims = if self.scrollable_x || self.scrollable_y {\n\n self.container_dims\n\n } else {\n\n let result = stretch.layout(root).unwrap();\n\n ScreenDims::new(result.size.width.into(), result.size.height.into())\n\n };\n\n\n\n (stretch, nodes, effective_dims)\n\n }\n\n\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 89, "score": 168493.50587686367 }, { "content": " fn recompute_layout_if_needed(&mut self, ctx: &EventCtx, recompute_bg: bool) {\n\n self.recompute_scrollbar_layout(ctx);\n\n let (stretch, nodes, effective_dims) = self\n\n .cached_flexbox\n\n .take()\n\n .unwrap_or_else(|| self.compute_flexbox());\n\n\n\n {\n\n let top_left = ctx\n\n .canvas\n\n .align_window(effective_dims, self.horiz, self.vert);\n\n let offset = self.scroll_offset();\n\n let mut nodes = nodes.clone();\n\n self.top_level.apply_flexbox(\n\n &stretch,\n\n &mut nodes,\n\n top_left.x,\n\n top_left.y,\n\n offset,\n\n ctx,\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 90, "score": 168493.50587686367 }, { "content": " .map(|pt| self.top_level.rect.contains(pt))\n\n .unwrap_or(false)\n\n {\n\n if let Some((dx, dy)) = ctx.input.get_mouse_scroll() {\n\n let x_offset = if self.scrollable_x {\n\n self.scroll_offset().0 + dx * (ctx.canvas.settings.gui_scroll_speed as f64)\n\n } else {\n\n 0.0\n\n };\n\n let y_offset = if self.scrollable_y {\n\n self.scroll_offset().1 - dy * (ctx.canvas.settings.gui_scroll_speed as f64)\n\n } else {\n\n 0.0\n\n };\n\n self.set_scroll_offset(ctx, 
(x_offset, y_offset));\n\n }\n\n }\n\n\n\n if ctx.input.is_window_resized() {\n\n self.update_container_dims_for_canvas_dims(ctx.canvas.get_window_dims());\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 91, "score": 168493.50587686367 }, { "content": " self.recompute_layout_if_needed(ctx, recompute_bg)\n\n }\n\n\n\n fn invalidate_flexbox(&mut self) {\n\n self.cached_flexbox = None;\n\n }\n\n\n\n fn compute_flexbox(&self) -> (Stretch, Vec<Node>, ScreenDims) {\n\n let mut stretch = Stretch::new();\n\n let root = stretch\n\n .new_node(\n\n Style {\n\n ..Default::default()\n\n },\n\n Vec::new(),\n\n )\n\n .unwrap();\n\n\n\n let mut nodes = vec![];\n\n self.top_level.get_flexbox(root, &mut stretch, &mut nodes);\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 92, "score": 168493.50587686367 }, { "content": " recompute_bg,\n\n false,\n\n );\n\n assert!(nodes.is_empty());\n\n }\n\n self.cached_flexbox = Some((stretch, nodes, effective_dims));\n\n }\n\n\n\n fn scroll_offset(&self) -> (f64, f64) {\n\n let x = if self.scrollable_x {\n\n self.slider(\"horiz scrollbar\").get_percent()\n\n * (self.contents_dims.width - self.container_dims.width).max(0.0)\n\n } else {\n\n 0.0\n\n };\n\n let y = if self.scrollable_y {\n\n self.slider(\"vert scrollbar\").get_percent()\n\n * (self.contents_dims.height - self.container_dims.height).max(0.0)\n\n } else {\n\n 0.0\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 93, "score": 168493.50587686367 }, { "content": " };\n\n (x, y)\n\n }\n\n\n\n fn update_scroll_sliders(&mut self, ctx: &EventCtx, offset: (f64, f64)) -> bool {\n\n let mut changed = false;\n\n if self.scrollable_x {\n\n changed = true;\n\n let max = (self.contents_dims.width - self.container_dims.width).max(0.0);\n\n if max == 0.0 {\n\n self.slider_mut(\"horiz scrollbar\").set_percent(ctx, 0.0);\n\n } else {\n\n self.slider_mut(\"horiz scrollbar\")\n\n .set_percent(ctx, offset.0.clamp(0.0, max) / max);\n\n }\n\n }\n\n if self.scrollable_y {\n\n 
changed = true;\n\n let max = (self.contents_dims.height - self.container_dims.height).max(0.0);\n\n if max == 0.0 {\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 94, "score": 168493.50587686367 }, { "content": " Polygon::rectangle(self.container_dims.width, self.container_dims.height)\n\n .translate(top_left.x, top_left.y),\n\n );\n\n g.unfork();\n\n }\n\n\n\n self.top_level.draw(g);\n\n if self.scrollable_x || self.scrollable_y {\n\n g.disable_clipping();\n\n\n\n // Draw the scrollbars after clipping is disabled, because they actually live just\n\n // outside the rectangle.\n\n if self.scrollable_x {\n\n self.slider(\"horiz scrollbar\").draw(g);\n\n }\n\n if self.scrollable_y {\n\n self.slider(\"vert scrollbar\").draw(g);\n\n }\n\n }\n\n }\n", "file_path": "widgetry/src/widgets/panel.rs", "rank": 95, "score": 168493.50587686367 }, { "content": "fn make_pagination(ctx: &mut EventCtx, total: usize, skip: usize) -> Widget {\n\n let next = ctx\n\n .style()\n\n .btn_next()\n\n .disabled(skip + 1 + ROWS >= total)\n\n .hotkey(Key::RightArrow);\n\n let prev = ctx\n\n .style()\n\n .btn_prev()\n\n .disabled(skip == 0)\n\n .hotkey(Key::LeftArrow);\n\n\n\n Widget::row(vec![\n\n prev.build_widget(ctx, \"previous\"),\n\n format!(\n\n \"{}-{} of {}\",\n\n if total > 0 {\n\n prettyprint_usize(skip + 1)\n\n } else {\n\n \"0\".to_string()\n\n },\n\n prettyprint_usize((skip + 1 + ROWS).min(total)),\n\n prettyprint_usize(total)\n\n )\n\n .text_widget(ctx)\n\n .centered_vert(),\n\n next.build_widget(ctx, \"next\"),\n\n ])\n\n}\n\n\n", "file_path": "widgetry/src/widgets/table.rs", "rank": 96, "score": 167446.34225164715 }, { "content": "// TODO If this proves useful, lift to geom\n\npub fn thick_lineseries(pts: Vec<Pt2D>, width: Distance) -> Polygon {\n\n use lyon::math::{point, Point};\n\n use lyon::path::Path;\n\n use lyon::tessellation::geometry_builder::{BuffersBuilder, Positions, VertexBuffers};\n\n use lyon::tessellation::{StrokeOptions, StrokeTessellator};\n\n\n\n 
let mut builder = Path::builder();\n\n for (idx, pt) in pts.into_iter().enumerate() {\n\n let pt = point(pt.x() as f32, pt.y() as f32);\n\n if idx == 0 {\n\n builder.move_to(pt);\n\n } else {\n\n builder.line_to(pt);\n\n }\n\n }\n\n let path = builder.build();\n\n\n\n let mut geom: VertexBuffers<Point, u32> = VertexBuffers::new();\n\n let mut buffer = BuffersBuilder::new(&mut geom, Positions);\n\n StrokeTessellator::new()\n", "file_path": "widgetry/src/widgets/plots.rs", "rank": 97, "score": 165991.56655318115 }, { "content": "pub fn people(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::BldgPeople(id)),\n\n people_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "apps/game/src/info/building.rs", "rank": 98, "score": 164269.9473396111 }, { "content": "pub fn info(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::BldgInfo(id)),\n\n info_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "apps/game/src/info/building.rs", "rank": 99, "score": 164269.9473396111 } ]
Rust
tests/expectations/tests/issue-648-derive-debug-with-padding.rs
rust-lang-nursery/rust-bindgen
5a01c551993e56d20240ef64d0ec78cd4195855d
#![allow( dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals )] #[repr(C)] #[repr(align(64))] #[derive(Copy, Clone)] pub struct NoDebug { pub c: ::std::os::raw::c_char, } #[test] fn bindgen_test_layout_NoDebug() { assert_eq!( ::std::mem::size_of::<NoDebug>(), 64usize, concat!("Size of: ", stringify!(NoDebug)) ); assert_eq!( ::std::mem::align_of::<NoDebug>(), 64usize, concat!("Alignment of ", stringify!(NoDebug)) ); assert_eq!( unsafe { let uninit = ::std::mem::MaybeUninit::<NoDebug>::uninit(); let ptr = uninit.as_ptr(); ::std::ptr::addr_of!((*ptr).c) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(NoDebug), "::", stringify!(c) ) ); } impl Default for NoDebug { fn default() -> Self { let mut s = ::std::mem::MaybeUninit::<Self>::uninit(); unsafe { ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1); s.assume_init() } } } impl ::std::cmp::PartialEq for NoDebug { fn eq(&self, other: &NoDebug) -> bool { self.c == other.c } } #[repr(C)] #[repr(align(64))] #[derive(Copy, Clone)] pub struct ShouldDeriveDebugButDoesNot { pub c: [::std::os::raw::c_char; 32usize], pub d: ::std::os::raw::c_char, } #[test] fn bindgen_test_layout_ShouldDeriveDebugButDoesNot() { assert_eq!( ::std::mem::size_of::<ShouldDeriveDebugButDoesNot>(), 64usize, concat!("Size of: ", stringify!(ShouldDeriveDebugButDoesNot)) ); assert_eq!( ::std::mem::align_of::<ShouldDeriveDebugButDoesNot>(), 64usize, concat!("Alignment of ", stringify!(ShouldDeriveDebugButDoesNot)) ); assert_eq!( unsafe { let uninit = ::std::mem::MaybeUninit::<ShouldDeriveDebugButDoesNot>::uninit( ); let ptr = uninit.as_ptr(); ::std::ptr::addr_of!((*ptr).c) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(ShouldDeriveDebugButDoesNot), "::", stringify!(c) ) ); assert_eq!( unsafe { let uninit = ::std::mem::MaybeUninit::<ShouldDeriveDebugButDoesNot>::uninit( ); let ptr = uninit.as_ptr(); ::std::ptr::addr_of!((*ptr).d) as usize - ptr as usize }, 32usize, concat!( "Offset of 
field: ", stringify!(ShouldDeriveDebugButDoesNot), "::", stringify!(d) ) ); } impl Default for ShouldDeriveDebugButDoesNot { fn default() -> Self { let mut s = ::std::mem::MaybeUninit::<Self>::uninit(); unsafe { ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1); s.assume_init() } } } impl ::std::cmp::PartialEq for ShouldDeriveDebugButDoesNot { fn eq(&self, other: &ShouldDeriveDebugButDoesNot) -> bool { self.c == other.c && self.d == other.d } }
#![allow( dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals )] #[repr(C)] #[repr(align(64))] #[derive(Copy, Clone)] pub struct NoDebug { pub c: ::std::os::raw::c_char, } #[test] fn bindgen_test_layout_NoDebug() { assert_eq!( ::std::mem::size_of::<NoDebug>(), 64usize, concat!("Size of: ", stringify!(NoDebug)) ); assert_eq!( ::std::mem::align_of::<NoDebug>(), 64usize, concat!("Alignment of ", stringify!(NoDebug)) ); assert_eq!( unsafe { let uninit = ::std::mem::MaybeUninit::<NoDebug>::uninit(); let ptr = uninit.as_ptr(); ::std::ptr::addr_of!((*ptr).c) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(NoDebug), "::", stringify!(c) ) ); } impl Default for NoDebug { fn default() -> Self { l
} impl ::std::cmp::PartialEq for NoDebug { fn eq(&self, other: &NoDebug) -> bool { self.c == other.c } } #[repr(C)] #[repr(align(64))] #[derive(Copy, Clone)] pub struct ShouldDeriveDebugButDoesNot { pub c: [::std::os::raw::c_char; 32usize], pub d: ::std::os::raw::c_char, } #[test] fn bindgen_test_layout_ShouldDeriveDebugButDoesNot() { assert_eq!( ::std::mem::size_of::<ShouldDeriveDebugButDoesNot>(), 64usize, concat!("Size of: ", stringify!(ShouldDeriveDebugButDoesNot)) ); assert_eq!( ::std::mem::align_of::<ShouldDeriveDebugButDoesNot>(), 64usize, concat!("Alignment of ", stringify!(ShouldDeriveDebugButDoesNot)) ); assert_eq!( unsafe { let uninit = ::std::mem::MaybeUninit::<ShouldDeriveDebugButDoesNot>::uninit( ); let ptr = uninit.as_ptr(); ::std::ptr::addr_of!((*ptr).c) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(ShouldDeriveDebugButDoesNot), "::", stringify!(c) ) ); assert_eq!( unsafe { let uninit = ::std::mem::MaybeUninit::<ShouldDeriveDebugButDoesNot>::uninit( ); let ptr = uninit.as_ptr(); ::std::ptr::addr_of!((*ptr).d) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(ShouldDeriveDebugButDoesNot), "::", stringify!(d) ) ); } impl Default for ShouldDeriveDebugButDoesNot { fn default() -> Self { let mut s = ::std::mem::MaybeUninit::<Self>::uninit(); unsafe { ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1); s.assume_init() } } } impl ::std::cmp::PartialEq for ShouldDeriveDebugButDoesNot { fn eq(&self, other: &ShouldDeriveDebugButDoesNot) -> bool { self.c == other.c && self.d == other.d } }
et mut s = ::std::mem::MaybeUninit::<Self>::uninit(); unsafe { ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1); s.assume_init() } }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn bindgen_test_layout_ShouldImplClone() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ShouldImplClone>(),\n\n 132usize,\n\n concat!(\"Size of: \", stringify!(ShouldImplClone))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ShouldImplClone>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ShouldImplClone))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<ShouldImplClone>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).large) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/derive-clone_1_0.rs", "rank": 0, "score": 231210.73684126727 }, { "content": "struct DoIsDefaultConstructibleImpl\n\n{\n\n template<typename T, typename = decltype(T())>\n\n static TrueType test(int);\n\n template<typename T>\n\n static FalseType test(...);\n\n};\n\n\n\ntemplate<typename T>\n", "file_path": "tests/stylo.hpp", "rank": 1, "score": 229827.10155197207 }, { "content": "struct C {\n\n B<unsigned int> mB;\n\n B<const int*> mBConstPtr;\n\n B<const mozilla::Foo*> mBConstStructPtr;\n\n B<const mozilla::Foo*[1]> mBConstStructPtrArray;\n\n B<const int> mBConst;\n\n B<volatile int> mBVolatile;\n\n B<const bool> mBConstBool;\n\n B<const char16_t> mBConstChar;\n\n B<int[1]> mBArray;\n\n B<int*[1]> mBPtrArray;\n\n B<int(*)[1]> mBArrayPtr;\n\n B<int&> mBRef;\n\n B<const int&> mBConstRef;\n\n B<int*&> mPtrRef;\n\n B<int(&)[1]> mArrayRef;\n\n B<const int[1]> mBConstArray;\n\n};\n\n\n\ntemplate<typename T>\n", "file_path": "tests/headers/template.hpp", "rank": 2, "score": 226089.24375293337 }, { "content": "struct IsDefaultConstructibleImpl : public DoIsDefaultConstructibleImpl\n\n{\n\n typedef decltype(test<T>(0)) Type;\n\n};\n\n\n\n}\n\n# 611 \"/home/fitzgen/stylo/obj-x86_64-pc-linux-gnu/dist/include/mozilla/TypeTraits.h\"\n\ntemplate<typename T>\n", "file_path": "tests/stylo.hpp", "rank": 3, "score": 221339.49489960872 }, { 
"content": "struct C;\n\n\n\n/**\n\n * <div rustbindgen opaque></div>\n\n */\n", "file_path": "tests/headers/annotation_hide.hpp", "rank": 4, "score": 219339.97922916544 }, { "content": "/// Returns a size aligned to a given value.\n\npub fn align_to(size: usize, align: usize) -> usize {\n\n if align == 0 {\n\n return size;\n\n }\n\n\n\n let rem = size % align;\n\n if rem == 0 {\n\n return size;\n\n }\n\n\n\n size + align - rem\n\n}\n\n\n", "file_path": "src/codegen/struct_layout.rs", "rank": 5, "score": 218995.51975477964 }, { "content": "pub fn gen_debug_impl(\n\n ctx: &BindgenContext,\n\n fields: &[Field],\n\n item: &Item,\n\n kind: CompKind,\n\n) -> proc_macro2::TokenStream {\n\n let struct_name = item.canonical_name(ctx);\n\n let mut format_string = format!(\"{} {{{{ \", struct_name);\n\n let mut tokens = vec![];\n\n\n\n if item.is_opaque(ctx, &()) {\n\n format_string.push_str(\"opaque\");\n\n } else {\n\n match kind {\n\n CompKind::Union => {\n\n format_string.push_str(\"union\");\n\n }\n\n CompKind::Struct => {\n\n let processed_fields = fields.iter().filter_map(|f| match f {\n\n Field::DataMember(ref fd) => fd.impl_debug(ctx, ()),\n", "file_path": "src/codegen/impl_debug.rs", "rank": 6, "score": 216357.03463971015 }, { "content": "/// Generate a manual implementation of `PartialEq` trait for the\n\n/// specified compound type.\n\npub fn gen_partialeq_impl(\n\n ctx: &BindgenContext,\n\n comp_info: &CompInfo,\n\n item: &Item,\n\n ty_for_impl: &proc_macro2::TokenStream,\n\n) -> Option<proc_macro2::TokenStream> {\n\n let mut tokens = vec![];\n\n\n\n if item.is_opaque(ctx, &()) {\n\n tokens.push(quote! {\n\n &self._bindgen_opaque_blob[..] == &other._bindgen_opaque_blob[..]\n\n });\n\n } else if comp_info.kind() == CompKind::Union {\n\n assert!(!ctx.options().rust_features().untagged_union);\n\n tokens.push(quote! {\n\n &self.bindgen_union_field[..] 
== &other.bindgen_union_field[..]\n\n });\n\n } else {\n\n for base in comp_info.base_members().iter() {\n\n if !base.requires_storage(ctx) {\n", "file_path": "src/codegen/impl_partialeq.rs", "rank": 7, "score": 216357.03463971015 }, { "content": "#[test]\n\nfn bindgen_test_layout_PubPriv() {\n\n assert_eq!(\n\n ::std::mem::size_of::<PubPriv>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(PubPriv))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<PubPriv>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(PubPriv))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<PubPriv>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).x) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/private_fields.rs", "rank": 8, "score": 214390.92245031096 }, { "content": "#[test]\n\nfn bindgen_test_layout_C() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C>(),\n\n 20usize,\n\n concat!(\"Size of: \", stringify!(C))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).d) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(C), \"::\", stringify!(d))\n\n );\n", "file_path": "tests/expectations/tests/class_with_inner_struct.rs", "rank": 9, "score": 213750.59118766495 }, { "content": "#[test]\n\nfn bindgen_test_layout_C() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C>(),\n\n 20usize,\n\n concat!(\"Size of: \", stringify!(C))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).d) as usize - ptr 
as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(C), \"::\", stringify!(d))\n\n );\n\n}\n\nimpl Clone for C {\n\n fn clone(&self) -> Self {\n\n *self\n\n }\n\n}\n", "file_path": "tests/expectations/tests/class_with_inner_struct_1_0.rs", "rank": 10, "score": 213750.59118766495 }, { "content": "#[test]\n\nfn bindgen_test_layout_WithAnonStruct() {\n\n assert_eq!(\n\n ::std::mem::size_of::<WithAnonStruct>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(WithAnonStruct))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<WithAnonStruct>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(WithAnonStruct))\n\n );\n\n}\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct WithAnonUnion {\n\n __bindgen_anon_1: WithAnonUnion__bindgen_ty_1,\n\n}\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub union WithAnonUnion__bindgen_ty_1 {\n\n pub _address: u8,\n\n}\n", "file_path": "tests/expectations/tests/private_fields.rs", "rank": 11, "score": 213747.4896161778 }, { "content": "#[test]\n\nfn bindgen_test_layout_Foo() {\n\n assert_eq!(\n\n ::std::mem::size_of::<Foo>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(Foo))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<Foo>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(Foo))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<Foo>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(Foo), \"::\", stringify!(bar))\n\n );\n\n}\n", "file_path": "tests/expectations/tests/func_ptr_in_struct.rs", "rank": 12, "score": 213744.62461206218 }, { "content": "/// Returns the lower power of two byte count that can hold at most n bits.\n\npub fn bytes_from_bits_pow2(mut n: usize) -> usize {\n\n if n == 0 {\n\n return 0;\n\n }\n\n\n\n if n <= 8 {\n\n return 1;\n\n }\n\n\n\n if !n.is_power_of_two() {\n\n n = n.next_power_of_two();\n\n }\n\n\n\n n / 8\n\n}\n\n\n", 
"file_path": "src/codegen/struct_layout.rs", "rank": 13, "score": 213370.612125481 }, { "content": "#[test]\n\nfn bindgen_test_layout_C_Segment() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C_Segment>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(C_Segment))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C_Segment>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C_Segment))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C_Segment>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).begin) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/class_with_inner_struct_1_0.rs", "rank": 14, "score": 208285.1430937646 }, { "content": "#[test]\n\nfn bindgen_test_layout_C_Segment() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C_Segment>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(C_Segment))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C_Segment>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C_Segment))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C_Segment>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).begin) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/class_with_inner_struct.rs", "rank": 15, "score": 208285.1430937646 }, { "content": "#[test]\n\nfn bindgen_test_layout_C__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(C__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C__bindgen_ty_1>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).mFunc) as usize - ptr as usize\n\n },\n\n 
0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/class_with_inner_struct.rs", "rank": 16, "score": 203170.67224041984 }, { "content": "#[test]\n\nfn bindgen_test_layout_C__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(C__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C__bindgen_ty_1>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).mFunc) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/class_with_inner_struct_1_0.rs", "rank": 17, "score": 203170.67224041984 }, { "content": "#[test]\n\nfn bindgen_test_layout_WithAnonStruct__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<WithAnonStruct__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(WithAnonStruct__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<WithAnonStruct__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(WithAnonStruct__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit =\n\n ::std::mem::MaybeUninit::<WithAnonStruct__bindgen_ty_1>::uninit(\n\n );\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n", "file_path": "tests/expectations/tests/private_fields.rs", "rank": 18, "score": 203167.78740130347 }, { "content": "#[test]\n\nfn bindgen_test_layout_WithAnonStruct__bindgen_ty_2() {\n\n assert_eq!(\n\n ::std::mem::size_of::<WithAnonStruct__bindgen_ty_2>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(WithAnonStruct__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<WithAnonStruct__bindgen_ty_2>(),\n\n 4usize,\n\n concat!(\"Alignment of \", 
stringify!(WithAnonStruct__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit =\n\n ::std::mem::MaybeUninit::<WithAnonStruct__bindgen_ty_2>::uninit(\n\n );\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).b) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(WithAnonStruct__bindgen_ty_2),\n\n \"::\",\n\n stringify!(b)\n\n )\n\n );\n\n}\n", "file_path": "tests/expectations/tests/private_fields.rs", "rank": 19, "score": 203167.78740130347 }, { "content": "#[test]\n\nfn bindgen_test_layout_NoDefault() {\n\n assert_eq!(\n\n ::std::mem::size_of::<NoDefault>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(NoDefault))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<NoDefault>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(NoDefault))\n\n );\n\n}\n", "file_path": "tests/expectations/tests/no_default_opaque.rs", "rank": 20, "score": 194067.76402201358 }, { "content": "#[test]\n\nfn bindgen_test_layout_NoDefault() {\n\n assert_eq!(\n\n ::std::mem::size_of::<NoDefault>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(NoDefault))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<NoDefault>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(NoDefault))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<NoDefault>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).i) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(NoDefault),\n\n \"::\",\n\n stringify!(i)\n\n )\n\n );\n\n}\n", "file_path": "tests/expectations/tests/no_default_allowlisted.rs", "rank": 21, "score": 194067.76402201358 }, { "content": "#[test]\n\nfn bindgen_test_layout_C__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C__bindgen_ty_1__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(C__bindgen_ty_1__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n 
::std::mem::align_of::<C__bindgen_ty_1__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C__bindgen_ty_1__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = :: std :: mem :: MaybeUninit :: < C__bindgen_ty_1__bindgen_ty_1 > :: uninit () ;\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).mX1) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/class_with_inner_struct_1_0.rs", "rank": 22, "score": 193863.65269929823 }, { "content": "#[test]\n\nfn bindgen_test_layout_C__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C__bindgen_ty_1__bindgen_ty_2>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(C__bindgen_ty_1__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C__bindgen_ty_1__bindgen_ty_2>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C__bindgen_ty_1__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = :: std :: mem :: MaybeUninit :: < C__bindgen_ty_1__bindgen_ty_2 > :: uninit () ;\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).mStepSyntax) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/class_with_inner_struct_1_0.rs", "rank": 23, "score": 193863.65269929823 }, { "content": "#[test]\n\nfn bindgen_test_layout_C__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C__bindgen_ty_1__bindgen_ty_1>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(C__bindgen_ty_1__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C__bindgen_ty_1__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C__bindgen_ty_1__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = :: std :: mem :: MaybeUninit :: < C__bindgen_ty_1__bindgen_ty_1 > :: uninit () ;\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).mX1) as usize - ptr as 
usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/class_with_inner_struct.rs", "rank": 24, "score": 193863.65269929823 }, { "content": "#[test]\n\nfn bindgen_test_layout_C__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C__bindgen_ty_1__bindgen_ty_2>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(C__bindgen_ty_1__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C__bindgen_ty_1__bindgen_ty_2>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C__bindgen_ty_1__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = :: std :: mem :: MaybeUninit :: < C__bindgen_ty_1__bindgen_ty_2 > :: uninit () ;\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).mStepSyntax) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/class_with_inner_struct.rs", "rank": 25, "score": 193863.65269929823 }, { "content": "#[test]\n\nfn bindgen_test_layout_MutPtrConstObj() {\n\n assert_eq!(\n\n ::std::mem::size_of::<MutPtrConstObj>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(MutPtrConstObj))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<MutPtrConstObj>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(MutPtrConstObj))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<MutPtrConstObj>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/derive-hash-struct-with-pointer.rs", "rank": 26, "score": 193858.46552795212 }, { "content": "#[test]\n\nfn bindgen_test_layout_ConstPtrConstObj() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ConstPtrConstObj>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ConstPtrConstObj))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ConstPtrConstObj>(),\n\n 
8usize,\n\n concat!(\"Alignment of \", stringify!(ConstPtrConstObj))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<ConstPtrConstObj>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/derive-hash-struct-with-pointer.rs", "rank": 27, "score": 193858.46552795212 }, { "content": "#[test]\n\nfn bindgen_test_layout_MutPtrMutObj() {\n\n assert_eq!(\n\n ::std::mem::size_of::<MutPtrMutObj>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(MutPtrMutObj))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<MutPtrMutObj>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(MutPtrMutObj))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<MutPtrMutObj>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/derive-hash-struct-with-pointer.rs", "rank": 28, "score": 193858.46552795212 }, { "content": "#[test]\n\nfn bindgen_test_layout_ConstPtrMutObj() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ConstPtrMutObj>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ConstPtrMutObj))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ConstPtrMutObj>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(ConstPtrMutObj))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<ConstPtrMutObj>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/derive-hash-struct-with-pointer.rs", "rank": 29, "score": 193858.46552795212 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo>(),\n\n 8usize,\n\n 
concat!(\"Size of: \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(foo), \"::\", stringify!(bar))\n\n );\n\n}\n", "file_path": "tests/expectations/tests/struct_with_struct.rs", "rank": 30, "score": 193152.3072273081 }, { "content": "/// Instantiate a Quickcheck object and use it to run property tests using\n\n/// fuzzed C headers generated with types defined in the `fuzzers` module.\n\n/// Success/Failure is dictated by the result of passing the fuzzed headers\n\n/// to the `csmith-fuzzing/predicate.py` script.\n\npub fn test_bindgen(generate_range: usize, tests: usize, output_path: Option<&str>) {\n\n match output_path {\n\n Some(path) => {\n\n CONTEXT.lock().unwrap().output_path =\n\n Some(String::from(PathBuf::from(path).to_str().unwrap()));\n\n }\n\n None => {} // Path not specified, don't provide output.\n\n }\n\n\n\n QuickCheck::new()\n\n .tests(tests)\n\n .gen(StdGen::new(thread_rng(), generate_range))\n\n .quickcheck(bindgen_prop as fn(fuzzers::HeaderC) -> TestResult)\n\n}\n", "file_path": "tests/quickchecking/src/lib.rs", "rank": 31, "score": 191296.92781830594 }, { "content": "#[test]\n\nfn bindgen_test_layout_ShouldDeriveClone() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ShouldDeriveClone>(),\n\n 132usize,\n\n concat!(\"Size of: \", stringify!(ShouldDeriveClone))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ShouldDeriveClone>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ShouldDeriveClone))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<ShouldDeriveClone>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).large) as usize - ptr as usize\n\n },\n\n 
0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/derive-clone.rs", "rank": 32, "score": 189871.74393091106 }, { "content": "#[test]\n\nfn bindgen_test_layout_Foo() {\n\n assert_eq!(\n\n ::std::mem::size_of::<Foo>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(Foo))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<Foo>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(Foo))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<Foo>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).callback) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/derive-fn-ptr.rs", "rank": 33, "score": 189747.66390228685 }, { "content": "#[test]\n\nfn bindgen_test_layout_Bar() {\n\n assert_eq!(\n\n ::std::mem::size_of::<Bar>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(Bar))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<Bar>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(Bar))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<Bar>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).callback) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(Bar),\n\n \"::\",\n\n stringify!(callback)\n\n )\n\n );\n\n}\n", "file_path": "tests/expectations/tests/derive-fn-ptr.rs", "rank": 34, "score": 189747.66390228685 }, { "content": "struct StorePtrPassByPtr\n\n{\n\n typedef T* stored_type;\n\n typedef T* passed_type;\n\n stored_type m;\n\n template <typename A>\n\n StorePtrPassByPtr(A a) : m(a) {}\n\n passed_type PassAsParameter() { return m; }\n\n};\n\ntemplate<typename S>\n", "file_path": "tests/stylo.hpp", "rank": 35, "score": 189092.11263035453 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo>(),\n\n 8usize,\n\n concat!(\"Size of: \", 
stringify!(foo))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(foo), \"::\", stringify!(bar))\n\n );\n\n}\n", "file_path": "tests/expectations/tests/struct_with_anon_struct.rs", "rank": 36, "score": 188946.4050372567 }, { "content": "fn gen_field(\n\n ctx: &BindgenContext,\n\n ty_item: &Item,\n\n name: &str,\n\n) -> proc_macro2::TokenStream {\n\n fn quote_equals(\n\n name_ident: proc_macro2::Ident,\n\n ) -> proc_macro2::TokenStream {\n\n quote! { self.#name_ident == other.#name_ident }\n\n }\n\n\n\n let name_ident = ctx.rust_ident(name);\n\n let ty = ty_item.expect_type();\n\n\n\n match *ty.kind() {\n\n TypeKind::Void |\n\n TypeKind::NullPtr |\n\n TypeKind::Int(..) |\n\n TypeKind::Complex(..) |\n\n TypeKind::Float(..) 
|\n", "file_path": "src/codegen/impl_partialeq.rs", "rank": 37, "score": 185849.65637609235 }, { "content": "#[test]\n\nfn bindgen_test_layout_ShouldNotDeriveDefault() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ShouldNotDeriveDefault>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(ShouldNotDeriveDefault))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ShouldNotDeriveDefault>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ShouldNotDeriveDefault))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit =\n\n ::std::mem::MaybeUninit::<ShouldNotDeriveDefault>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n", "file_path": "tests/expectations/tests/derive-default-and-blocklist.rs", "rank": 38, "score": 185846.81690887804 }, { "content": "#[test]\n\nfn bindgen_test_layout_PrivateBitFields() {\n\n assert_eq!(\n\n ::std::mem::size_of::<PrivateBitFields>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(PrivateBitFields))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<PrivateBitFields>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(PrivateBitFields))\n\n );\n\n}\n\nimpl PrivateBitFields {\n\n #[inline]\n\n fn a(&self) -> ::std::os::raw::c_uint {\n\n unsafe {\n\n ::std::mem::transmute(self._bitfield_1.get(0usize, 4u8) as u32)\n\n }\n\n }\n\n #[inline]\n", "file_path": "tests/expectations/tests/private_fields.rs", "rank": 39, "score": 185833.75424942974 }, { "content": "#[test]\n\nfn bindgen_test_layout_MixedBitFields() {\n\n assert_eq!(\n\n ::std::mem::size_of::<MixedBitFields>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(MixedBitFields))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<MixedBitFields>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(MixedBitFields))\n\n );\n\n}\n\nimpl MixedBitFields {\n\n #[inline]\n\n fn a(&self) -> ::std::os::raw::c_uint {\n\n unsafe {\n\n ::std::mem::transmute(self._bitfield_1.get(0usize, 4u8) as u32)\n\n 
}\n\n }\n\n #[inline]\n", "file_path": "tests/expectations/tests/private_fields.rs", "rank": 40, "score": 185833.75424942974 }, { "content": "#[test]\n\nfn bindgen_test_layout_PublicBitFields() {\n\n assert_eq!(\n\n ::std::mem::size_of::<PublicBitFields>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(PublicBitFields))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<PublicBitFields>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(PublicBitFields))\n\n );\n\n}\n\nimpl PublicBitFields {\n\n #[inline]\n\n pub fn a(&self) -> ::std::os::raw::c_uint {\n\n unsafe {\n\n ::std::mem::transmute(self._bitfield_1.get(0usize, 4u8) as u32)\n\n }\n\n }\n\n #[inline]\n", "file_path": "tests/expectations/tests/private_fields.rs", "rank": 41, "score": 185833.75424942974 }, { "content": "struct DoIsDestructibleImpl\n\n{\n\n template<typename T, typename = decltype(DeclVal<T&>().~T())>\n\n static TrueType test(int);\n\n template<typename T>\n\n static FalseType test(...);\n\n};\n\n\n\ntemplate<typename T>\n", "file_path": "tests/stylo.hpp", "rank": 42, "score": 185683.46464364725 }, { "content": "struct TupleImpl;\n\n\n\n\n\n\n\n\n\n\n\ntemplate<std::size_t Index>\n", "file_path": "tests/stylo.hpp", "rank": 43, "score": 185683.46464364725 }, { "content": "struct VectorImpl\n\n{\n\n\n\n\n\n\n\n template<typename... Args>\n\n __attribute__ ((nonnull(1)))\n\n static inline void new_(T* aDst, Args&&... 
aArgs)\n\n {\n\n new(KnownNotNull, aDst) T(Forward<Args>(aArgs)...);\n\n }\n\n\n\n\n\n static inline void destroy(T* aBegin, T* aEnd)\n\n {\n\n do { } while (0);\n\n for (T* p = aBegin; p < aEnd; ++p) {\n\n p->~T();\n\n }\n\n }\n", "file_path": "tests/stylo.hpp", "rank": 44, "score": 185683.46464364725 }, { "content": "struct IsInRangeImpl {};\n\n\n\ntemplate<typename T, typename U, bool IsTSigned, bool IsUSigned>\n", "file_path": "tests/stylo.hpp", "rank": 45, "score": 185683.46464364725 }, { "content": "struct NegateImpl;\n\n\n\ntemplate<typename T>\n", "file_path": "tests/stylo.hpp", "rank": 46, "score": 185683.46464364725 }, { "content": "struct IsDefaultConstructible\n\n : public detail::IsDefaultConstructibleImpl<T>::Type\n\n{};\n\n\n\nnamespace detail {\n\n\n", "file_path": "tests/stylo.hpp", "rank": 47, "score": 185677.93381458617 }, { "content": "struct DefaultHasher\n\n{\n\n typedef Key Lookup;\n\n static HashNumber hash(const Lookup& l) {\n\n\n\n return l;\n\n }\n\n static bool match(const Key& k, const Lookup& l) {\n\n\n\n return k == l;\n\n }\n\n static void rekey(Key& k, const Key& newKey) {\n\n k = newKey;\n\n }\n\n};\n\n\n\n\n\n\n\ntemplate <class T>\n", "file_path": "tests/stylo.hpp", "rank": 48, "score": 185677.93381458617 }, { "content": "struct IsSmartPtr\n\n{\n\n private: typedef char yes[1]; typedef char no[2];\n\n\n\n private: template<typename V> static yes& Checkget(char (*)[sizeof(&V::get) + 1]); template<typename V> static no& Checkget(...); public: static bool const value = sizeof(Checkget<T>(nullptr)) == sizeof(yes);\n\n};\n\n\n\ntemplate<class T>\n", "file_path": "tests/stylo.hpp", "rank": 49, "score": 185661.92449592164 }, { "content": "struct IsFunPtr;\n\n\n\ntemplate<typename>\n", "file_path": "tests/stylo.hpp", "rank": 50, "score": 185661.92449592164 }, { "content": "struct IsFunPtr\n\n : public FalseType\n\n{};\n\n\n\ntemplate<typename Result, typename... 
ArgTypes>\n", "file_path": "tests/stylo.hpp", "rank": 51, "score": 185661.92449592164 }, { "content": "#[test]\n\nfn bindgen_test_layout_b() {\n\n assert_eq!(\n\n ::std::mem::size_of::<b>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(b))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<b>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(b))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<b>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).val_b) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(b), \"::\", stringify!(val_b))\n\n );\n\n}\n", "file_path": "tests/expectations/tests/struct_containing_forward_declared_struct.rs", "rank": 52, "score": 184978.46569216106 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo))\n\n );\n\n}\n", "file_path": "tests/expectations/tests/struct_with_anon_unnamed_struct.rs", "rank": 53, "score": 184978.46569216106 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo>(),\n\n 208usize,\n\n concat!(\"Size of: \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(foo), \"::\", stringify!(bar))\n\n );\n", "file_path": "tests/expectations/tests/struct_with_anon_struct_array.rs", "rank": 54, "score": 184978.46569216106 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo__bindgen_ty_1() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<foo__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo__bindgen_ty_1>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).x) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/struct_with_struct.rs", "rank": 55, "score": 184978.46569216106 }, { "content": "#[test]\n\nfn bindgen_test_layout_typedef_named_struct() {\n\n assert_eq!(\n\n ::std::mem::size_of::<typedef_named_struct>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(typedef_named_struct))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<typedef_named_struct>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(typedef_named_struct))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit =\n\n ::std::mem::MaybeUninit::<typedef_named_struct>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).has_name) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n", "file_path": "tests/expectations/tests/struct_typedef.rs", "rank": 56, "score": 184978.46569216106 }, { "content": "#[test]\n\nfn bindgen_test_layout_a() {\n\n assert_eq!(\n\n ::std::mem::size_of::<a>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(a))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<a>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(a))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<a>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).val_a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(a), \"::\", stringify!(val_a))\n\n );\n", "file_path": "tests/expectations/tests/struct_containing_forward_declared_struct.rs", 
"rank": 57, "score": 184978.46569216106 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(foo), \"::\", stringify!(bar))\n\n );\n", "file_path": "tests/expectations/tests/struct_with_anon_struct_pointer.rs", "rank": 58, "score": 184978.46569216106 }, { "content": "struct ShouldNotDeriveDefault {\n\n BlocklistMe a;\n\n};\n", "file_path": "tests/headers/derive-default-and-blocklist.hpp", "rank": 59, "score": 183675.4180016505 }, { "content": "struct StoreRefPtrPassByPtr\n\n{\n\n typedef RefPtr<T> stored_type;\n\n typedef T* passed_type;\n\n stored_type m;\n\n template <typename A>\n\n StoreRefPtrPassByPtr(A&& a) : m(mozilla::Forward<A>(a)) {}\n\n passed_type PassAsParameter() { return m.get(); }\n\n};\n\ntemplate<typename S>\n", "file_path": "tests/stylo.hpp", "rank": 60, "score": 183656.4820872426 }, { "content": "#[test]\n\nfn bindgen_test_layout_ShouldManuallyImplDebug() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ShouldManuallyImplDebug>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(ShouldManuallyImplDebug))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ShouldManuallyImplDebug>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ShouldManuallyImplDebug))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit =\n\n ::std::mem::MaybeUninit::<ShouldManuallyImplDebug>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n", "file_path": "tests/expectations/tests/blocklist-and-impl-debug.rs", "rank": 
61, "score": 182080.1293939349 }, { "content": "#[test]\n\nfn bindgen_test_layout_A() {\n\n assert_eq!(\n\n ::std::mem::size_of::<A>(),\n\n 24usize,\n\n concat!(\"Size of: \", stringify!(A))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<A>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(A))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<A>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).len) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(A), \"::\", stringify!(len))\n\n );\n", "file_path": "tests/expectations/tests/size_t_is_usize.rs", "rank": 62, "score": 181925.5944254218 }, { "content": "#[test]\n\nfn bindgen_test_layout_C() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C>(),\n\n 1usize,\n\n concat!(\"Size of: \", stringify!(C))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(C))\n\n );\n\n}\n\nimpl C {\n\n #[inline]\n\n pub fn a(&self) -> bool {\n\n unsafe {\n\n ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u8)\n\n }\n\n }\n\n #[inline]\n", "file_path": "tests/expectations/tests/only_bitfields.rs", "rank": 63, "score": 181889.37270815554 }, { "content": "#[test]\n\nfn bindgen_test_layout_C() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(C))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(C), \"::\", stringify!(a))\n\n );\n", "file_path": "tests/expectations/tests/class_1_0.rs", "rank": 64, "score": 181889.37270815554 }, { "content": "#[test]\n\nfn bindgen_test_layout_C() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<C>(),\n\n 40usize,\n\n concat!(\"Size of: \", stringify!(C))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(C), \"::\", stringify!(a))\n\n );\n", "file_path": "tests/expectations/tests/class.rs", "rank": 65, "score": 181889.37270815554 }, { "content": "#[test]\n\nfn bindgen_test_layout_C() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C>(),\n\n 104usize,\n\n concat!(\"Size of: \", stringify!(C))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(C))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).mB) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(C), \"::\", stringify!(mB))\n\n );\n", "file_path": "tests/expectations/tests/template.rs", "rank": 66, "score": 181889.37270815554 }, { "content": "#[test]\n\nfn bindgen_test_layout_C() {\n\n assert_eq!(\n\n ::std::mem::size_of::<C>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(C))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<C>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(C))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<C>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).m_member) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/mutable.rs", "rank": 67, "score": 181889.37270815554 }, { "content": "#[test]\n\nfn bindgen_test_layout_ptr_t() {\n\n assert_eq!(\n\n ::std::mem::size_of::<ptr_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", 
stringify!(ptr_t))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<ptr_t>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ptr_t))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<ptr_t>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).__) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(ptr_t), \"::\", stringify!(__))\n\n );\n\n}\n", "file_path": "tests/expectations/tests/underscore.rs", "rank": 68, "score": 181882.9214630102 }, { "content": "struct DefaultHasher<mozilla::UniquePtr<T, D>>\n\n{\n\n using Lookup = mozilla::UniquePtr<T, D>;\n\n using PtrHasher = PointerHasher<T*, mozilla::tl::FloorLog2<sizeof(void*)>::value>;\n\n\n\n static HashNumber hash(const Lookup& l) {\n\n return PtrHasher::hash(l.get());\n\n }\n\n static bool match(const mozilla::UniquePtr<T, D>& k, const Lookup& l) {\n\n return PtrHasher::match(k.get(), l.get());\n\n }\n\n static void rekey(mozilla::UniquePtr<T, D>& k, mozilla::UniquePtr<T, D>&& newKey) {\n\n k = mozilla::Move(newKey);\n\n }\n\n};\n\n\n\n\n\ntemplate <>\n", "file_path": "tests/stylo.hpp", "rank": 69, "score": 181345.24025107786 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo__bindgen_ty_1>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/struct_with_anon_struct.rs", "rank": 70, "score": 181227.83721354097 }, { "content": "#[test]\n\nfn bindgen_test_layout_struct_a() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<struct_a>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(struct_a))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<struct_a>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(struct_a))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<struct_a>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/c_naming.rs", "rank": 71, "score": 181210.9181763964 }, { "content": "#[test]\n\nfn bindgen_test_layout_a() {\n\n assert_eq!(\n\n ::std::mem::size_of::<a>(),\n\n 3usize,\n\n concat!(\"Size of: \", stringify!(a))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<a>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(a))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<a>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).b) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(a), \"::\", stringify!(b))\n\n );\n", "file_path": "tests/expectations/tests/struct_with_packing.rs", "rank": 72, "score": 181210.9181763964 }, { "content": "// bindgen-flags: --with-derive-hash --with-derive-partialeq --with-derive-eq --rustified-enum \".*\"\n\nstruct Test {\n\n int foo;\n\n float bar;\n\n enum { T_NONE };\n\n};\n\n\n\ntypedef enum {\n\n Foo,\n\n Bar,\n\n} Baz;\n", "file_path": "tests/headers/anon_enum.hpp", "rank": 73, "score": 180516.2634516201 }, { "content": "struct Test {\n\n nsTArray<long> a;\n\n};\n", "file_path": "tests/headers/replace_use.hpp", "rank": 74, "score": 180516.2634516201 }, { "content": "struct Test {\n\n uint64_t foo;\n\n uint64_t x : 56;\n\n uint64_t y : 8;\n\n};\n", "file_path": "tests/headers/bitfield-linux-32.hpp", "rank": 75, "score": 180516.2634516201 }, { "content": "struct Test {};\n\nextern \"C\" void Servo_Test(Test* a);\n", "file_path": 
"tests/headers/allowlist_fix.hpp", "rank": 76, "score": 180516.2634516201 }, { "content": "// bindgen-flags: --enable-cxx-namespaces\n\nstruct Test {};\n", "file_path": "tests/headers/module-allowlisted.hpp", "rank": 77, "score": 180516.2634516201 }, { "content": "struct ServoUnsafeCell {\n\n T value;\n\n\n\n\n\n ServoUnsafeCell() : value() {};\n\n};\n\n\n\ntemplate<typename T>\n", "file_path": "tests/stylo.hpp", "rank": 78, "score": 178964.8313872125 }, { "content": "struct AllUnsafeAccessors {\n\n int mBothAccessors;\n\n int mAlsoBothAccessors;\n\n};\n\n\n\n/** <div rustbindgen accessor></div> */\n", "file_path": "tests/headers/accessors.hpp", "rank": 79, "score": 178964.8313872125 }, { "content": "struct is_allowed_extent_conversion\n\n : public mozilla::IntegralConstant<bool,\n\n From == To ||\n\n From == mozilla::dynamic_extent ||\n\n To == mozilla::dynamic_extent>\n\n{\n\n};\n\n\n\ntemplate<class From, class To>\n", "file_path": "tests/stylo.hpp", "rank": 80, "score": 178963.36286209582 }, { "content": "struct IsModValidImpl;\n\n\n\ntemplate<typename T>\n\ninline bool\n\nIsModValid(T aX, T aY)\n\n{\n\n return IsModValidImpl<T>::run(aX, aY);\n\n}\n\n# 376 \"/home/fitzgen/stylo/obj-x86_64-pc-linux-gnu/dist/include/mozilla/CheckedInt.h\"\n\ntemplate<typename T>\n", "file_path": "tests/stylo.hpp", "rank": 81, "score": 178933.56012233437 }, { "content": "struct IsMulValidImpl {};\n\n\n\ntemplate<typename T, bool IsTSigned>\n", "file_path": "tests/stylo.hpp", "rank": 82, "score": 178933.56012233437 }, { "content": "struct DeferredFinalizerImpl\n\n{\n\n typedef typename Conditional<IsSame<T, nsISupports>::value,\n\n nsCOMPtr<T>,\n\n typename Conditional<IsRefcounted<T>::value,\n\n RefPtr<T>,\n\n nsAutoPtr<T>>::Type>::Type SmartPtr;\n\n typedef SegmentedVector<SmartPtr> SmartPtrArray;\n\n\n\n static_assert(IsSame<T, nsISupports>::value || !IsBaseOf<nsISupports, T>::value,\n\n \"nsISupports classes should all use the nsISupports instantiation\");\n\n\n\n static inline 
void\n\n AppendAndTake(SegmentedVector<nsCOMPtr<nsISupports>>& smartPtrArray, nsISupports* ptr)\n\n {\n\n smartPtrArray.InfallibleAppend(dont_AddRef(ptr));\n\n }\n\n template<class U>\n\n static inline void\n\n AppendAndTake(SegmentedVector<RefPtr<U>>& smartPtrArray, U* ptr)\n", "file_path": "tests/stylo.hpp", "rank": 83, "score": 178933.56012233437 }, { "content": "struct PointerTypeImpl\n\n{\n\n typedef typename D::pointer Type;\n\n};\n\n\n\ntemplate <class T, class D>\n", "file_path": "tests/stylo.hpp", "rank": 84, "score": 178933.56012233437 }, { "content": "struct CheckConvertibilityImpl;\n\n\n\ntemplate<typename Source, typename Target>\n", "file_path": "tests/stylo.hpp", "rank": 85, "score": 178933.56012233437 }, { "content": "struct SelectResultImpl\n\n{\n\n static const PackingStrategy value =\n\n (IsEmpty<V>::value && UnusedZero<E>::value)\n\n ? PackingStrategy::NullIsOk\n\n : (detail::HasFreeLSB<V>::value && detail::HasFreeLSB<E>::value)\n\n ? PackingStrategy::LowBitTagIsError\n\n : (IsDefaultConstructible<V>::value && IsDefaultConstructible<E>::value &&\n\n IsPackableVariant<V, E>::value)\n\n ? 
PackingStrategy::PackedVariant\n\n : PackingStrategy::Variant;\n\n\n\n using Type = detail::ResultImplementation<V, E, value>;\n\n};\n\n\n\ntemplate <typename T>\n", "file_path": "tests/stylo.hpp", "rank": 86, "score": 178933.56012233437 }, { "content": "struct BoundsCheckImpl;\n\n\n\n\n\n\n\n\n\n\n\n\n", "file_path": "tests/stylo.hpp", "rank": 87, "score": 178933.56012233437 }, { "content": "struct CStringHasher\n\n{\n\n typedef const char* Lookup;\n\n static js::HashNumber hash(Lookup l) {\n\n return mozilla::HashString(l);\n\n }\n\n static bool match(const char* key, Lookup lookup) {\n\n return strcmp(key, lookup) == 0;\n\n }\n\n};\n\n# 708 \"/home/fitzgen/stylo/obj-x86_64-pc-linux-gnu/dist/include/js/HashTable.h\"\n\ntemplate <typename HashPolicy>\n", "file_path": "tests/stylo.hpp", "rank": 88, "score": 178920.36259774605 }, { "content": "struct UniquePtr_Simple {\n\n T* mPtr;\n\n};\n\n\n\nstatic_assert(sizeof(mozilla::UniquePtr<int>) == sizeof(UniquePtr_Simple<int>), \"Size mismatch between \" \"mozilla::UniquePtr<int>\" \" and \" \"UniquePtr_Simple<int>\"); static_assert(alignof(mozilla::UniquePtr<int>) == alignof(UniquePtr_Simple<int>), \"Align mismatch between \" \"mozilla::UniquePtr<int>\" \" and \" \"UniquePtr_Simple<int>\");;\n\n\n\n\n\n\n\n\n\ntemplate<typename T>\n", "file_path": "tests/stylo.hpp", "rank": 89, "score": 178913.01622534855 }, { "content": "struct RefPtrTraits\n\n{\n\n static void AddRef(U* aPtr) {\n\n aPtr->AddRef();\n\n }\n\n static void Release(U* aPtr) {\n\n aPtr->Release();\n\n }\n\n};\n\n\n\n}\n\n\n\ntemplate <class T>\n", "file_path": "tests/stylo.hpp", "rank": 90, "score": 178913.01622534852 }, { "content": "struct StrongPtrForMember\n\n{\n\n typedef typename Conditional<IsRefcounted<T>::value,\n\n RefPtr<T>, nsAutoPtr<T>>::Type Type;\n\n};\n\n\n\nnamespace binding_detail {\n\ninline\n\nJSObject*\n\nGetHackedNamespaceProtoObject(JSContext* aCx)\n\n{\n\n return JS_NewPlainObject(aCx);\n\n}\n\n}\n\n\n\n\n\n\n\n\n\nbool 
SystemGlobalResolve(JSContext* cx, JS::Handle<JSObject*> obj,\n\n JS::Handle<jsid> id, bool* resolvedp);\n", "file_path": "tests/stylo.hpp", "rank": 91, "score": 178913.01622534855 }, { "content": "struct ShouldManuallyImplDebug {\n\n BlocklistMe a;\n\n};\n", "file_path": "tests/headers/blocklist-and-impl-debug.hpp", "rank": 92, "score": 178639.7060205828 }, { "content": "struct StoreConstPtrPassByConstPtr\n\n{\n\n typedef const T* stored_type;\n\n typedef const T* passed_type;\n\n stored_type m;\n\n template <typename A>\n\n StoreConstPtrPassByConstPtr(A a) : m(a) {}\n\n passed_type PassAsParameter() { return m; }\n\n};\n\ntemplate<typename S>\n", "file_path": "tests/stylo.hpp", "rank": 93, "score": 178614.93852527958 }, { "content": "struct nsRunnableMethodTraits<PtrType, R(C::*)(As...), Owning, Cancelable>\n\n{\n\n typedef typename mozilla::RemoveRawOrSmartPointer<PtrType>::Type class_type;\n\n static_assert(mozilla::IsBaseOf<C, class_type>::value,\n\n \"Stored class must inherit from method's class\");\n\n typedef R return_type;\n\n typedef nsRunnableMethod<C, R, Owning, Cancelable> base_type;\n\n static const bool can_cancel = Cancelable;\n\n};\n\n\n\ntemplate<typename PtrType, class C, typename R, bool Owning, bool Cancelable, typename... 
As>\n", "file_path": "tests/stylo.hpp", "rank": 94, "score": 177758.07451719846 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo__bindgen_ty_2() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo__bindgen_ty_2>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(foo__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo__bindgen_ty_2>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo__bindgen_ty_2))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo__bindgen_ty_2>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/struct_with_anon_struct_array.rs", "rank": 95, "score": 177676.2900302835 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo__bindgen_ty_1>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/struct_with_anon_struct_array.rs", "rank": 96, "score": 177676.2900302835 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = 
::std::mem::MaybeUninit::<foo__bindgen_ty_1>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/struct_with_anon_unnamed_struct.rs", "rank": 97, "score": 177676.2900302835 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).bar) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\"Offset of field: \", stringify!(foo), \"::\", stringify!(bar))\n\n );\n\n}\n", "file_path": "tests/expectations/tests/derive-hash-struct-with-anon-struct-float.rs", "rank": 98, "score": 177676.2900302835 }, { "content": "#[test]\n\nfn bindgen_test_layout_foo__bindgen_ty_1() {\n\n assert_eq!(\n\n ::std::mem::size_of::<foo__bindgen_ty_1>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<foo__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(foo__bindgen_ty_1))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n let uninit = ::std::mem::MaybeUninit::<foo__bindgen_ty_1>::uninit();\n\n let ptr = uninit.as_ptr();\n\n ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n", "file_path": "tests/expectations/tests/struct_with_anon_struct_pointer.rs", "rank": 99, "score": 177676.2900302835 } ]
Rust
src/modules/centerdevice/auth.rs
dschneller/ceres
9449011264d17fdd4b03a262930d8fe19ff9dc8f
use clams::prelude::{Config as ClamsConfig}; use clap::{App, Arg, ArgMatches, SubCommand}; use centerdevice::{CenterDevice, Client, ClientCredentials, Token}; use centerdevice::client::AuthorizedClient; use centerdevice::client::auth::{Code, CodeProvider, IntoUrl}; use centerdevice::errors::{Result as CenterDeviceResult}; use failure::Fail; use std::io; use std::io::Write; use std::convert::TryInto; use config::{CeresConfig as Config, CenterDevice as CenterDeviceConfig, Profile}; use run_config::RunConfig; use modules::{Result as ModuleResult, Error as ModuleError, ErrorKind as ModuleErrorKind, Module}; use modules::centerdevice::errors::*; pub const NAME: &str = "auth"; pub struct SubModule; impl Module for SubModule { fn build_sub_cli() -> App<'static, 'static> { SubCommand::with_name(NAME) .about("Authenticate with CenterDevice") .arg( Arg::with_name("refresh") .short("r") .long("refresh") .help("Just refresh token without re-authentication"), ) .arg( Arg::with_name("show") .short("s") .long("show") .required_unless("save") .help("On successful authentication, print the received token to stdout"), ) .arg( Arg::with_name("save") .short("S") .long("save") .required_unless("show") .help("On successful authentication, save the received token to configuration file"), ) } fn call(cli_args: Option<&ArgMatches>, run_config: &RunConfig, config: &Config) -> ModuleResult<()> { let args = cli_args.unwrap(); do_call(args, run_config, config) .map_err(|e| ModuleError::with_chain(e, ModuleErrorKind::ModuleFailed(NAME.to_owned()))) } } struct CliCodeProvider {} impl CodeProvider for CliCodeProvider { fn get_code<T: IntoUrl>(&self, auth_url: T) -> CenterDeviceResult<Code> { let auth_url = auth_url.into_url().expect("Failed to parse auth url"); println!("Please authenticate at the following URL, wait for the redirect, enter the code into the terminal, and then press return ..."); println!("\n\t{}\n", auth_url); print!("Authentication code: "); let _ = std::io::stdout().flush(); 
let mut input = String::new(); let _ = io::stdin().read_line(&mut input); let code = input.trim(); let code = Code::new(code.to_string()); Ok(code) } } fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> { let profile = match run_config.active_profile.as_ref() { "default" => config.get_default_profile(), s => config.get_profile(s), }.chain_err(|| ErrorKind::FailedToParseCmd("profile".to_string()))?; let centerdevice = profile.centerdevice.as_ref().ok_or( Error::from_kind(ErrorKind::NoCenterDeviceInProfile) )?; let token = if args.is_present("refresh") { refresh_token(&centerdevice)? } else { get_token(&centerdevice)? }; debug!("{:#?}", token); if args.is_present("show") { println!("{:#?}", token); } if args.is_present("save") { save_token(run_config, config, &token) .chain_err(|| ErrorKind::FailedToSaveToken)?; } Ok(()) } fn get_token(centerdevice: &CenterDeviceConfig) -> Result<Token> { let client_credentials = ClientCredentials::new( &centerdevice.client_id, &centerdevice.client_secret, ); let code_provider = CliCodeProvider {}; info!("Authenticating with CenterDevice at {}", centerdevice.base_domain); let client = Client::new(&centerdevice.base_domain, client_credentials) .authorize_with_code_flow(&centerdevice.redirect_uri, &code_provider) .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi))?; info!("Successfully authenticated."); Ok(client.token().clone()) } fn refresh_token(centerdevice: &CenterDeviceConfig) -> Result<Token> { info!("Refreshing token with CenterDevice at {}", centerdevice.base_domain); let client: AuthorizedClient = centerdevice.try_into()?; let token = client.refresh_access_token() .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi))?; info!("Successfully re-energized."); Ok(token) } fn save_token(run_config: &RunConfig, config: &Config, token: &Token) -> Result<()> { let new_config = update_config(run_config, config, token)?; 
new_config.save(run_config.active_config) .chain_err(|| ErrorKind::FailedToSaveConfig)?; Ok(()) } fn update_config(run_config: &RunConfig, config: &Config, token: &Token) -> Result<Config> { let profile = match run_config.active_profile.as_ref() { "default" => config.get_default_profile(), s => config.get_profile(s), }.chain_err(|| ErrorKind::FailedToParseCmd("profile".to_string()))?; let centerdevice = profile.centerdevice.as_ref().ok_or( Error::from_kind(ErrorKind::NoCenterDeviceInProfile) )?; let centerdevice = CenterDeviceConfig { access_token: Some(token.access_token().to_string()), refresh_token: Some(token.refresh_token().to_string()), ..(*centerdevice).clone() }; let profile = Profile { centerdevice: Some(centerdevice), ..(*profile).clone() }; let profile_name = match run_config.active_profile.as_ref() { "default" => config.default_profile.clone(), s => s.to_string(), }; let mut profiles = config.profiles.clone(); profiles.insert(profile_name, profile); let new_config = Config { profiles, ..(*config).clone() }; Ok(new_config) }
use clams::prelude::{Config as ClamsConfig}; use clap::{App, Arg, ArgMatches, SubCommand}; use centerdevice::{CenterDevice, Client, ClientCredentials, Token}; use centerdevice::client::AuthorizedClient; use centerdevice::client::auth::{Code, CodeProvider, IntoUrl}; use centerdevice::errors::{Result as CenterDeviceResult}; use failure::Fail; use std::io; use std::io::Write; use std::convert::TryInto; use config::{CeresConfig as Config, CenterDevice as CenterDeviceConfig, Profile}; use run_config::RunConfig; use modules::{Result as ModuleResult, Error as ModuleError, ErrorKind as ModuleErrorKind, Module}; use modules::centerdevice::errors::*; pub const NAME: &str = "auth"; pub struct SubModule; impl Module for SubModule { fn build_sub_cli() -> App<'static, 'static> { SubCommand::with_name(NAME) .about("Authenticate with CenterDevice") .arg( Arg::with_name("refresh") .short("r") .long("refresh") .help("Just refresh token without re-authentication"), ) .arg( Arg::with_name("show") .short("s") .long("show") .required_unless("save") .help("On successful authentication, print the received token to stdout"), ) .arg( Arg::with_name("save") .short("S") .long("save") .required_unless("show") .help("On successful authentication, save the received token to configuration file"), ) } fn call(cli_args: Option<&ArgMatches>, run_config: &RunConfig, config: &Config) -> ModuleResult<()> { let args = cli_args.unwrap(); do_call(args, run_config, config) .map_err(|e| ModuleError::with_chain(e, ModuleErrorKind::ModuleFailed(NAME.to_owned()))) } } struct CliCodeProvider {} impl CodeProvider for CliCodeProvider { fn get_code<T: IntoUrl>(&self, auth_url: T) -> CenterDeviceResult<Code> { let auth_url = auth_url.into_url().expect("Failed to parse auth url"); println!("Please authenticate at the following URL, wait for the redirect, enter the code into the terminal, and then press return ..."); println!("\n\t{}\n", auth_url); print!("Authentication code: "); let _ = std::io::stdout().flush(); 
let mut input = String::new(); let _ = io::stdin().read_line(&mut input); let code = input.trim(); let code = Code::new(code.to_string()); Ok(code) } } fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> { let profile = match run_config.active_profile.as_ref() { "default" => config.get_default_profile(), s => config.get_profile(s), }.chain_err(|| ErrorKind::FailedToParseCmd("profile".to_string()))?; let centerdevice = profile.centerdevice.as_ref().ok_or( Error::from_kind(ErrorKind::NoCenterDeviceInProfile) )?; let token = if args.is_present("refresh") { refresh_token(&centerdevice)? } else { get_token(&centerdevice)? }; debug!("{:#?}", token); if args.is_present("show") { println!("{:#?}", token); }
Ok(()) } fn get_token(centerdevice: &CenterDeviceConfig) -> Result<Token> { let client_credentials = ClientCredentials::new( &centerdevice.client_id, &centerdevice.client_secret, ); let code_provider = CliCodeProvider {}; info!("Authenticating with CenterDevice at {}", centerdevice.base_domain); let client = Client::new(&centerdevice.base_domain, client_credentials) .authorize_with_code_flow(&centerdevice.redirect_uri, &code_provider) .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi))?; info!("Successfully authenticated."); Ok(client.token().clone()) } fn refresh_token(centerdevice: &CenterDeviceConfig) -> Result<Token> { info!("Refreshing token with CenterDevice at {}", centerdevice.base_domain); let client: AuthorizedClient = centerdevice.try_into()?; let token = client.refresh_access_token() .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi))?; info!("Successfully re-energized."); Ok(token) } fn save_token(run_config: &RunConfig, config: &Config, token: &Token) -> Result<()> { let new_config = update_config(run_config, config, token)?; new_config.save(run_config.active_config) .chain_err(|| ErrorKind::FailedToSaveConfig)?; Ok(()) } fn update_config(run_config: &RunConfig, config: &Config, token: &Token) -> Result<Config> { let profile = match run_config.active_profile.as_ref() { "default" => config.get_default_profile(), s => config.get_profile(s), }.chain_err(|| ErrorKind::FailedToParseCmd("profile".to_string()))?; let centerdevice = profile.centerdevice.as_ref().ok_or( Error::from_kind(ErrorKind::NoCenterDeviceInProfile) )?; let centerdevice = CenterDeviceConfig { access_token: Some(token.access_token().to_string()), refresh_token: Some(token.refresh_token().to_string()), ..(*centerdevice).clone() }; let profile = Profile { centerdevice: Some(centerdevice), ..(*profile).clone() }; let profile_name = match run_config.active_profile.as_ref() { "default" => config.default_profile.clone(), s => 
s.to_string(), }; let mut profiles = config.profiles.clone(); profiles.insert(profile_name, profile); let new_config = Config { profiles, ..(*config).clone() }; Ok(new_config) }
if args.is_present("save") { save_token(run_config, config, &token) .chain_err(|| ErrorKind::FailedToSaveToken)?; }
if_condition
[ { "content": "fn query_health(client: &ReqwestClient, name: &'static str, url: &str) -> impl Future<Item = HealthCheck, Error = Error> {\n\n trace!(\"Quering health for {}\", url);\n\n client\n\n .get(url)\n\n .header(Connection::close())\n\n .send()\n\n .map_err(|e| Error::with_chain(e, ErrorKind::FailedQueryHeatlhCheck(\"failed to request health check from server\".to_owned())))\n\n .and_then(|response| {\n\n trace!(\"Received response with status = {}.\", response.status());\n\n let res = if response.status() == StatusCode::Ok {\n\n Ok(response)\n\n } else {\n\n let reason = format!(\"of unexpected status code {} != 200\", response.status());\n\n Err(Error::from_kind(ErrorKind::FailedQueryHeatlhCheck(reason)))\n\n };\n\n result(res)\n\n })\n\n .and_then(|response| {\n\n let body = response.into_body();\n\n body.concat2()\n", "file_path": "src/modules/health/check.rs", "rank": 0, "score": 432434.0545498316 }, { "content": "fn query_page_status(client: &ReqwestClient, name: String, id: &str) -> impl Future<Item = PageStatusResult, Error = Error> {\n\n let base_url = format!(\"https://{}.statuspage.io/api/v2/status.json\", id);\n\n client\n\n .get(&base_url)\n\n .header(Connection::close())\n\n .send()\n\n .and_then(|res| {\n\n trace!(\"Received response with status = {}.\", res.status());\n\n let body = res.into_body();\n\n body.concat2()\n\n })\n\n .map_err(|_| Error::from_kind(ErrorKind::FailedToQueryStatusPage))\n\n .and_then(|body| {\n\n trace!(\"Parsing body.\");\n\n let res = serde_json::from_slice::<PageStatus>(&body)\n\n .map(|x| PageStatusResult { name, page_status: x })\n\n .chain_err(|| Error::from_kind(ErrorKind::FailedToQueryStatusPage));\n\n result(res)\n\n })\n\n}\n\n\n", "file_path": "src/modules/statuspages/show.rs", "rank": 3, "score": 346772.2866512973 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n info!(\"Terminating instances.\");\n\n let changes = terminate_instances(args, 
run_config, config)?;\n\n\n\n info!(\"Outputting instance state changes.\");\n\n output_changes(args, run_config, config, &changes)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/modules/instances/terminate.rs", "rank": 4, "score": 345596.14069663716 }, { "content": "fn do_call(args: &ArgMatches, _: &RunConfig, config: &Config) -> Result<()> {\n\n let status_pages = &config.status_pages;\n\n let output_type = args.value_of(\"output\").unwrap() // Safe\n\n .parse::<OutputType>()\n\n .chain_err(|| ErrorKind::FailedToParseOutputType)?;\n\n\n\n info!(\"Quering status\");\n\n let mut core = tokio_core::reactor::Core::new()\n\n .chain_err(|| ErrorKind::FailedToQueryStatusPage)?;\n\n let client = ReqwestClient::new(&core.handle());\n\n\n\n let queries = status_pages.iter().map(|(name, status_page)| {\n\n query_page_status(&client, name.to_string(), &status_page.id)\n\n });\n\n let work = join_all(queries);\n\n let result = core.run(work)?;\n\n\n\n info!(\"Outputting Page Status\");\n\n output_page_status(output_type, &result)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/modules/statuspages/show.rs", "rank": 7, "score": 320886.0168992431 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::FailedToParseCmd(\"profile\".to_string()))?;\n\n let project_id = profile.story_tracker.project_id;\n\n\n\n let story_id = match args.value_of(\"story-id\") {\n\n Some(x) if x.starts_with('#') => x[1..].parse::<u64>()\n\n .chain_err(|| ErrorKind::FailedToParseCmd(\"story-id\".to_string())),\n\n Some(x) => x.parse::<u64>()\n\n .chain_err(|| ErrorKind::FailedToParseCmd(\"story-id\".to_string())),\n\n None => Err(Error::from_kind(ErrorKind::FailedToParseCmd(\"story-id\".to_string()))),\n\n }?;\n\n let force = args.is_present(\"force\");\n\n let token = 
&config.pivotal.token;\n\n\n\n info!(\"Quering existing tasks\");\n\n let mut core = tokio_core::reactor::Core::new()\n\n .chain_err(|| ErrorKind::FailedToQueryPivotalApi)?;\n", "file_path": "src/modules/stories/prepare.rs", "rank": 8, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::FailedToParseCmd(\"profile\".to_string()))?;\n\n let centerdevice = profile.centerdevice.as_ref().ok_or(\n\n Error::from_kind(ErrorKind::NoCenterDeviceInProfile)\n\n )?;\n\n\n\n let output_type = args.value_of(\"output\").unwrap() // Safe\n\n .parse::<OutputType>()\n\n .chain_err(|| ErrorKind::FailedToParseOutputType)?;\n\n\n\n let fulltext_str; // Borrow checker\n\n let mut search = Search::new();\n\n if let Some(filenames) = args.values_of(\"filenames\") {\n\n search = search.filenames(filenames.collect());\n\n }\n\n if let Some(tags) = args.values_of(\"tags\") {\n\n search = search.tags(tags.collect());\n", "file_path": "src/modules/centerdevice/search.rs", "rank": 9, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::FailedToParseCmd(\"profile\".to_string()))?;\n\n let project_id = profile.story_tracker.project_id;\n\n\n\n let story_id = match args.value_of(\"story-id\") {\n\n Some(x) if x.starts_with('#') => x[1..].parse::<u64>()\n\n .chain_err(|| ErrorKind::FailedToParseCmd(\"story-id\".to_string())),\n\n Some(x) => x.parse::<u64>()\n\n .chain_err(|| ErrorKind::FailedToParseCmd(\"story-id\".to_string())),\n\n None => 
Err(Error::from_kind(ErrorKind::FailedToParseCmd(\"story-id\".to_string()))),\n\n }?;\n\n let force = args.is_present(\"force\");\n\n let token = &config.pivotal.token;\n\n\n\n info!(\"Quering story state\");\n\n let mut core = tokio_core::reactor::Core::new()\n\n .chain_err(|| ErrorKind::FailedToQueryPivotalApi)?;\n", "file_path": "src/modules/stories/start.rs", "rank": 10, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::FailedToParseCmd(\"profile\".to_string()))?;\n\n let centerdevice = profile.centerdevice.as_ref().ok_or(\n\n Error::from_kind(ErrorKind::NoCenterDeviceInProfile)\n\n )?;\n\n\n\n let document_ids: Vec<&str> = args.values_of(\"document-ids\").unwrap_or_else(|| Default::default()).collect();\n\n\n\n info!(\"Deleting documents at {}.\", centerdevice.base_domain);\n\n delete_documents(centerdevice, &document_ids)?;\n\n info!(\"Successfully deleted documents.\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/modules/centerdevice/delete.rs", "rank": 11, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n // Parse my args\n\n let instance_ids: Vec<&str> = args.values_of(\"instance_ids\").unwrap_or_else(|| Default::default()).collect();\n\n let instance_ids: Vec<_> = read_instance_ids(&instance_ids)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))?;\n\n let public_ip = args.is_present(\"public-ip\");\n\n\n\n let ssh_opts: Vec<&str> = args.values_of(\"ssh-opts\").unwrap_or_else(|| 
Default::default()).collect();\n\n let remote_commands_args: Vec<&str> = args.values_of(\"command_args\").unwrap_or_else(|| Default::default()).collect();\n\n\n\n let timeout = Duration::from_secs(\n\n args.value_of(\"timeout\").unwrap() // safe unwrap\n\n .parse()\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))?\n\n );\n", "file_path": "src/modules/instances/run.rs", "rank": 12, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::FailedToParseCmd(\"profile\".to_string()))?;\n\n let centerdevice = profile.centerdevice.as_ref().ok_or(\n\n Error::from_kind(ErrorKind::NoCenterDeviceInProfile)\n\n )?;\n\n\n\n let output_type = args.value_of(\"output\").unwrap() // Safe\n\n .parse::<OutputType>()\n\n .chain_err(|| ErrorKind::FailedToParseOutputType)?;\n\n\n\n let query = UsersQuery {\n\n all: args.is_present(\"include-all\"),\n\n };\n\n debug!(\"{:#?}\", query);\n\n\n\n info!(\"Searching users at {}.\", centerdevice.base_domain);\n\n let mut result = search_users(centerdevice, query)?;\n", "file_path": "src/modules/centerdevice/users.rs", "rank": 13, "score": 315810.9282768874 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::FailedToParseCmd(\"profile\".to_string()))?;\n\n let centerdevice = profile.centerdevice.as_ref().ok_or(\n\n Error::from_kind(ErrorKind::NoCenterDeviceInProfile)\n\n )?;\n\n\n\n // This happens here due to the borrow checker.\n\n let tags: Vec<&str> = args.values_of(\"tags\").unwrap_or_else(|| Default::default()).collect();\n\n\n\n let file_path = 
args.value_of(\"file\").unwrap(); // Safe\n\n\n\n let mime_type: Mime = if let Some(mt) = args.value_of(\"mime-type\") {\n\n mt.parse().map_err(|_| ErrorKind::FailedToPrepareApiCall)?\n\n } else {\n\n mime_guess::get_mime_type(&file_path)\n\n };\n\n\n", "file_path": "src/modules/centerdevice/upload.rs", "rank": 14, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n info!(\"Querying description for instances.\");\n\n let instances = list_instances(args, run_config, config)?;\n\n\n\n info!(\"Filtering instance descriptions\");\n\n let instances = filter_instances(args, run_config, config, instances)?;\n\n\n\n info!(\"Outputting instance descriptions\");\n\n output_instances(args, run_config, config, &instances)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/modules/instances/list.rs", "rank": 15, "score": 315810.9282768874 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::FailedToParseCmd(\"profile\".to_string()))?;\n\n let centerdevice = profile.centerdevice.as_ref().ok_or(\n\n Error::from_kind(ErrorKind::NoCenterDeviceInProfile)\n\n )?;\n\n\n\n let document_id = args.value_of(\"document-id\").unwrap(); // Safe\n\n let dir_path = if let Some(dir) = args.value_of(\"dirname\") {\n\n PathBuf::from(dir)\n\n } else {\n\n env::current_dir().map_err(|_| ErrorKind::FailedToPrepareApiCall)?\n\n };\n\n\n\n let mut download = Download::new(document_id, &dir_path);\n\n if let Some(f) = args.value_of(\"filename\") {\n\n download = download.filename(Path::new(f));\n\n }\n", "file_path": "src/modules/centerdevice/download.rs", "rank": 16, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n 
info!(\"Starting instances.\");\n\n let changes = start_instances(args, run_config, config)?;\n\n\n\n info!(\"Outputting instance state changes.\");\n\n output_changes(args, run_config, config, &changes)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/modules/instances/start.rs", "rank": 17, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n info!(\"Querying description for instance.\");\n\n let instance = describe_instance(args, run_config, config)?;\n\n\n\n info!(\"Executing ssh.\");\n\n ssh_to_instance(args, run_config, config, instance)\n\n}\n\n\n", "file_path": "src/modules/instances/ssh.rs", "rank": 18, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n let services = args.values_of_lossy(\"services\");\n\n let tags = args.values_of_lossy(\"tags\");\n\n let url = profile.consul\n\n .as_ref()\n\n .ok_or(Error::from_kind(ErrorKind::ConfigMissingInProfile(\"consul\".to_string())))?\n\n .urls\n\n .first()\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))?;\n\n\n\n info!(\"Quering for services = {}, tags = {}\",\n\n services.as_ref().map(|x| x.join(\",\")).unwrap_or_else(|| \"()\".to_owned()),\n\n tags.as_ref().map(|x| x.join(\",\")).unwrap_or_else(|| \"()\".to_owned())\n\n );\n\n let catalog = query_consul(url.to_string(), services, tags)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))?;\n\n\n\n info!(\"Outputting catalog\");\n\n output_instances(args, run_config, config, &catalog)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/modules/consul/list.rs", "rank": 19, "score": 315810.92827688734 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, 
config: &Config) -> Result<()> {\n\n info!(\"Stopping instances.\");\n\n let changes = stop_instances(args, run_config, config)?;\n\n\n\n info!(\"Outputting instance state changes.\");\n\n output_changes(args, run_config, config, &changes)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/modules/instances/stop.rs", "rank": 20, "score": 315810.9282768874 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::FailedToParseCmd(\"profile\".to_string()))?;\n\n let base_domain = &profile.health.base_domain;\n\n\n\n let output_type = args.value_of(\"output\").unwrap() // Safe\n\n .parse::<OutputType>()\n\n .chain_err(|| ErrorKind::FailedToParseOutputType)?;\n\n\n\n info!(\"Checking Health\");\n\n let mut core = tokio_core::reactor::Core::new()\n\n .chain_err(|| ErrorKind::FailedQueryHeatlhCheck(\"failed to create reactor\".to_owned()))?;\n\n let client = ReqwestClient::new(&core.handle());\n\n\n\n let queries = ENDPOINTS.iter().map(|name| {\n\n let url = format!(\"https://{}.{}/healthcheck\", name, base_domain);\n\n query_health(&client, name, &url)\n\n });\n", "file_path": "src/modules/health/check.rs", "rank": 21, "score": 315810.9282768874 }, { "content": "#[allow(unstable_name_collisions)] // flatten from itertools\n\nfn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n // Parse my args\n\n let instance_ids: Vec<&str> = args.values_of(\"instance_ids\").unwrap_or_else(|| Default::default()).collect();\n\n let instance_ids: Vec<_> = read_instance_ids(&instance_ids)\n\n .chain_err(|| 
ErrorKind::ModuleFailed(String::from(NAME)))?;\n\n let public_ip = args.is_present(\"public-ip\");\n\n\n\n let ssh_opts: Vec<&str> = args.values_of(\"ssh-opts\").unwrap_or_else(|| Default::default()).collect();\n\n\n\n let timeout = Duration::from_secs(\n\n args.value_of(\"timeout\").unwrap() // safe unwrap\n\n .parse()\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))?\n\n );\n\n\n", "file_path": "src/modules/ops/asp/run.rs", "rank": 22, "score": 312496.10434480314 }, { "content": "#[allow(unstable_name_collisions)] // flatten from itertools\n\nfn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n let force = args.is_present(\"force\");\n\n let public_ip = args.is_present(\"public-ip\");\n\n\n\n let ssh_opts: Vec<&str> = args.values_of(\"ssh-opts\").unwrap_or_else(|| Default::default()).collect();\n\n\n\n let timeout = Duration::from_secs(\n\n args.value_of(\"timeout\").unwrap() // safe unwrap\n\n .parse()\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))?\n\n );\n\n\n\n let progress_bar = !args.is_present(\"no-progress-bar\");\n\n\n\n let show_all = args.is_present(\"show-all\");\n", "file_path": "src/modules/ops/webserver/backup.rs", "rank": 23, "score": 312496.1043448031 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let issue_tracker = &profile.issue_tracker;\n\n\n\n if args.is_present(\"browser\") {\n\n let template_name: &str = if let Some(ref name) = args.value_of(\"template\") {\n\n name\n\n } else {\n\n 
&issue_tracker.default_issue_template_name\n\n };\n\n let html_url = browse_create_issue(&issue_tracker.github_org, &issue_tracker.github_repo, template_name);\n\n info!(\"Opening browser to create new ops issue\");\n\n webbrowser::open(&html_url)\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n return Ok(());\n\n }\n\n\n", "file_path": "src/modules/ops/issues/create.rs", "rank": 24, "score": 312492.4691425184 }, { "content": "fn do_call(args: &ArgMatches, run_config: &RunConfig, config: &Config) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let issue_tracker = &profile.issue_tracker;\n\n\n\n let url = if args.is_present(\"project\") {\n\n info!(\"Browsing to ops issues project\");\n\n format!(\"https://github.com/{}/{}/projects/{}\", issue_tracker.github_org, issue_tracker.github_repo, issue_tracker.project_number)\n\n } else {\n\n info!(\"Browsing to ops issues\");\n\n format!(\"https://github.com/{}/{}/issues\", issue_tracker.github_org, issue_tracker.github_repo)\n\n };\n\n\n\n webbrowser::open(&url)\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/modules/ops/issues/browse.rs", "rank": 25, "score": 312492.4691425184 }, { "content": "fn get_story(client: &ReqwestClient, project_id: u64, story_id: u64, token: &str) -> impl Future<Item = StoryResponse, Error = Error> {\n\n let url = format!(\n\n \"https://www.pivotaltracker.com/services/v5/projects/{project_id}/stories/{story_id}\",\n\n project_id=project_id,\n\n story_id=story_id);\n\n client\n\n .get(&url)\n\n .header(Connection::close())\n\n .header(XTrackerToken(token.to_string()))\n\n .send()\n\n .and_then(|res| {\n\n trace!(\"Received response with status = {}.\", res.status());\n\n let body = res.into_body();\n\n body.concat2()\n\n })\n\n .map_err(|_| 
Error::from_kind(ErrorKind::FailedToQueryPivotalApi))\n\n .and_then(|body| {\n\n trace!(\"Parsing body.\");\n\n let res = serde_json::from_slice::<StoryResponse>(&body)\n\n .chain_err(|| Error::from_kind(ErrorKind::FailedToQueryPivotalApi));\n\n result(res)\n\n })\n\n}\n\n\n", "file_path": "src/modules/stories/start.rs", "rank": 26, "score": 308090.9245066706 }, { "content": "fn get_tasks(client: &ReqwestClient, project_id: u64, story_id: u64, token: &str) -> impl Future<Item = Vec<TaskResponse>, Error = Error> {\n\n let url = format!(\n\n \"https://www.pivotaltracker.com/services/v5/projects/{project_id}/stories/{story_id}/tasks\",\n\n project_id=project_id,\n\n story_id=story_id);\n\n client\n\n .get(&url)\n\n .header(Connection::close())\n\n .header(XTrackerToken(token.to_string()))\n\n .send()\n\n .and_then(|res| {\n\n trace!(\"Received response with status = {}.\", res.status());\n\n let body = res.into_body();\n\n body.concat2()\n\n })\n\n .map_err(|_| Error::from_kind(ErrorKind::FailedToQueryPivotalApi))\n\n .and_then(|body| {\n\n trace!(\"Parsing body.\");\n\n let res = serde_json::from_slice::<Vec<TaskResponse>>(&body)\n\n .chain_err(|| Error::from_kind(ErrorKind::FailedToQueryPivotalApi));\n\n result(res)\n\n })\n\n}\n\n\n", "file_path": "src/modules/stories/prepare.rs", "rank": 27, "score": 301944.6081850504 }, { "content": "fn start_logging(args: &ArgMatches, config: &CeresConfig) -> Result<()> {\n\n let verbosity: Level = args.occurrences_of(\"verbosity\").into();\n\n\n\n let default_level: log::LevelFilter = config\n\n .logging\n\n .default\n\n .parse()\n\n .map_err(|e| Error::with_chain(e, ErrorKind::FailedToInitLogging))?;\n\n let default = Level(default_level);\n\n\n\n let ceres_level: log::LevelFilter = config\n\n .logging\n\n .ceres\n\n .parse()\n\n .map_err(|e| Error::with_chain(e, ErrorKind::FailedToInitLogging))?;\n\n let ceres = Level(ceres_level);\n\n let ceres = ::std::cmp::max(ceres, verbosity);\n\n\n\n let log_config = LogConfig::new(\n\n 
std::io::stderr(),\n", "file_path": "src/main.rs", "rank": 29, "score": 244249.82292593413 }, { "content": "fn delete_documents(centerdevice: &CenterDeviceConfig, document_ids: &[&str]) -> Result<()> {\n\n let client: AuthorizedClient = centerdevice.try_into()?;\n\n let result = client\n\n .delete_documents(document_ids)\n\n .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi));\n\n debug!(\"Deletion result {:#?}\", result);\n\n\n\n result\n\n}\n", "file_path": "src/modules/centerdevice/delete.rs", "rank": 30, "score": 237898.5645914753 }, { "content": "fn create_tempfile_from_template(template: Option<&str>, default_template: &str) -> Result<PathBuf> {\n\n let template = if let Some(t) = template {\n\n t\n\n } else {\n\n default_template\n\n };\n\n\n\n let tmpfile_path = {\n\n let tmpfile = NamedTempFile::new()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n tmpfile.path().to_path_buf()\n\n };\n\n\n\n trace!(\"Copying {} to {:?}\", template, tmpfile_path);\n\n ::std::fs::copy(template, &tmpfile_path)\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n Ok(tmpfile_path)\n\n}\n\n\n", "file_path": "src/modules/ops/issues/create.rs", "rank": 31, "score": 216325.71319889463 }, { "content": "fn send_issue(github_token: &str, org: &str, repo: &str, issue: &IssueOptions) -> Result<Issue> {\n\n let mut core = Core::new().expect(\"reactor fail\");\n\n let github = Github::new(\n\n concat!(env!(\"CARGO_PKG_NAME\"), \"/\", env!(\"CARGO_PKG_VERSION\")),\n\n Some(Credentials::Token(github_token.to_owned())),\n\n &core.handle()\n\n );\n\n\n\n let f = github\n\n .repo(org, repo)\n\n .issues()\n\n .create(issue);\n\n core.run(f)\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))\n\n}\n\n\n", "file_path": "src/modules/ops/issues/create.rs", "rank": 32, "score": 214558.78549770836 }, { "content": "fn edit_file(file: &Path, editor: &OsString, wait_for_completion: bool) -> Result<()> {\n\n let mut ed = 
Command::new(editor)\n\n .arg(file.as_os_str())\n\n .spawn()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let _ = ed.wait()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n if wait_for_completion {\n\n let _ = ask_for_confirmation(\"Press <Return> when finished ...\", \"\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/modules/ops/issues/create.rs", "rank": 33, "score": 212311.5854471579 }, { "content": "fn download_file(centerdevice: &CenterDeviceConfig, download: Download) -> Result<u64> {\n\n let client: AuthorizedClient = centerdevice.try_into()?;\n\n let result = client\n\n .download_file(download)\n\n .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi));\n\n debug!(\"Download result {:#?}\", result);\n\n\n\n result\n\n}\n\n\n\n\n", "file_path": "src/modules/centerdevice/download.rs", "rank": 34, "score": 209752.5756298383 }, { "content": "fn upload_file(centerdevice: &CenterDeviceConfig, upload: Upload) -> Result<ID> {\n\n let client: AuthorizedClient = centerdevice.try_into()?;\n\n let result = client\n\n .upload_file(upload)\n\n .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi));\n\n debug!(\"Upload result {:#?}\", result);\n\n\n\n result\n\n}\n", "file_path": "src/modules/centerdevice/upload.rs", "rank": 35, "score": 209752.57562983833 }, { "content": "fn download_file_with_progress(centerdevice: &CenterDeviceConfig, download: Download) -> Result<u64> {\n\n let mut progress = Progress::new();\n\n let client: AuthorizedClient = centerdevice.try_into()?;\n\n let result = client\n\n .download_file_with_progress(download, &mut progress)\n\n .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi));\n\n debug!(\"Download result {:#?}\", result);\n\n\n\n result\n\n}\n\n\n", "file_path": "src/modules/centerdevice/download.rs", "rank": 36, "score": 206650.3179064728 }, { "content": "fn plain_output(output_opts: &str) -> 
Result<PlainOutputCatalogResult> {\n\n let output = if output_opts.contains(\"all\") {\n\n Default::default()\n\n } else {\n\n PlainOutputCatalogResult { fields: output_fields(output_opts)? }\n\n };\n\n trace!(\"output = {:?}\", output.fields);\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "src/modules/consul/list.rs", "rank": 37, "score": 194361.953728002 }, { "content": "fn human_output(output_opts: &str) -> Result<TableOutputCatalogResult> {\n\n let output = if output_opts.contains(\"all\") {\n\n Default::default()\n\n } else {\n\n TableOutputCatalogResult { fields: output_fields(output_opts)? }\n\n };\n\n trace!(\"output = {:?}\", output.fields);\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "src/modules/consul/list.rs", "rank": 38, "score": 194361.953728002 }, { "content": "fn output_fields(field_str: &str) -> Result<Vec<NodeField>> {\n\n let fields: ::std::result::Result<Vec<_>, _> = field_str\n\n .split(',')\n\n .map(|s| s.parse::<NodeField>())\n\n .collect();\n\n let fields =\n\n fields.map_err(|e| Error::with_chain(e, ErrorKind::ModuleFailed(NAME.to_owned())))?;\n\n\n\n Ok(fields)\n\n}\n", "file_path": "src/modules/consul/list.rs", "rank": 39, "score": 194254.66365484733 }, { "content": "fn generate_completion(args: &ArgMatches) -> Result<()> {\n\n let bin_name = env!(\"CARGO_PKG_NAME\");\n\n let shell = args\n\n .value_of(\"shell\")\n\n .ok_or_else(|| ErrorKind::CliArgsParsingError(\"shell argument is missing\".to_string()))?;\n\n build_cli().gen_completions_to(\n\n bin_name,\n\n shell.parse::<Shell>().map_err(|_| {\n\n ErrorKind::CliArgsParsingError(\"completion script generation failed\".to_string())\n\n })?,\n\n &mut std::io::stdout(),\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 40, "score": 189810.91888610975 }, { "content": "fn search_documents(centerdevice: &CenterDeviceConfig, search: Search) -> Result<SearchResult> {\n\n let client: AuthorizedClient = centerdevice.try_into()?;\n\n let result = client\n\n 
.search_documents(search)\n\n .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi));\n\n debug!(\"Search result {:#?}\", result);\n\n\n\n result\n\n}\n\n\n", "file_path": "src/modules/centerdevice/search.rs", "rank": 41, "score": 186735.56985455882 }, { "content": "fn browse_create_issue(org: &str, repo: &str, template_name: &str) -> String {\n\n format!(\"https://github.com/{}/{}/issues/new?template={}\", org, repo, template_name)\n\n}\n\n\n", "file_path": "src/modules/ops/issues/create.rs", "rank": 42, "score": 178079.33201886452 }, { "content": "fn is_valid_file(filename: String) -> ::std::result::Result<(), String> {\n\n let path = Path::new(&filename);\n\n\n\n if let Some(parent) = path.parent() {\n\n if parent.to_string_lossy() != \"\" {\n\n let err_msg = format!(\"The filename '{}' contains a directory '{}'. Use '-d'.\", filename, parent.display());\n\n return Err(err_msg);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/modules/centerdevice/download.rs", "rank": 43, "score": 176483.04866898293 }, { "content": "fn search_users(centerdevice: &CenterDeviceConfig, query: UsersQuery) -> Result<Vec<User>> {\n\n let client: AuthorizedClient = centerdevice.try_into()?;\n\n let result = client\n\n .search_users(query)\n\n .map(|x| x.users)\n\n .map_err(|e| Error::with_chain(e.compat(), ErrorKind::FailedToAccessCenterDeviceApi));\n\n debug!(\"Search result {:#?}\", result);\n\n\n\n result\n\n}\n\n\n", "file_path": "src/modules/centerdevice/users.rs", "rank": 45, "score": 172078.29155385453 }, { "content": "fn find_instances(profile: &Profile) -> Result<Vec<InstanceDescriptor>> {\n\n let Provider::Aws(provider) = profile.provider\n\n .as_ref()\n\n .ok_or(Error::from_kind(ErrorKind::ConfigMissingInProfile(\"provider\".to_string())))?;\n\n\n\n let all = provider.describe_instances()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n let mut tags = HashMap::new();\n\n tags.insert(\"Intent\".to_owned(), 
Some(\"webserver\"));\n\n let filter = filter::FilterBuilder::new()\n\n .tags(tags)\n\n .build()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let webservers = all\n\n .into_iter()\n\n .filter(|i| filter.filter(i))\n\n .collect();\n\n\n\n Ok(webservers)\n\n}\n\n\n", "file_path": "src/modules/ops/webserver/backup.rs", "rank": 46, "score": 168784.6003458107 }, { "content": "fn describe_instances(instance_ids: &[String], profile: &Profile) -> Result<Vec<InstanceDescriptor>> {\n\n let Provider::Aws(provider) = profile.provider\n\n .as_ref()\n\n .ok_or(Error::from_kind(ErrorKind::ConfigMissingInProfile(\"provider\".to_string())))?;\n\n let res: Result<Vec<InstanceDescriptor>> = instance_ids.iter().\n\n map(|id| provider\n\n .describe_instance(id)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))\n\n ).collect();\n\n\n\n res\n\n}\n\n\n", "file_path": "src/modules/instances/run.rs", "rank": 47, "score": 160883.04141473168 }, { "content": "fn query_consul(url: String, services: Option<Vec<String>>, tags: Option<Vec<String>>) -> Result<Catalog> {\n\n let consul = Consul::new(url);\n\n let catalog = consul.catalog_by(services, tags)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))?;\n\n\n\n Ok(catalog)\n\n}\n\n\n", "file_path": "src/modules/consul/list.rs", "rank": 48, "score": 158656.2952247843 }, { "content": "fn describe_instances(instance_ids: &[String], profile: &Profile) -> Result<Vec<InstanceDescriptor>> {\n\n let Provider::Aws(provider) = profile.provider\n\n .as_ref()\n\n .ok_or(Error::from_kind(ErrorKind::ConfigMissingInProfile(\"provider\".to_string())))?;\n\n\n\n let res: Result<Vec<InstanceDescriptor>> = instance_ids.iter().\n\n map(|id| provider\n\n .describe_instance(id)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))\n\n ).collect();\n\n\n\n res\n\n}\n\n\n", "file_path": "src/modules/ops/asp/run.rs", "rank": 49, "score": 158577.18707825302 }, { "content": "fn de_ser_region<'de, D>(deserializer: D) 
-> ::std::result::Result<Region, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct RegionVisitor;\n\n\n\n impl<'a> Visitor<'a> for RegionVisitor {\n\n type Value = Region;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"valid AWS region string\")\n\n }\n\n\n\n fn visit_str<E>(self, s: &str) -> ::std::result::Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n let region = Region::from_str(s)\n\n .map_err(|_| de::Error::custom(format!(\"invalid region string '{}'\", s)))?;\n\n Ok(region)\n", "file_path": "src/provider/aws.rs", "rank": 50, "score": 157634.06924633417 }, { "content": "fn make_row(hc_name: &str, previous_hc_name: &Option<&str>, resource_name: &str, resource: &HealthSample) -> Row {\n\n let service_cell = match previous_hc_name {\n\n Some(name) if name == &hc_name => Cell::new(\"\"),\n\n Some(_) | None => {\n\n Cell::new(hc_name)\n\n }\n\n };\n\n\n\n let healthy_cell = if resource.healthy {\n\n Cell::new(\"up\").with_style(Attr::ForegroundColor(color::GREEN))\n\n } else {\n\n Cell::new(\"down\").with_style(Attr::ForegroundColor(color::RED))\n\n };\n\n\n\n let updated_at: Option<DateTime<Local>> = resource.time_stamp.map(|x| {\n\n let naive_datetime =\n\n NaiveDateTime::from_timestamp(x / 1000, 0);\n\n Local.from_utc_datetime(&naive_datetime)\n\n });\n\n\n", "file_path": "src/output/health/mod.rs", "rank": 51, "score": 156998.46907824988 }, { "content": "fn describe(aws: &Aws, instance_id: &str) -> Result<InstanceDescriptor> {\n\n let credentials_provider = assume_role(aws)?;\n\n let default_client = default_tls_client().chain_err(|| ErrorKind::AwsApiError)?;\n\n let client = ec2::Ec2Client::new(default_client, credentials_provider, aws.region.clone());\n\n\n\n let request = DescribeInstancesRequest {\n\n dry_run: Some(false),\n\n filters: None,\n\n instance_ids: Some(vec![instance_id.to_string()]),\n\n max_results: None,\n\n next_token: None,\n\n };\n\n let result = client\n\n 
.describe_instances(&request)\n\n .chain_err(|| ErrorKind::AwsApiError)?;\n\n let mut reservations = result.reservations.ok_or_else(|| {\n\n Error::from_kind(ErrorKind::AwsApiResultError(\n\n \"no reservations found\".to_string(),\n\n ))\n\n })?;\n", "file_path": "src/provider/aws.rs", "rank": 52, "score": 156096.48856001708 }, { "content": "fn show_example_config() -> Result<()> {\n\n let example_config = include_str!(\"../examples/ceres.conf\");\n\n\n\n println!(\"{}\", example_config);\n\n\n\n Ok(())\n\n}\n\n\n\nerror_chain! {\n\n errors {\n\n CliArgsParsingError(cause: String) {\n\n description(\"Failed to parse CLI arguments\")\n\n display(\"Failed to parse CLI arguments because {}.\", cause)\n\n }\n\n FailedToLoadConfigFile(file: String) {\n\n description(\"Failed to load config file\")\n\n display(\"Failed to load config file '{}'\", file)\n\n }\n\n FailedToInitLogging {\n\n description(\"Failed to init logging framework\")\n\n }\n\n }\n\n links {\n\n Module(ceres::modules::Error, ceres::modules::ErrorKind);\n\n Utils(ceres::utils::Error, ceres::utils::ErrorKind);\n\n Config(clams::config::ConfigError, clams::config::ConfigErrorKind);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 53, "score": 154996.14330907117 }, { "content": "fn ser_region<S>(region: &Region, serializer: S) -> ::std::result::Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n serializer.serialize_str(region.name())\n\n}\n\n\n", "file_path": "src/provider/aws.rs", "rank": 54, "score": 152498.73295743467 }, { "content": "fn is_valid_dir(dirname: String) -> ::std::result::Result<(), String> {\n\n let path = Path::new(&dirname);\n\n\n\n if !path.is_dir() {\n\n let err_msg = format!(\"The path '{}' is not a valid directory.\", dirname);\n\n return Err(err_msg);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub struct Progress {\n\n progress_bar: ProgressBar,\n\n}\n\n\n\nimpl Progress {\n\n fn new() -> Self {\n\n let progress_bar = ProgressBar::new(0);\n\n 
progress_bar.set_style(ProgressStyle::default_clams_bar());\n\n Progress {\n", "file_path": "src/modules/centerdevice/download.rs", "rank": 55, "score": 144638.64490611336 }, { "content": "fn create_issue(title: String, file_path: &Path, labels: Vec<String>) -> Result<IssueOptions> {\n\n let mut file = File::open(file_path)\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let mut body = String::new();\n\n file.read_to_string(&mut body)\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n let issue = IssueOptions {\n\n title,\n\n body: Some(body),\n\n assignee: None,\n\n milestone: None,\n\n labels: labels,\n\n };\n\n\n\n Ok(issue)\n\n}\n\n\n", "file_path": "src/modules/ops/issues/create.rs", "rank": 56, "score": 141305.75454283765 }, { "content": "fn extract_metadata_filter(metadata_str: &str) -> Option<Vec<String>> {\n\n if metadata_str.len() < 9 {\n\n return None;\n\n };\n\n let metadata = &metadata_str[9..]; // Safe because we call this function only when the prefix 'Metadata:' has been seen\n\n let metadata_filter: Vec<_> = metadata.split(':').map(String::from).collect();\n\n\n\n Some(metadata_filter)\n\n}\n", "file_path": "src/modules/consul/mod.rs", "rank": 57, "score": 140548.36930122125 }, { "content": "fn output_results(output_type: OutputType, status: &[User]) -> Result<()> {\n\n let mut stdout = ::std::io::stdout();\n\n\n\n match output_type {\n\n OutputType::Human => {\n\n let output = TableOutputUsers;\n\n\n\n output\n\n .output(&mut stdout, status)\n\n .chain_err(|| ErrorKind::FailedOutput)\n\n },\n\n OutputType::Json => {\n\n let output = JsonOutputUsers;\n\n\n\n output\n\n .output(&mut stdout, status)\n\n .chain_err(|| ErrorKind::FailedOutput)\n\n },\n\n OutputType::Plain => {\n\n let output = PlainOutputUsers;\n\n\n\n output\n\n .output(&mut stdout, status)\n\n .chain_err(|| ErrorKind::FailedOutput)\n\n },\n\n }\n\n}\n", "file_path": "src/modules/centerdevice/users.rs", "rank": 58, "score": 137372.09395409652 }, { 
"content": "fn terminate_instances(\n\n args: &ArgMatches,\n\n run_config: &RunConfig,\n\n config: &Config,\n\n) -> Result<Vec<StateChange>> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let Provider::Aws(provider) = profile.provider\n\n .as_ref()\n\n .ok_or(Error::from_kind(ErrorKind::ConfigMissingInProfile(\"provider\".to_string())))?;\n\n\n\n let dry = args.is_present(\"dry\");\n\n let yes = args.is_present(\"yes\");\n\n\n\n match (dry, yes) {\n\n (true, _) => {\n\n warn!(\"Running in dry mode -- no changes will be executed.\");\n\n }\n", "file_path": "src/modules/instances/terminate.rs", "rank": 59, "score": 134445.32920903648 }, { "content": "fn build_cli() -> App<'static, 'static> {\n\n let name = env!(\"CARGO_PKG_NAME\");\n\n let version = env!(\"CARGO_PKG_VERSION\");\n\n let about = env!(\"CARGO_PKG_DESCRIPTION\");\n\n\n\n let general = App::new(name)\n\n .setting(AppSettings::SubcommandRequired)\n\n .version(version)\n\n .about(about)\n\n .arg(\n\n Arg::with_name(\"config\")\n\n .long(\"config\")\n\n .takes_value(true)\n\n .help(\"Sets config file to use [default: ~/.ceres.conf]\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"no-color\")\n\n .long(\"no-color\")\n\n .help(\"Turns off colored output\"),\n\n )\n", "file_path": "src/main.rs", "rank": 60, "score": 133478.72941334237 }, { "content": "fn output_changes(\n\n args: &ArgMatches,\n\n _: &RunConfig,\n\n _: &Config,\n\n state_changes: &[StateChange],\n\n) -> Result<()> {\n\n let output_type = args.value_of(\"output\").unwrap() // Safe\n\n .parse::<OutputType>()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let mut stdout = ::std::io::stdout();\n\n\n\n match output_type {\n\n OutputType::Human => {\n\n let output = TableOutputStatusChanges {};\n\n\n\n output\n\n .output(&mut stdout, state_changes)\n\n .chain_err(|| 
ErrorKind::ModuleFailed(String::from(NAME)))\n\n },\n\n OutputType::Json => {\n", "file_path": "src/modules/instances/terminate.rs", "rank": 61, "score": 125476.26900776627 }, { "content": "fn output_results(\n\n output_type: OutputType,\n\n status: &[Document]\n\n) -> Result<()> {\n\n let mut stdout = ::std::io::stdout();\n\n\n\n match output_type {\n\n OutputType::Human => {\n\n let output = TableOutputSearchResult;\n\n\n\n output\n\n .output(&mut stdout, status)\n\n .chain_err(|| ErrorKind::FailedOutput)\n\n },\n\n OutputType::Json => {\n\n let output = JsonOutputSearchResult;\n\n\n\n output\n\n .output(&mut stdout, status)\n\n .chain_err(|| ErrorKind::FailedOutput)\n", "file_path": "src/modules/centerdevice/search.rs", "rank": 62, "score": 124632.89157698498 }, { "content": "fn run() -> Result<()> {\n\n let args = build_cli().get_matches();\n\n clams::console::set_color(!args.is_present(\"no-color\"));\n\n\n\n match args.subcommand_name() {\n\n Some(subcommand @ \"completions\") => {\n\n return generate_completion(args.subcommand_matches(subcommand).unwrap())\n\n } // Safe unwrap\n\n Some(\"show-example-config\") => return show_example_config(),\n\n _ => {}\n\n };\n\n\n\n let mut config_locations = default_locations(DEFAULT_CONFIG_FILE_NAME);\n\n if let Some(config) = args.value_of(\"config\") {\n\n config_locations.insert(0, config.into());\n\n }\n\n let (config, config_path) = CeresConfig::smart_load(&config_locations)?;\n\n\n\n start_logging(&args, &config)?;\n\n\n", "file_path": "src/main.rs", "rank": 63, "score": 122664.83638902928 }, { "content": "fn resources_by_project(resources: &[Resource]) -> HashMap<&str, Vec<&str>> {\n\n let mut map = HashMap::new();\n\n\n\n for resource in resources {\n\n let mut v = map.entry(resource.project.as_ref()).or_insert(Vec::new());\n\n v.push(resource.name.as_ref());\n\n }\n\n\n\n map\n\n}\n", "file_path": "src/output/infrastructure/mod.rs", "rank": 64, "score": 118837.99759611975 }, { "content": "fn 
extract_tags_filter(tags_str: &str) -> Option<Vec<String>> {\n\n if tags_str.len() < 5 {\n\n return None;\n\n };\n\n let tags = &tags_str[5..]; // Safe because we call this function only when the prefix 'Tags:' has been seen\n\n let tags_filter: Vec<_> = tags.split(':').map(String::from).collect();\n\n\n\n Some(tags_filter)\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct InstanceDescriptor {\n\n pub ami_launch_index: Option<i64>,\n\n pub architecture: Option<String>,\n\n pub block_device_mappings: Option<Vec<String>>,\n\n pub client_token: Option<String>,\n\n pub ebs_optimized: Option<bool>,\n\n // Won't convert this\n\n //pub elastic_gpu_associations: Option<Vec<ElasticGpuAssociation>>,\n\n pub ena_support: Option<bool>,\n", "file_path": "src/provider/mod.rs", "rank": 65, "score": 114133.94375262807 }, { "content": "pub trait Module {\n\n fn build_sub_cli() -> App<'static, 'static>;\n\n fn call(cli_args: Option<&ArgMatches>, run_config: &RunConfig, config: &Config) -> Result<()>;\n\n}\n\n\n\nmain_module!(\n\n consul,\n\n centerdevice,\n\n health,\n\n infrastructure,\n\n instances,\n\n ops,\n\n statuspages,\n\n stories\n\n);\n\n\n\nerror_chain! 
{\n\n errors {\n\n NoSuchCommand(command: String) {\n\n description(\"no such command\")\n", "file_path": "src/modules/mod.rs", "rank": 66, "score": 110044.92727943233 }, { "content": "fn header_for_field(field: &NodeField) -> &str {\n\n match *field {\n\n NodeField::Id => \"Node Id\",\n\n NodeField::Name => \"Node Name\",\n\n NodeField::MetaData(_) => \"Meta Data\",\n\n NodeField::Address => \"Node Address\",\n\n NodeField::ServicePort => \"Service Port\",\n\n NodeField::ServiceTags => \"Service Tags\",\n\n NodeField::ServiceId => \"Service Id\",\n\n NodeField::ServiceName => \"Service Name\",\n\n NodeField::Healthy => \"Healthy\",\n\n }\n\n}\n", "file_path": "src/output/consul/table_output.rs", "rank": 67, "score": 109473.3860076904 }, { "content": "fn header_for_field(field: &InstanceDescriptorFields) -> &str {\n\n match *field {\n\n InstanceDescriptorFields::BlockDeviceMappings => \"Block Device Mappings\",\n\n InstanceDescriptorFields::Hypervisor => \"Hypervisor\",\n\n InstanceDescriptorFields::IamInstanceProfile => \"Iam Instance Profile\",\n\n InstanceDescriptorFields::ImageId => \"Image Id\",\n\n InstanceDescriptorFields::InstanceId => \"Instance Id\",\n\n InstanceDescriptorFields::InstanceType => \"Instance Type\",\n\n InstanceDescriptorFields::LaunchTime => \"Launch Time\",\n\n InstanceDescriptorFields::Monitoring => \"Monitoring\",\n\n InstanceDescriptorFields::Placement => \"Placement\",\n\n InstanceDescriptorFields::PrivateDnsName => \"Private DNS Name\",\n\n InstanceDescriptorFields::PrivateIpAddress => \"Private IP Address\",\n\n InstanceDescriptorFields::PublicDnsName => \"Public DNS Name\",\n\n InstanceDescriptorFields::PublicIpAddress => \"Public IP Address\",\n\n InstanceDescriptorFields::RootDeviceName => \"Root Device Name\",\n\n InstanceDescriptorFields::RootDeviceType => \"Root Device Type\",\n\n InstanceDescriptorFields::SecurityGroups => \"Security Groups\",\n\n InstanceDescriptorFields::State => \"State\",\n\n 
InstanceDescriptorFields::StateReason => \"State Reason\",\n\n InstanceDescriptorFields::Tags(_) => \"Tags\",\n\n InstanceDescriptorFields::VirtualizationType => \"Virtualization Type\",\n\n InstanceDescriptorFields::VpcId => \"Vpc Id\",\n\n }\n\n}\n\n\n", "file_path": "src/output/instances/table_output.rs", "rank": 68, "score": 108034.7494456741 }, { "content": "pub trait TerminateInstances {\n\n fn terminate_instances(\n\n &self,\n\n dry: bool,\n\n instance_ids: &[InstanceId],\n\n ) -> Result<Vec<StateChange>>;\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct StateChange {\n\n pub instance_id: InstanceId,\n\n pub current_state: String,\n\n pub previous_state: String,\n\n}\n\n\n\nerror_chain! {\n\n errors {\n\n ProviderCallFailed(call: String) {\n\n description(\"API call to provider failed.\")\n\n display(\"API call '{}' to provider failed.\", call)\n", "file_path": "src/provider/mod.rs", "rank": 69, "score": 104429.3469630903 }, { "content": "fn list(aws: &Aws) -> Result<Vec<InstanceDescriptor>> {\n\n let credentials_provider = assume_role(aws)?;\n\n let default_client = default_tls_client().chain_err(|| ErrorKind::AwsApiError)?;\n\n let client = ec2::Ec2Client::new(default_client, credentials_provider, aws.region.clone());\n\n\n\n let request = Default::default();\n\n let result = client\n\n .describe_instances(&request)\n\n .chain_err(|| ErrorKind::AwsApiError)?;\n\n let reservations = result.reservations.ok_or_else(|| {\n\n Error::from_kind(ErrorKind::AwsApiResultError(\n\n \"no reservations found\".to_string(),\n\n ))\n\n })?;\n\n\n\n let mut instances: Vec<InstanceDescriptor> = Vec::new();\n\n for r in reservations {\n\n if let Some(resv_instances) = r.instances {\n\n for i in resv_instances {\n\n instances.push(i.into());\n", "file_path": "src/provider/aws.rs", "rank": 70, "score": 104196.58881528747 }, { "content": "fn assume_role(aws: &Aws) -> Result<StsAssumeRoleSessionCredentialsProvider> {\n\n //let base_provider = 
DefaultCredentialsProvider::new().chain_err(|| ErrorKind::AwsApiError)?;\n\n let base_provider = StaticProvider::new(\n\n aws.access_key_id.clone(),\n\n aws.secret_access_key.clone(),\n\n aws.token.clone(),\n\n None,\n\n );\n\n let default_client = default_tls_client().chain_err(|| ErrorKind::AwsApiError)?;\n\n let sts = StsClient::new(default_client, base_provider, aws.region.clone());\n\n\n\n let provider = StsAssumeRoleSessionCredentialsProvider::new(\n\n sts,\n\n aws.role_arn.clone(),\n\n \"default\".to_string(),\n\n None,\n\n None,\n\n None,\n\n None,\n\n );\n", "file_path": "src/provider/aws.rs", "rank": 71, "score": 100313.73933573066 }, { "content": "pub trait OutputSearchResult {\n\n fn output<T: Write>(&self, writer: &mut T, results: &[Document]) -> Result<()>;\n\n}\n\n\n\npub struct JsonOutputSearchResult;\n\n\n\nimpl OutputSearchResult for JsonOutputSearchResult {\n\n fn output<T: Write>(&self, writer: &mut T, result: &[Document]) -> Result<()> {\n\n serde_json::to_writer_pretty(writer, result).chain_err(|| ErrorKind::OutputFailed)\n\n }\n\n}\n\n\n\npub struct PlainOutputSearchResult;\n\n\n\nimpl OutputSearchResult for PlainOutputSearchResult {\n\n fn output<T: Write>(&self, writer: &mut T, result: &[Document]) -> Result<()> {\n\n for d in result {\n\n let line = format!(\n\n \"{} {} {} {} {} {}\\n\",\n\n d.id,\n", "file_path": "src/output/centerdevice/search.rs", "rank": 72, "score": 99969.55379784784 }, { "content": "pub trait OutputCommandResults {\n\n fn output<T: Write>(&self, writer: &mut T, results: &[CommandResult]) -> Result<()>;\n\n}\n", "file_path": "src/output/instances/mod.rs", "rank": 73, "score": 99969.55379784784 }, { "content": "pub trait OutputCatalogResult {\n\n fn output<T: Write>(&self, writer: &mut T, results: &Catalog) -> Result<()>;\n\n}\n\n\n", "file_path": "src/output/consul/mod.rs", "rank": 74, "score": 99969.55379784784 }, { "content": "pub trait OutputPageStatusResult {\n\n fn output<T: Write>(&self, writer: &mut T, results: 
&[PageStatusResult]) -> Result<()>;\n\n}\n\n\n\npub struct JsonOutputPageStatusResult;\n\n\n\nimpl OutputPageStatusResult for JsonOutputPageStatusResult {\n\n fn output<T: Write>(&self, writer: &mut T, result: &[PageStatusResult]) -> Result<()> {\n\n serde_json::to_writer_pretty(writer, result).chain_err(|| ErrorKind::OutputFailed)\n\n }\n\n}\n\n\n\npub struct PlainOutputPageStatusResult;\n\n\n\nimpl OutputPageStatusResult for PlainOutputPageStatusResult {\n\n fn output<T: Write>(&self, writer: &mut T, result: &[PageStatusResult]) -> Result<()> {\n\n for r in result {\n\n let line = format!(\n\n \"{} {} {} {} {} {}\\n\",\n\n r.name,\n", "file_path": "src/output/statuspages/mod.rs", "rank": 75, "score": 98293.03320511915 }, { "content": "pub trait OutputResourceListResult {\n\n fn output<T: Write>(&self, writer: &mut T, results: &[Resource]) -> Result<()>;\n\n}\n\n\n\npub struct JsonOutputResourceListResult;\n\n\n\nimpl OutputResourceListResult for JsonOutputResourceListResult {\n\n fn output<T: Write>(&self, writer: &mut T, result: &[Resource]) -> Result<()> {\n\n let by_project = resources_by_project(result);\n\n serde_json::to_writer_pretty(writer, &by_project).chain_err(|| ErrorKind::OutputFailed)\n\n }\n\n}\n\n\n\npub struct PlainOutputResourceListResult;\n\n\n\nimpl OutputResourceListResult for PlainOutputResourceListResult {\n\n fn output<T: Write>(&self, writer: &mut T, result: &[Resource]) -> Result<()> {\n\n for resource in result {\n\n let line = format!(\"{} {}\\n\", resource.project, resource.name);\n\n let _ = writer.write(line.as_bytes());\n", "file_path": "src/output/infrastructure/mod.rs", "rank": 76, "score": 98293.03320511915 }, { "content": "fn start(aws: &Aws, dry: bool, instance_ids: &[InstanceId]) -> Result<Vec<StateChange>> {\n\n let credentials_provider = assume_role(aws)?;\n\n let default_client = default_tls_client().chain_err(|| ErrorKind::AwsApiError)?;\n\n let client = ec2::Ec2Client::new(default_client, credentials_provider, 
aws.region.clone());\n\n\n\n let request = StartInstancesRequest {\n\n additional_info: None,\n\n dry_run: Some(dry),\n\n instance_ids: instance_ids\n\n .iter()\n\n .map(|x| x.to_owned())\n\n .collect::<Vec<_>>(),\n\n };\n\n // If run in dry mode, AWS returns an error of type DryRunOperation\n\n // cf. https://docs.rs/rusoto_ec2/0.31.0/rusoto_ec2/struct.TerminateInstancesRequest.html#structfield.dry_run\n\n let result = match client.start_instances(&request) {\n\n Err(StartInstancesError::Unknown(ref s)) if s.contains(\"DryRunOperation\") => {\n\n return Ok(create_dry_run_results(instance_ids))\n\n }\n\n Err(StartInstancesError::Unknown(ref s)) if s.contains(\"UnauthorizedOperation\") => {\n", "file_path": "src/provider/aws.rs", "rank": 77, "score": 90579.43534338588 }, { "content": "fn destroy(aws: &Aws, dry: bool, instance_ids: &[InstanceId]) -> Result<Vec<StateChange>> {\n\n let credentials_provider = assume_role(aws)?;\n\n let default_client = default_tls_client().chain_err(|| ErrorKind::AwsApiError)?;\n\n let client = ec2::Ec2Client::new(default_client, credentials_provider, aws.region.clone());\n\n\n\n let request = TerminateInstancesRequest {\n\n dry_run: Some(dry),\n\n instance_ids: instance_ids\n\n .iter()\n\n .map(|x| x.to_owned())\n\n .collect::<Vec<_>>(),\n\n };\n\n // If run in dry mode, AWS returns an error of type DryRunOperation\n\n // cf. 
https://docs.rs/rusoto_ec2/0.31.0/rusoto_ec2/struct.TerminateInstancesRequest.html#structfield.dry_run\n\n let result = match client.terminate_instances(&request) {\n\n Err(TerminateInstancesError::Unknown(ref s)) if s.contains(\"DryRunOperation\") => {\n\n return Ok(create_dry_run_results(instance_ids))\n\n }\n\n Err(TerminateInstancesError::Unknown(ref s)) if s.contains(\"UnauthorizedOperation\") => {\n\n return Err(Error::from_kind(ErrorKind::AwsApiResultError(\n", "file_path": "src/provider/aws.rs", "rank": 78, "score": 90579.43534338588 }, { "content": "fn start_story(\n\n client: &ReqwestClient,\n\n project_id: u64,\n\n story_id: u64,\n\n token: &str,\n\n) -> impl Future<Item = StoryResponse, Error = Error> {\n\n let url = format!(\n\n \"https://www.pivotaltracker.com/services/v5/projects/{project_id}/stories/{story_id}\",\n\n project_id=project_id,\n\n story_id=story_id);\n\n\n\n #[derive(Debug, Serialize)]\n\n struct StoryRequest {\n\n current_state: StoryState,\n\n }\n\n let data = serde_json::to_string( &StoryRequest { current_state: StoryState::Started } ).unwrap(); // This is safe\n\n\n\n trace!(\"Story StoryRequest: {:?}\", data);\n\n\n\n client\n", "file_path": "src/modules/stories/start.rs", "rank": 79, "score": 88672.35345780276 }, { "content": "fn output_instances(\n\n args: &ArgMatches,\n\n _: &RunConfig,\n\n _: &Config,\n\n instances: &[InstanceDescriptor],\n\n) -> Result<()> {\n\n let output_type = args.value_of(\"output\").unwrap() // Safe\n\n .parse::<OutputType>()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let mut stdout = ::std::io::stdout();\n\n\n\n match output_type {\n\n OutputType::Human => {\n\n let fields: ::std::result::Result<Vec<_>, _> = args.value_of(\"output-options\").unwrap() // Safe unwrap\n\n .split(',')\n\n .map(|s| s.parse::<InstanceDescriptorFields>())\n\n .collect();\n\n let fields =\n\n fields.map_err(|e| Error::with_chain(e, ErrorKind::ModuleFailed(NAME.to_owned())))?;\n\n let output = 
TableOutputInstances { fields };\n", "file_path": "src/modules/instances/list.rs", "rank": 80, "score": 88672.35345780276 }, { "content": "fn output_instances(\n\n args: &ArgMatches,\n\n _: &RunConfig,\n\n _: &Config,\n\n catalog: &Catalog,\n\n) -> Result<()> {\n\n let output_type = args.value_of(\"output\").unwrap() // Safe\n\n .parse::<OutputType>()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let mut stdout = ::std::io::stdout();\n\n\n\n match output_type {\n\n OutputType::Human => {\n\n let opts = args.value_of(\"output-options\").unwrap(); // Safe unwrap\n\n let output = human_output(opts)?;\n\n\n\n output\n\n .output(&mut stdout, catalog)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))\n\n },\n", "file_path": "src/modules/consul/list.rs", "rank": 81, "score": 88672.35345780276 }, { "content": "fn create_task(\n\n client: &ReqwestClient,\n\n project_id: u64,\n\n story_id: u64,\n\n token: &str,\n\n position: usize,\n\n description: &str,\n\n) -> impl Future<Item = TaskResponse, Error = Error> {\n\n let url = format!(\n\n \"https://www.pivotaltracker.com/services/v5/projects/{project_id}/stories/{story_id}/tasks\",\n\n project_id=project_id,\n\n story_id=story_id);\n\n\n\n let data = json!({\n\n \"description\": format!(\"{}. 
{}\", position, description),\n\n \"position\": position\n\n }).to_string();\n\n\n\n trace!(\"Task: {:?}\", data);\n\n\n", "file_path": "src/modules/stories/prepare.rs", "rank": 82, "score": 88672.35345780276 }, { "content": "fn start_instances(\n\n args: &ArgMatches,\n\n run_config: &RunConfig,\n\n config: &Config,\n\n) -> Result<Vec<StateChange>> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let Provider::Aws(provider) = profile.provider\n\n .as_ref()\n\n .ok_or(Error::from_kind(ErrorKind::ConfigMissingInProfile(\"provider\".to_string())))?;\n\n\n\n let dry = args.is_present(\"dry\");\n\n\n\n if dry {\n\n warn!(\"Running in dry mode -- no changes will be executed.\");\n\n }\n\n\n\n let instance_ids: Vec<&str> = args.values_of(\"instance_ids\").unwrap_or_else(|| Default::default()).collect();\n\n let instance_ids: Vec<_> = read_instance_ids(&instance_ids)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))?;\n\n\n\n provider\n\n .start_instances(dry, &instance_ids)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))\n\n}\n\n\n", "file_path": "src/modules/instances/start.rs", "rank": 83, "score": 88672.35345780276 }, { "content": "fn list_instances(\n\n _: &ArgMatches,\n\n run_config: &RunConfig,\n\n config: &Config,\n\n) -> Result<Vec<InstanceDescriptor>> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let Provider::Aws(provider) = profile.provider\n\n .as_ref()\n\n .ok_or(Error::from_kind(ErrorKind::ConfigMissingInProfile(\"provider\".to_string())))?;\n\n\n\n provider\n\n .describe_instances()\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))\n\n}\n\n\n", "file_path": "src/modules/instances/list.rs", 
"rank": 84, "score": 88672.35345780276 }, { "content": "fn stop_instances(\n\n args: &ArgMatches,\n\n run_config: &RunConfig,\n\n config: &Config,\n\n) -> Result<Vec<StateChange>> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let Provider::Aws(provider) = profile.provider\n\n .as_ref()\n\n .ok_or(Error::from_kind(ErrorKind::ConfigMissingInProfile(\"provider\".to_string())))?;\n\n\n\n let dry = args.is_present(\"dry\");\n\n let force = args.is_present(\"force\");\n\n let yes = args.is_present(\"yes\");\n\n\n\n if force {\n\n warn!(\"Going to force stopping instances -- file system caches will not be flushed.\");\n\n }\n", "file_path": "src/modules/instances/stop.rs", "rank": 85, "score": 88672.35345780276 }, { "content": "fn output_changes(\n\n args: &ArgMatches,\n\n _: &RunConfig,\n\n _: &Config,\n\n state_changes: &[StateChange],\n\n) -> Result<()> {\n\n let output_type = args.value_of(\"output\").unwrap() // Safe\n\n .parse::<OutputType>()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let mut stdout = ::std::io::stdout();\n\n\n\n match output_type {\n\n OutputType::Human => {\n\n let output = TableOutputStatusChanges {};\n\n\n\n output\n\n .output(&mut stdout, state_changes)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))\n\n },\n\n OutputType::Json => {\n", "file_path": "src/modules/instances/stop.rs", "rank": 86, "score": 88672.35345780276 }, { "content": "fn ssh_to_instance(\n\n args: &ArgMatches,\n\n run_config: &RunConfig,\n\n config: &Config,\n\n instance: InstanceDescriptor,\n\n) -> Result<()> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n\n\n let ip = if args.is_present(\"public-ip\") {\n\n 
instance.public_ip_address\n\n } else {\n\n instance.private_ip_address\n\n };\n\n\n\n let command = args.values_of(\"command_args\")\n\n .map(|x| x.collect::<Vec<_>>().join(\" \"));\n\n\n", "file_path": "src/modules/instances/ssh.rs", "rank": 87, "score": 88672.35345780276 }, { "content": "fn output_changes(\n\n args: &ArgMatches,\n\n _: &RunConfig,\n\n _: &Config,\n\n state_changes: &[StateChange],\n\n) -> Result<()> {\n\n let output_type = args.value_of(\"output\").unwrap() // Safe\n\n .parse::<OutputType>()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let mut stdout = ::std::io::stdout();\n\n\n\n match output_type {\n\n OutputType::Human => {\n\n let output = TableOutputStatusChanges {};\n\n\n\n output\n\n .output(&mut stdout, state_changes)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))\n\n },\n\n OutputType::Json => {\n", "file_path": "src/modules/instances/start.rs", "rank": 88, "score": 88672.35345780276 }, { "content": "fn describe_instance(\n\n args: &ArgMatches,\n\n run_config: &RunConfig,\n\n config: &Config,\n\n) -> Result<InstanceDescriptor> {\n\n let profile = match run_config.active_profile.as_ref() {\n\n \"default\" => config.get_default_profile(),\n\n s => config.get_profile(s),\n\n }.chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n let Provider::Aws(provider) = profile.provider\n\n .as_ref()\n\n .ok_or(Error::from_kind(ErrorKind::ConfigMissingInProfile(\"provider\".to_string())))?;\n\n\n\n let instance_id = args.value_of(\"instance_id\").unwrap(); // safe\n\n\n\n provider\n\n .describe_instance(instance_id)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))\n\n}\n\n\n", "file_path": "src/modules/instances/ssh.rs", "rank": 89, "score": 88672.35345780276 }, { "content": "fn filter_instances(\n\n args: &ArgMatches,\n\n _: &RunConfig,\n\n _: &Config,\n\n instances: Vec<InstanceDescriptor>,\n\n) -> Result<Vec<InstanceDescriptor>> {\n\n let instances = if let Some(filter_str) = 
args.value_of(\"filter\") {\n\n let filter = filter_str\n\n .parse::<filter::Filter>()\n\n .chain_err(|| ErrorKind::ModuleFailed(NAME.to_owned()))?;\n\n instances\n\n .into_iter()\n\n .filter(|i| filter.filter(i))\n\n .collect::<Vec<_>>()\n\n } else {\n\n instances\n\n };\n\n\n\n Ok(instances)\n\n}\n\n\n", "file_path": "src/modules/instances/list.rs", "rank": 90, "score": 88672.35345780276 }, { "content": "fn iam_instance_profile_to_string(iip: &ec2::IamInstanceProfile) -> String {\n\n let empty = String::from(EMPTY);\n\n format!(\n\n \"id={}, arn={}\",\n\n iip.id.as_ref().unwrap_or(&empty),\n\n iip.arn.as_ref().unwrap_or(&empty),\n\n )\n\n}\n\n\n", "file_path": "src/provider/aws.rs", "rank": 91, "score": 88084.75482492287 }, { "content": "fn output_page_status(\n\n output_type: OutputType,\n\n status: &[PageStatusResult]\n\n) -> Result<()> {\n\n let mut stdout = ::std::io::stdout();\n\n\n\n match output_type {\n\n OutputType::Human => {\n\n let output = TableOutputPageStatusResult;\n\n\n\n output\n\n .output(&mut stdout, status)\n\n .chain_err(|| ErrorKind::FailedOutput)\n\n },\n\n OutputType::Json => {\n\n let output = JsonOutputPageStatusResult;\n\n\n\n output\n\n .output(&mut stdout, status)\n\n .chain_err(|| ErrorKind::FailedOutput)\n", "file_path": "src/modules/statuspages/show.rs", "rank": 92, "score": 86937.38100256331 }, { "content": "fn output_page_status(\n\n output_type: OutputType,\n\n health_checks: &[HealthCheck]\n\n) -> Result<()> {\n\n let mut stdout = ::std::io::stdout();\n\n\n\n match output_type {\n\n OutputType::Human => {\n\n let output = TableOutputHealthCheck;\n\n\n\n output\n\n .output(&mut stdout, health_checks)\n\n .chain_err(|| ErrorKind::FailedOutput)\n\n },\n\n OutputType::Json => {\n\n let output = JsonOutputHealthCheck;\n\n\n\n output\n\n .output(&mut stdout, health_checks)\n\n .chain_err(|| ErrorKind::FailedOutput)\n", "file_path": "src/modules/health/check.rs", "rank": 93, "score": 86937.38100256331 }, { "content": "fn 
create_dry_run_results(instance_ids: &[InstanceId]) -> Vec<StateChange> {\n\n instance_ids\n\n .iter()\n\n .map(|i| StateChange {\n\n instance_id: i.to_owned(),\n\n previous_state: \"- n/a -\".to_owned(),\n\n current_state: \"- n/a -\".to_owned(),\n\n })\n\n .collect::<Vec<_>>()\n\n}\n\n\n\nimpl From<ec2::InstanceStateChange> for StateChange {\n\n fn from(x: ec2::InstanceStateChange) -> Self {\n\n StateChange {\n\n instance_id: x.instance_id.unwrap_or_else(|| String::from(\"- n/a -\")),\n\n // TODO: Fix me!\n\n current_state: x\n\n .current_state\n\n .map(|x| x.name.unwrap_or_else(|| String::from(\"- n/a -\")))\n\n .unwrap_or_else(|| String::from(\"- n/a -\")),\n", "file_path": "src/provider/aws.rs", "rank": 94, "score": 76002.73737126964 }, { "content": "use clams::console::ask_for_confirmation;\n\nuse clap::{App, Arg, ArgMatches, SubCommand};\n\n\n\nuse config::{CeresConfig as Config, Provider};\n\nuse run_config::RunConfig;\n\nuse modules::*;\n\nuse output::OutputType;\n\nuse output::instances::{JsonOutputStateChanges, OutputStateChanges, TableOutputStatusChanges};\n\nuse provider::{StateChange, TerminateInstances};\n\nuse utils::cli::read_instance_ids;\n\n\n\npub const NAME: &str = \"terminate\";\n\n\n\npub struct SubModule;\n\n\n\nimpl Module for SubModule {\n\n fn build_sub_cli() -> App<'static, 'static> {\n\n SubCommand::with_name(NAME)\n\n .about(\"terminate instances\")\n\n .arg(\n", "file_path": "src/modules/instances/terminate.rs", "rank": 96, "score": 73457.01169230597 }, { "content": " (false, false) => {\n\n if !ask_for_confirmation( \"Going to terminate instances. 
Please type 'yes' to continue: \", \"yes\").unwrap()\n\n {\n\n return Err(Error::from_kind(ErrorKind::ModuleFailed(String::from(\n\n NAME,\n\n ))));\n\n }\n\n }\n\n (false, true) => {}\n\n }\n\n\n\n let instance_ids: Vec<&str> = args.values_of(\"instance_ids\").unwrap_or_else(|| Default::default()).collect();\n\n let instance_ids: Vec<_> = read_instance_ids(&instance_ids)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))?;\n\n\n\n provider\n\n .terminate_instances(dry, &instance_ids)\n\n .chain_err(|| ErrorKind::ModuleFailed(String::from(NAME)))\n\n}\n\n\n", "file_path": "src/modules/instances/terminate.rs", "rank": 99, "score": 73442.41599497988 } ]
Rust
src/cli/doc_get.rs
couchbaselabs/couchbase-shell
28498991f17c7383e255cceb05eb71e543ae9d6e
use super::util::convert_json_value_to_nu_value; use crate::state::State; use crate::cli::doc_upsert::{build_batched_kv_items, prime_manifest_if_required}; use crate::cli::util::cluster_identifiers_from; use crate::client::KeyValueRequest; use futures::stream::FuturesUnordered; use futures::StreamExt; use log::debug; use nu_engine::{CommandArgs, Example}; use nu_errors::ShellError; use nu_protocol::{ MaybeOwned, Primitive, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, }; use nu_source::Tag; use nu_stream::OutputStream; use std::ops::Add; use std::sync::{Arc, Mutex}; use tokio::runtime::Runtime; use tokio::time::Instant; pub struct DocGet { state: Arc<Mutex<State>>, } impl DocGet { pub fn new(state: Arc<Mutex<State>>) -> Self { Self { state } } } impl nu_engine::WholeStreamCommand for DocGet { fn name(&self) -> &str { "doc get" } fn signature(&self) -> Signature { Signature::build("doc get") .optional("id", SyntaxShape::String, "the document id") .named( "id-column", SyntaxShape::String, "the name of the id column if used with an input stream", None, ) .named( "bucket", SyntaxShape::String, "the name of the bucket", None, ) .named("scope", SyntaxShape::String, "the name of the scope", None) .named( "collection", SyntaxShape::String, "the name of the collection", None, ) .named( "clusters", SyntaxShape::String, "the clusters which should be contacted", None, ) .named( "batch-size", SyntaxShape::Number, "the maximum number of items to batch send at a time", None, ) } fn usage(&self) -> &str { "Fetches a document through the data service" } fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> { run_get(self.state.clone(), args) } fn examples(&self) -> Vec<Example> { vec![ Example { description: "Fetches a single document with the ID as an argument", example: "doc get my_doc_id", result: None, }, Example { description: "Fetches multiple documents with IDs from the previous command", example: "echo [[id]; [airline_10] [airline_11]] | doc get", 
result: None, }, ] } } fn run_get(state: Arc<Mutex<State>>, mut args: CommandArgs) -> Result<OutputStream, ShellError> { let ctrl_c = args.ctrl_c(); let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?; let batch_size: Option<i32> = args.get_flag("batch-size")?; let id_column: String = args.get_flag("id-column")?.unwrap_or_else(|| "id".into()); let ids = ids_from_input(&mut args, id_column.clone())?; let mut workers = FuturesUnordered::new(); let guard = state.lock().unwrap(); let mut all_ids: Vec<Vec<String>> = vec![]; if let Some(size) = batch_size { all_ids = build_batched_kv_items(size as u32, ids.clone()); } let mut results = vec![]; for identifier in cluster_identifiers { let active_cluster = match guard.clusters().get(&identifier) { Some(c) => c, None => { return Err(ShellError::unexpected("Cluster not found")); } }; let bucket = match args .get_flag("bucket")? .or_else(|| active_cluster.active_bucket()) { Some(v) => Ok(v), None => Err(ShellError::unexpected( "Could not auto-select a bucket - please use --bucket instead".to_string(), )), }?; let scope = match args.get_flag("scope")? { Some(s) => s, None => match active_cluster.active_scope() { Some(s) => s, None => "".into(), }, }; let collection = match args.get_flag("collection")? 
{ Some(c) => c, None => match active_cluster.active_collection() { Some(c) => c, None => "".into(), }, }; if all_ids.is_empty() { all_ids = build_batched_kv_items(active_cluster.kv_batch_size(), ids.clone()); } debug!("Running kv get for docs {:?}", &ids); let rt = Runtime::new().unwrap(); let deadline = Instant::now().add(active_cluster.timeouts().data_timeout()); let mut client = rt.block_on(active_cluster.cluster().key_value_client( bucket.clone(), deadline, ctrl_c.clone(), ))?; prime_manifest_if_required( &rt, scope.clone(), collection.clone(), ctrl_c.clone(), Instant::now().add(active_cluster.timeouts().data_timeout()), &mut client, )?; let client = Arc::new(client); for ids in all_ids.clone() { for id in ids { let deadline = Instant::now().add(active_cluster.timeouts().data_timeout()); let scope = scope.clone(); let collection = collection.clone(); let ctrl_c = ctrl_c.clone(); let id = id.clone(); let client = client.clone(); workers.push(async move { client .request( KeyValueRequest::Get { key: id }, scope, collection, deadline, ctrl_c, ) .await }); } rt.block_on(async { while let Some(response) = workers.next().await { match response { Ok(mut res) => { let tag = Tag::default(); let mut collected = TaggedDictBuilder::new(&tag); collected.insert_value(&id_column, res.key()); collected.insert_value( "cas", UntaggedValue::int(res.cas() as i64).into_untagged_value(), ); let content = res.content().unwrap(); match convert_json_value_to_nu_value(&content, Tag::default()) { Ok(c) => { collected.insert_value("content", c); collected.insert_value("error", "".to_string()); } Err(e) => { collected.insert_value("content", "".to_string()); collected.insert_value("error", e.to_string()); } } collected.insert_value("cluster", identifier.clone()); results.push(collected.into_value()); } Err(e) => { let tag = Tag::default(); let mut collected = TaggedDictBuilder::new(&tag); collected.insert_value( &id_column, e.key().unwrap_or_else(|| "".to_string()), ); 
collected.insert_value("cas", "".to_string()); collected.insert_value("content", "".to_string()); collected.insert_value("error", e.to_string()); collected.insert_value("cluster", identifier.clone()); results.push(collected.into_value()); } } } }); } } Ok(OutputStream::from(results)) } pub(crate) fn ids_from_input( args: &mut CommandArgs, id_column: String, ) -> Result<Vec<String>, ShellError> { let mut ids = vec![]; for item in &mut args.input { let untagged = item.into(); match untagged { UntaggedValue::Primitive(Primitive::String(s)) => ids.push(s.clone()), UntaggedValue::Row(d) => { if let MaybeOwned::Borrowed(d) = d.get_data(id_column.as_ref()) { let untagged = &d.value; if let UntaggedValue::Primitive(Primitive::String(s)) = untagged { ids.push(s.clone()) } } } _ => {} } } if let Some(id) = args.opt(0)? { ids.push(id); } Ok(ids) }
use super::util::convert_json_value_to_nu_value; use crate::state::State; use crate::cli::doc_upsert::{build_batched_kv_items, prime_manifest_if_required}; use crate::cli::util::cluster_identifiers_from; use crate::client::KeyValueRequest; use futures::stream::FuturesUnordered; use futures::StreamExt; use log::debug; use nu_engine::{CommandArgs, Example}; use nu_errors::ShellError; use nu_protocol::{ MaybeOwned, Primitive, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, }; use nu_source::Tag; use nu_stream::OutputStream; use std::ops::Add; use std::sync::{Arc, Mutex}; use tokio::runtime::Runtime; use tokio::time::Instant; pub struct DocGet { state: Arc<Mutex<State>>, } impl DocGet { pub fn new(state: Arc<Mutex<State>>) -> Self { Self { state } } } impl nu_engine::WholeStreamCommand for DocGet { fn name(&self) -> &str { "doc get" } fn signature(&self) -> Signature { Signature::build("doc get") .optional("id", SyntaxShape::String, "the document id") .named( "id-column", SyntaxShape::String, "the name of the id column if used with an input stream", None, ) .named( "bucket", SyntaxShape::String, "the name of the bucket", None, ) .named("scope", SyntaxShape::String, "the name of the scope", None) .named( "collection", SyntaxShape::String, "the name of the collection", None, ) .named( "clusters", SyntaxShape::String, "the clusters which should be contacted", None, ) .named( "batch-size", SyntaxShape::Number, "the maximum number of items to batch send at a time", None, ) } fn usage(&self) -> &str { "Fetches a document through the data service" } fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> { run_get(self.state.clone(), args) } fn examples(&self) -> Vec<Example> { vec![ Example { description: "Fetches a single document with the ID as an argument", example: "doc get my_doc_id", result: None, }, Example { description: "Fetches multiple documents with IDs from the previous com
} fn run_get(state: Arc<Mutex<State>>, mut args: CommandArgs) -> Result<OutputStream, ShellError> { let ctrl_c = args.ctrl_c(); let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?; let batch_size: Option<i32> = args.get_flag("batch-size")?; let id_column: String = args.get_flag("id-column")?.unwrap_or_else(|| "id".into()); let ids = ids_from_input(&mut args, id_column.clone())?; let mut workers = FuturesUnordered::new(); let guard = state.lock().unwrap(); let mut all_ids: Vec<Vec<String>> = vec![]; if let Some(size) = batch_size { all_ids = build_batched_kv_items(size as u32, ids.clone()); } let mut results = vec![]; for identifier in cluster_identifiers { let active_cluster = match guard.clusters().get(&identifier) { Some(c) => c, None => { return Err(ShellError::unexpected("Cluster not found")); } }; let bucket = match args .get_flag("bucket")? .or_else(|| active_cluster.active_bucket()) { Some(v) => Ok(v), None => Err(ShellError::unexpected( "Could not auto-select a bucket - please use --bucket instead".to_string(), )), }?; let scope = match args.get_flag("scope")? { Some(s) => s, None => match active_cluster.active_scope() { Some(s) => s, None => "".into(), }, }; let collection = match args.get_flag("collection")? 
{ Some(c) => c, None => match active_cluster.active_collection() { Some(c) => c, None => "".into(), }, }; if all_ids.is_empty() { all_ids = build_batched_kv_items(active_cluster.kv_batch_size(), ids.clone()); } debug!("Running kv get for docs {:?}", &ids); let rt = Runtime::new().unwrap(); let deadline = Instant::now().add(active_cluster.timeouts().data_timeout()); let mut client = rt.block_on(active_cluster.cluster().key_value_client( bucket.clone(), deadline, ctrl_c.clone(), ))?; prime_manifest_if_required( &rt, scope.clone(), collection.clone(), ctrl_c.clone(), Instant::now().add(active_cluster.timeouts().data_timeout()), &mut client, )?; let client = Arc::new(client); for ids in all_ids.clone() { for id in ids { let deadline = Instant::now().add(active_cluster.timeouts().data_timeout()); let scope = scope.clone(); let collection = collection.clone(); let ctrl_c = ctrl_c.clone(); let id = id.clone(); let client = client.clone(); workers.push(async move { client .request( KeyValueRequest::Get { key: id }, scope, collection, deadline, ctrl_c, ) .await }); } rt.block_on(async { while let Some(response) = workers.next().await { match response { Ok(mut res) => { let tag = Tag::default(); let mut collected = TaggedDictBuilder::new(&tag); collected.insert_value(&id_column, res.key()); collected.insert_value( "cas", UntaggedValue::int(res.cas() as i64).into_untagged_value(), ); let content = res.content().unwrap(); match convert_json_value_to_nu_value(&content, Tag::default()) { Ok(c) => { collected.insert_value("content", c); collected.insert_value("error", "".to_string()); } Err(e) => { collected.insert_value("content", "".to_string()); collected.insert_value("error", e.to_string()); } } collected.insert_value("cluster", identifier.clone()); results.push(collected.into_value()); } Err(e) => { let tag = Tag::default(); let mut collected = TaggedDictBuilder::new(&tag); collected.insert_value( &id_column, e.key().unwrap_or_else(|| "".to_string()), ); 
collected.insert_value("cas", "".to_string()); collected.insert_value("content", "".to_string()); collected.insert_value("error", e.to_string()); collected.insert_value("cluster", identifier.clone()); results.push(collected.into_value()); } } } }); } } Ok(OutputStream::from(results)) } pub(crate) fn ids_from_input( args: &mut CommandArgs, id_column: String, ) -> Result<Vec<String>, ShellError> { let mut ids = vec![]; for item in &mut args.input { let untagged = item.into(); match untagged { UntaggedValue::Primitive(Primitive::String(s)) => ids.push(s.clone()), UntaggedValue::Row(d) => { if let MaybeOwned::Borrowed(d) = d.get_data(id_column.as_ref()) { let untagged = &d.value; if let UntaggedValue::Primitive(Primitive::String(s)) = untagged { ids.push(s.clone()) } } } _ => {} } } if let Some(id) = args.opt(0)? { ids.push(id); } Ok(ids) }
mand", example: "echo [[id]; [airline_10] [airline_11]] | doc get", result: None, }, ] }
function_block-function_prefixed
[ { "content": "fn buckets_get(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let bucket: String = args.req(0)?;\n\n\n\n debug!(\"Running buckets get for bucket {:?}\", &bucket);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n if let Some(plane) = cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n", "file_path": "src/cli/buckets_get.rs", "rank": 0, "score": 375815.32895218005 }, { "content": "fn clusters_get(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let name: String = args.req(0)?;\n\n\n\n debug!(\"Running clouds clusters get for {}\", &name);\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n\n\n let deadline = Instant::now().add(control.timeout());\n\n let cluster_id = find_cloud_cluster_id(ctrl_c.clone(), name, &client, deadline)?;\n\n let response =\n\n client.cloud_request(CloudRequest::GetCluster { cluster_id }, deadline, ctrl_c)?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n let cluster: JSONCloudCluster = serde_json::from_str(response.content())?;\n\n\n\n let mut collected = TaggedDictBuilder::new(Tag::default());\n", "file_path": "src/cli/clouds_clusters_get.rs", "rank": 1, "score": 370026.75717000064 }, { "content": "fn scopes_get(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let cluster_identifiers = 
cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"scopes get cannot be run against cloud clusters\",\n\n )?;\n\n\n\n let bucket = match args.get_flag(\"bucket\")? {\n", "file_path": "src/cli/scopes.rs", "rank": 2, "score": 369385.9182692157 }, { "content": "fn clusters(args: CommandArgs, state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let identifiers = cluster_identifiers_from(&state, &args, false)?;\n\n\n\n let active = state.lock().unwrap().active();\n\n let clusters = state\n\n .lock()\n\n .unwrap()\n\n .clusters()\n\n .iter()\n\n .filter(|(k, _)| identifiers.contains(k))\n\n .map(|(k, v)| {\n\n let mut collected = TaggedDictBuilder::new(Tag::default());\n\n collected.insert_untagged(\"active\", UntaggedValue::boolean(k == &active));\n\n collected.insert_value(\"tls\", UntaggedValue::boolean(v.tls_config().enabled()));\n\n collected.insert_value(\"identifier\", k.clone());\n\n collected.insert_value(\"username\", String::from(v.username()));\n\n collected.insert_value(\n\n \"cloud_organization\",\n\n v.cloud_org().unwrap_or_else(|| \"\".to_string()),\n\n );\n\n collected.into_value()\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n Ok(clusters.into())\n\n}\n", "file_path": "src/cli/clusters.rs", "rank": 4, "score": 347564.1266073813 }, { "content": "fn buckets(args: CommandArgs, state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let bucket_name = args.req(0)?;\n\n\n\n let guard = state.lock().unwrap();\n\n let active_cluster = guard.active_cluster();\n\n let cluster = active_cluster.cluster();\n\n\n\n validate_is_not_cloud(\n\n 
active_cluster,\n\n \"buckets config cannot be run against cloud clusters\",\n\n )?;\n\n\n\n let response = cluster.http_client().management_request(\n\n ManagementRequest::GetBucket { name: bucket_name },\n\n Instant::now().add(active_cluster.timeouts().management_timeout()),\n\n ctrl_c,\n\n )?;\n\n\n\n let content = serde_json::from_str(response.content())?;\n\n let converted = convert_json_value_to_nu_value(&content, Tag::default())?;\n\n\n\n Ok(vec![converted].into())\n\n}\n", "file_path": "src/cli/buckets_config.rs", "rank": 5, "score": 341789.56893246644 }, { "content": "fn run_get(state: Arc<Mutex<State>>, mut args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let id_column = args\n\n .get_flag(\"id-column\")?\n\n .unwrap_or_else(|| String::from(\"id\"));\n\n\n\n let ids = ids_from_input(&mut args, id_column)?;\n\n let batch_size: Option<i32> = args.get_flag(\"batch-size\")?;\n\n let mut all_ids: Vec<Vec<String>> = vec![];\n\n if let Some(size) = batch_size {\n\n all_ids = build_batched_kv_items(size as u32, ids.clone());\n\n }\n\n\n\n let bucket_flag = args.get_flag(\"bucket\")?;\n\n let scope_flag = args.get_flag(\"scope\")?;\n\n let collection_flag = args.get_flag(\"collection\")?;\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n", "file_path": "src/cli/doc_remove.rs", "rank": 6, "score": 341081.34180581616 }, { "content": "fn scopes_create(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let scope: String = args.req(0)?;\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n 
active_cluster,\n\n \"scopes create cannot be run against cloud clusters\",\n\n )?;\n\n\n\n let bucket = match args.get_flag(\"bucket\")? {\n", "file_path": "src/cli/scopes_create.rs", "rank": 7, "score": 336620.7969926261 }, { "content": "fn scopes_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let scope: String = args.req(0)?;\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"scopes create cannot be run against cloud clusters\",\n\n )?;\n\n\n\n let bucket = match args.get_flag(\"bucket\")? {\n", "file_path": "src/cli/scopes_drop.rs", "rank": 8, "score": 336620.79699262616 }, { "content": "fn use_cmd(args: CommandArgs, state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let show_cloud = args.has_flag(\"cloud\");\n\n let show_timeouts = args.has_flag(\"timeouts\");\n\n\n\n let guard = state.lock().unwrap();\n\n let active = guard.active_cluster();\n\n let mut using_now = TaggedDictBuilder::new(Tag::default());\n\n if show_cloud {\n\n let project = match guard.active_cloud() {\n\n Ok(c) => c.active_project().unwrap_or_else(|| \"\".to_string()),\n\n Err(_e) => \"\".to_string(),\n\n };\n\n\n\n using_now.insert_value(\n\n \"cloud-organization\",\n\n guard\n\n .active_cloud_org_name()\n\n .unwrap_or_else(|| String::from(\"\")),\n\n );\n\n using_now.insert_value(\n", "file_path": "src/cli/use_cmd.rs", "rank": 9, "score": 336599.444076682 }, { "content": "fn users_get(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let username: String = args.req(0)?;\n\n\n\n 
debug!(\"Running users get {}\", username);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n let mut stream: Vec<Value> = if let Some(plane) = active_cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n", "file_path": "src/cli/users_get.rs", "rank": 10, "score": 336569.9499490569 }, { "content": "fn cloud_clusters(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n debug!(\"Running clouds clusters\");\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n let response = client.cloud_request(\n\n CloudRequest::GetClusters {},\n\n Instant::now().add(control.timeout()),\n\n ctrl_c,\n\n )?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n\n\n let content: JSONCloudClustersSummaries = serde_json::from_str(response.content())?;\n\n\n\n let mut results = vec![];\n", "file_path": "src/cli/clouds_clusters.rs", "rank": 11, "score": 336526.756956572 }, { "content": "fn buckets_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let name: String = args.req(0)?;\n\n let guard = state.lock().unwrap();\n\n\n\n debug!(\"Running buckets drop for bucket {:?}\", &name);\n\n\n\n for identifier in cluster_identifiers {\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not 
found\"));\n\n }\n\n };\n\n\n\n let result: HttpResponse;\n\n if let Some(plane) = cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n", "file_path": "src/cli/buckets_drop.rs", "rank": 12, "score": 336408.8829314826 }, { "content": "fn buckets_create(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let name: String = args.req(0)?;\n\n let ram = args.req(1)?;\n\n\n\n let bucket_type: Option<String> = args.get_flag(\"type\")?;\n\n let replicas: Option<i32> = args.get_flag(\"replicas\")?;\n\n let flush = args.get_flag(\"flush\")?.unwrap_or(false);\n\n let durability: Option<String> = args.get_flag(\"durability\")?;\n\n let expiry = args.get_flag(\"expiry\")?;\n\n\n\n debug!(\"Running buckets create for bucket {}\", &name);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let mut builder = BucketSettingsBuilder::new(name).ram_quota_mb(ram);\n\n if let Some(ref t) = bucket_type {\n\n builder = builder.bucket_type(match BucketType::try_from(t.as_str()) {\n\n Ok(bt) => bt,\n", "file_path": "src/cli/buckets_create.rs", "rank": 13, "score": 336408.8829314826 }, { "content": "fn buckets_flush(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let name: String = args.req(0)?;\n\n let bucket: String = args.get_flag(\"bucket\")?.unwrap_or_else(|| \"\".into());\n\n\n\n debug!(\"Running buckets flush for bucket {:?}\", &bucket);\n\n\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n cluster,\n\n \"buckets flush cannot be run 
against cloud clusters\",\n", "file_path": "src/cli/buckets_flush.rs", "rank": 14, "score": 336408.8829314826 }, { "content": "fn buckets_update(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let name: String = args.req(0)?;\n\n let ram = args.get_flag(\"ram\")?;\n\n let replicas = args.get_flag(\"replicas\")?;\n\n let flush = args.get_flag(\"flush\")?.unwrap_or(false);\n\n let durability = args.get_flag(\"durability\")?;\n\n let expiry = args.get_flag(\"expiry\")?;\n\n\n\n debug!(\"Running buckets update for bucket {}\", &name);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n", "file_path": "src/cli/buckets_update.rs", "rank": 15, "score": 336408.8829314826 }, { "content": "fn clusters_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let name: String = args.req(0)?;\n\n\n\n debug!(\"Running clouds clusters drop for {}\", &name);\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n\n\n let deadline = Instant::now().add(control.timeout());\n\n let cluster_id = find_cloud_cluster_id(ctrl_c.clone(), name, &client, deadline)?;\n\n let response =\n\n client.cloud_request(CloudRequest::DeleteCluster { cluster_id }, deadline, ctrl_c)?;\n\n if response.status() != 202 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n\n\n Ok(OutputStream::empty())\n\n}\n", "file_path": "src/cli/clouds_clusters_drop.rs", "rank": 16, "score": 331395.3883841606 }, { "content": "fn users_get_all(state: Arc<Mutex<State>>, args: CommandArgs) -> 
Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n debug!(\"Running users get all\");\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let mut stream: Vec<Value> = if let Some(plane) = active_cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n\n let deadline = Instant::now().add(active_cluster.timeouts().management_timeout());\n\n let cluster_id =\n\n cloud.find_cluster_id(identifier.clone(), deadline.clone(), ctrl_c.clone())?;\n", "file_path": "src/cli/users.rs", "rank": 17, "score": 329120.76874700666 }, { "content": "fn health(args: CommandArgs, state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let mut converted = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n if let Some(plane) = cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?;\n\n let values =\n\n check_cloud_health(&identifier, cloud, cluster.timeouts(), ctrl_c.clone())?;\n\n for value in values {\n", "file_path": "src/cli/clusters_health.rs", "rank": 18, "score": 329090.757710184 }, { "content": "fn dataverses(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = \"SELECT `Bucket`.* FROM `Metadata`.`Bucket`\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, 
&args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n let response = active_cluster\n\n .cluster()\n", "file_path": "src/cli/analytics_buckets.rs", "rank": 19, "score": 329008.8573484847 }, { "content": "fn run_replace(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let results = run_kv_store_ops(state, args, build_req)?;\n\n\n\n Ok(OutputStream::from(results))\n\n}\n", "file_path": "src/cli/doc_replace.rs", "rank": 20, "score": 323764.3983849548 }, { "content": "fn run_upsert(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let results = run_kv_store_ops(state, args, build_req)?;\n\n\n\n Ok(OutputStream::from(results))\n\n}\n\n\n\npub(crate) fn run_kv_store_ops(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n req_builder: fn(String, Vec<u8>, u32) -> KeyValueRequest,\n\n) -> Result<Vec<Value>, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let id_column = args\n\n .get_flag(\"id-column\")?\n\n .unwrap_or_else(|| String::from(\"id\"));\n\n\n\n let content_column = args\n\n .get_flag(\"content-column\")?\n\n .unwrap_or_else(|| String::from(\"content\"));\n", "file_path": "src/cli/doc_upsert.rs", "rank": 21, "score": 323764.3983849549 }, { "content": "fn run_insert(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let results = run_kv_store_ops(state, args, build_req)?;\n\n\n\n Ok(OutputStream::from(results))\n\n}\n", "file_path": "src/cli/doc_insert.rs", "rank": 22, "score": 323764.3983849549 }, { "content": "fn run_fake(_state: Arc<Mutex<State>>, args: CommandArgs) -> Result<ActionStream, ShellError> {\n\n let 
list_functions = args.has_flag(\"list-functions\");\n\n\n\n let ctx = Context::new();\n\n let mut tera = Tera::default();\n\n\n\n register_functions(&mut tera);\n\n\n\n if list_functions {\n\n let generated = tera\n\n .render_str(LIST_FUNCTIONS, &ctx)\n\n .map_err(|e| ShellError::unexpected(format!(\"{}\", e)))?;\n\n let content = serde_json::from_str(&generated)\n\n .map_err(|e| ShellError::unexpected(format!(\"{}\", e)))?;\n\n match content {\n\n serde_json::Value::Array(values) => {\n\n let converted = values.into_iter().map(|v| {\n\n match convert_json_value_to_nu_value(&v, Tag::default()) {\n\n Ok(c) => Ok(ReturnSuccess::Value(c)),\n\n Err(e) => Err(e),\n", "file_path": "src/cli/fake_data.rs", "rank": 23, "score": 315916.1858007701 }, { "content": "fn addresses(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n debug!(\"Running addresses\");\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::untagged_runtime_error(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_cloud(\n\n active_cluster,\n\n \"addresses can only be used with clusters registered to a cloud control pane\",\n\n )?;\n\n\n", "file_path": "src/cli/addresses.rs", "rank": 24, "score": 306555.32006239344 }, { "content": "fn run(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement: String = args.req(0)?;\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n\n\n let scope = args.get_flag(\"scope\")?;\n\n\n\n let with_meta = args.has_flag(\"with-meta\");\n\n\n\n debug!(\"Running analytics query {}\", 
&statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n", "file_path": "src/cli/analytics.rs", "rank": 25, "score": 306555.32006239344 }, { "content": "fn nodes(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n let mut nodes = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n if let Some(plane) = active_cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n\n let deadline = Instant::now().add(active_cluster.timeouts().management_timeout());\n\n let cluster_id =\n\n cloud.find_cluster_id(identifier.clone(), deadline.clone(), ctrl_c.clone())?;\n\n let response = cloud.cloud_request(\n", "file_path": "src/cli/nodes.rs", "rank": 26, "score": 306555.32006239344 }, { "content": "fn run(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n let statement: String = args.req(0)?;\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let bucket = args\n\n .get_flag(\"bucket\")?\n\n .or_else(|| active_cluster.active_bucket());\n\n\n\n let scope = args.get_flag(\"scope\")?;\n", 
"file_path": "src/cli/query.rs", "rank": 27, "score": 306555.32006239344 }, { "content": "fn whoami(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let mut entries = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(cluster, \"whoami cannot be run against cloud clusters\")?;\n\n\n\n let response = cluster.cluster().http_client().management_request(\n\n ManagementRequest::Whoami,\n\n Instant::now().add(cluster.timeouts().management_timeout()),\n\n ctrl_c.clone(),\n\n )?;\n\n let mut content: Map<String, Value> = serde_json::from_str(response.content())?;\n\n content.insert(\"cluster\".into(), json!(identifier.clone()));\n\n let converted = convert_json_value_to_nu_value(&Value::Object(content), Tag::default())?;\n\n entries.push(converted);\n\n }\n\n\n\n Ok(entries.into())\n\n}\n", "file_path": "src/cli/whoami.rs", "rank": 28, "score": 306555.32006239344 }, { "content": "fn clouds(state: Arc<Mutex<State>>, _args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let guard = state.lock().unwrap();\n\n let active_cloud = guard.active_cloud_name().unwrap_or_else(|| \"\".to_string());\n\n let mut results = vec![];\n\n for cloud in guard.clouds() {\n\n let mut collected = TaggedDictBuilder::new(Tag::default());\n\n collected.insert_untagged(\n\n \"active\",\n\n UntaggedValue::boolean(cloud.0.clone() == active_cloud.clone()),\n\n );\n\n collected.insert_value(\"identifier\", cloud.0.clone());\n\n results.push(collected.into_value())\n\n }\n\n\n\n Ok(OutputStream::from(results))\n\n}\n", "file_path": "src/cli/clouds.rs", "rank": 29, "score": 306555.32006239344 }, { "content": "fn 
run(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let index: String = args.req(0)?;\n\n let query: String = args.req(1)?;\n\n\n\n debug!(\"Running search query {} against {}\", &query, &index);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = active_cluster\n\n .cluster()\n", "file_path": "src/cli/search.rs", "rank": 30, "score": 306555.32006239344 }, { "content": "fn projects(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n debug!(\"Running projects\");\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n let response = client.cloud_request(\n\n CloudRequest::GetProjects {},\n\n Instant::now().add(control.timeout()),\n\n ctrl_c,\n\n )?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n\n\n let content: JSONCloudsProjectsResponse = serde_json::from_str(response.content())?;\n\n\n\n let mut results = vec![];\n\n for project in content.items() {\n\n let mut collected = TaggedDictBuilder::new(Tag::default());\n\n collected.insert_value(\"name\", project.name());\n\n collected.insert_value(\"id\", project.id());\n\n results.push(collected.into_value())\n\n }\n\n\n\n Ok(OutputStream::from(results))\n\n}\n", "file_path": "src/cli/projects.rs", "rank": 31, "score": 306555.32006239344 }, { "content": "fn dataverses(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = 
\"SELECT d.* FROM Metadata.`Dataverse` d WHERE d.DataverseName <> \\\"Metadata\\\"\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n let response = active_cluster\n\n .cluster()\n\n .http_client()\n", "file_path": "src/cli/analytics_dataverses.rs", "rank": 32, "score": 301740.6722419938 }, { "content": "fn indexes(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = \"SELECT d.* FROM Metadata.`Index` d WHERE d.DataverseName <> \\\"Metadata\\\"\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n let response = active_cluster\n\n .cluster()\n\n .http_client()\n", "file_path": "src/cli/analytics_indexes.rs", "rank": 33, "score": 301740.6722419938 }, { "content": "fn datasets(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = \"SELECT d.* FROM Metadata.`Dataset` d WHERE d.DataverseName <> \\\"Metadata\\\"\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: 
Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = active_cluster\n\n .cluster()\n\n .http_client()\n", "file_path": "src/cli/analytics_datasets.rs", "rank": 34, "score": 301740.67224199383 }, { "content": "fn run_ping(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let guard = state.lock().unwrap();\n\n\n\n debug!(\"Running ping\");\n\n\n\n let rt = Runtime::new().unwrap();\n\n let clusters_len = cluster_identifiers.len();\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => continue, // This can't actually happen, we filter the clusters in cluster_identifiers_from\n\n };\n\n let deadline = Instant::now().add(cluster.timeouts().management_timeout());\n\n\n\n let client = cluster.cluster().http_client();\n", "file_path": "src/cli/ping.rs", "rank": 35, "score": 301740.6722419938 }, { "content": "fn dataverses(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let statement = \"SELECT `Link`.* FROM `Metadata`.`Link`\";\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running analytics query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = active_cluster\n\n .cluster()\n\n .http_client()\n\n 
.analytics_query_request(\n", "file_path": "src/cli/analytics_links.rs", "rank": 36, "score": 301740.6722419938 }, { "content": "fn indexes(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let with_meta = args.has_flag(\"with-meta\");\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let fetch_defs = args.has_flag(\"definitions\");\n\n\n\n let statement = \"select keyspace_id as `bucket`, name, state, `using` as `type`, ifmissing(condition, null) as condition, ifmissing(is_primary, false) as `primary`, index_key from system:indexes\";\n\n\n\n debug!(\"Running n1ql query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n", "file_path": "src/cli/query_indexes.rs", "rank": 37, "score": 301740.6722419938 }, { "content": "fn run(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let with_meta = args.has_flag(\"with-meta\");\n\n\n\n let statement: String = args.req(0)?;\n\n let statement = format!(\"ADVISE {}\", statement);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n debug!(\"Running n1ql query {}\", &statement);\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = active_cluster.cluster().http_client().query_request(\n", "file_path": "src/cli/query_advise.rs", "rank": 38, "score": 301740.67224199383 }, { "content": "pub fn 
validate_is_not_cloud(cluster: &RemoteCluster, err_msg: &str) -> Result<(), ShellError> {\n\n if cluster.cloud_org().is_some() {\n\n return Err(ShellError::unexpected(err_msg));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub(crate) fn find_project_id(\n\n ctrl_c: Arc<AtomicBool>,\n\n name: String,\n\n client: &Arc<CloudClient>,\n\n deadline: Instant,\n\n) -> Result<String, ShellError> {\n\n let response = client.cloud_request(CloudRequest::GetProjects {}, deadline, ctrl_c)?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n let content: JSONCloudsProjectsResponse = serde_json::from_str(response.content())?;\n\n\n", "file_path": "src/cli/util.rs", "rank": 39, "score": 298063.4967334956 }, { "content": "pub fn validate_is_cloud(cluster: &RemoteCluster, err_msg: &str) -> Result<(), ShellError> {\n\n if cluster.cloud_org().is_none() {\n\n return Err(ShellError::unexpected(err_msg));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/util.rs", "rank": 40, "score": 298063.4967334956 }, { "content": "fn clouds_status(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let all = args.get_flag(\"all\")?.unwrap_or(false);\n\n\n\n debug!(\"Running clouds status\");\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n let response = client.cloud_request(\n\n CloudRequest::GetClouds {},\n\n Instant::now().add(control.timeout()),\n\n ctrl_c,\n\n )?;\n\n if response.status() != 200 {\n\n return Err(ShellError::unexpected(response.content().to_string()));\n\n };\n\n\n\n let content: JSONCloudsResponse = serde_json::from_str(response.content())?;\n\n\n", "file_path": "src/cli/clouds_status.rs", "rank": 41, "score": 297163.5039283595 }, { "content": "fn users_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n 
let username: String = args.req(0)?;\n\n\n\n debug!(\"Running users drop {}\", username);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n let response = if let Some(plane) = active_cluster.cloud_org() {\n\n let cloud = guard.cloud_org_for_cluster(plane)?.client();\n\n let deadline = Instant::now().add(active_cluster.timeouts().management_timeout());\n\n let cluster_id =\n", "file_path": "src/cli/users_drop.rs", "rank": 42, "score": 297163.5039283595 }, { "content": "fn run_async(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n let permission = args.get_flag(\"permission\")?;\n\n\n\n let mut entries = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"user roles cannot be run against cloud clusters\",\n\n )?;\n", "file_path": "src/cli/users_roles.rs", "rank": 43, "score": 297163.5039283595 }, { "content": "fn addresses_add(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let address: String = args.req(0)?;\n\n let duration = args.get_flag(\"duration\")?;\n\n\n\n debug!(\"Running address add for {}\", &address);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let 
active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_cloud(\n\n active_cluster,\n\n \"addresses can only be used with clusters registered to a cloud control pane\",\n", "file_path": "src/cli/addresses_add.rs", "rank": 44, "score": 297163.5039283595 }, { "content": "fn users_upsert(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let username: String = args.req(0)?;\n\n let roles: String = args.req(1)?;\n\n let password = args.get_flag(\"password\")?;\n\n let display_name = args.get_flag(\"display_name\")?;\n\n let groups = args.get_flag(\"groups\")?;\n\n\n\n debug!(\"Running users upsert for user {}\", &username);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n", "file_path": "src/cli/users_upsert.rs", "rank": 45, "score": 297163.5039283595 }, { "content": "fn addresses_drop(state: Arc<Mutex<State>>, args: CommandArgs) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let address: String = args.req(0)?;\n\n\n\n debug!(\"Running address drop for {}\", &address);\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::untagged_runtime_error(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_cloud(\n\n active_cluster,\n\n \"addresses can only be used with clusters registered to a cloud control pane\",\n\n )?;\n", 
"file_path": "src/cli/addresses_drop.rs", "rank": 46, "score": 297163.5039283595 }, { "content": "pub fn update_config_file(guard: &mut MutexGuard<State>) -> Result<(), ShellError> {\n\n let path = match guard.config_path() {\n\n Some(p) => p,\n\n None => {\n\n return Err(ShellError::unexpected(\n\n \"A config path must be discoverable to save config\",\n\n ));\n\n }\n\n };\n\n let mut cluster_configs = Vec::new();\n\n for (identifier, cluster) in guard.clusters() {\n\n cluster_configs.push(ClusterConfig::from((identifier.clone(), cluster)))\n\n }\n\n let mut cloud_configs = Vec::new();\n\n for (identifier, cloud) in guard.clouds() {\n\n cloud_configs.push(CloudConfig::new(identifier.clone(), cloud.active_project()))\n\n }\n\n let mut control_plane_configs = Vec::new();\n\n for (identifier, c) in guard.cloud_orgs() {\n\n control_plane_configs.push(CloudOrganizationConfig::new(\n", "file_path": "src/cli/clusters_register.rs", "rank": 47, "score": 282044.6017613235 }, { "content": "fn run_tutorial_next(state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let guard = state.lock().unwrap();\n\n let tutorial = guard.tutorial();\n\n Ok(OutputStream::one(\n\n UntaggedValue::string(tutorial.next_tutorial_step()).into_value(Tag::unknown()),\n\n ))\n\n}\n", "file_path": "src/cli/tutorial_next.rs", "rank": 48, "score": 273401.9216272007 }, { "content": "fn run_tutorial_prev(state: Arc<Mutex<State>>) -> Result<OutputStream, ShellError> {\n\n let guard = state.lock().unwrap();\n\n let tutorial = guard.tutorial();\n\n Ok(OutputStream::one(\n\n UntaggedValue::string(tutorial.prev_tutorial_step()).into_value(Tag::unknown()),\n\n ))\n\n}\n", "file_path": "src/cli/tutorial_prev.rs", "rank": 49, "score": 273401.9216272007 }, { "content": "fn json_list(input: &[Value]) -> Result<Vec<serde_json::Value>, ShellError> {\n\n let mut out = vec![];\n\n\n\n for value in input {\n\n out.push(convert_nu_value_to_json_value(value)?);\n\n }\n\n\n\n Ok(out)\n\n}\n\n\n", "file_path": 
"src/cli/util.rs", "rank": 50, "score": 235153.17257702159 }, { "content": "pub fn in_directory(str: impl AsRef<Path>) -> String {\n\n let path = str.as_ref();\n\n let path = if path.is_relative() {\n\n root().join(path)\n\n } else {\n\n path.to_path_buf()\n\n };\n\n\n\n path.display().to_string()\n\n}\n", "file_path": "tests/integration/util/fs.rs", "rank": 51, "score": 229325.31457367493 }, { "content": "fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n\n\n let mut spec = TaggedDictBuilder::new(tag);\n\n\n\n spec.insert_untagged(\"name\", UntaggedValue::string(name));\n\n spec.insert_untagged(\"type\", UntaggedValue::string(ty));\n\n spec.insert_untagged(\n\n \"required\",\n\n UntaggedValue::string(if required { \"yes\" } else { \"no\" }),\n\n );\n\n\n\n spec.into_value()\n\n}\n\n\n", "file_path": "src/cli/help.rs", "rank": 52, "score": 223249.03180508787 }, { "content": "pub fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Value {\n\n let tag = tag.into();\n\n let mut sig = TaggedListBuilder::new(&tag);\n\n\n\n for arg in signature.positional.iter() {\n\n let is_required = matches!(arg.0, PositionalType::Mandatory(_, _));\n\n\n\n sig.push_value(for_spec(arg.0.name(), \"argument\", is_required, &tag));\n\n }\n\n\n\n if signature.rest_positional.is_some() {\n\n let is_required = false;\n\n sig.push_value(for_spec(\"rest\", \"argument\", is_required, &tag));\n\n }\n\n\n\n for (name, ty) in signature.named.iter() {\n\n match ty.0 {\n\n NamedType::Mandatory(_, _) => sig.push_value(for_spec(name, \"flag\", true, &tag)),\n\n NamedType::Optional(_, _) => sig.push_value(for_spec(name, \"flag\", false, &tag)),\n\n NamedType::Switch(_) => sig.push_value(for_spec(name, \"switch\", false, &tag)),\n\n }\n\n }\n\n\n\n sig.into_value()\n\n}\n\n\n", "file_path": "src/cli/help.rs", "rank": 53, "score": 213702.03957347653 }, { "content": "fn help(args: CommandArgs) -> Result<ActionStream, ShellError> 
{\n\n let name = args.call_info.name_tag.clone();\n\n let scope = args.scope().clone();\n\n let rest: Vec<Tagged<String>> = args.rest(0)?;\n\n\n\n if !rest.is_empty() {\n\n if rest[0].item == \"commands\" {\n\n let mut sorted_names = scope.get_command_names();\n\n sorted_names.sort();\n\n\n\n let (mut subcommand_names, command_names) = sorted_names\n\n .into_iter()\n\n // Internal only commands shouldn't be displayed\n\n .filter(|cmd_name| {\n\n scope\n\n .get_command(&cmd_name)\n\n .filter(|command| !command.is_private())\n\n .is_some()\n\n })\n\n .partition::<Vec<_>, _>(|cmd_name| cmd_name.contains(' '));\n", "file_path": "src/cli/help.rs", "rank": 54, "score": 210927.11191550578 }, { "content": "/// Dumps a packet into a easily debuggable string format.\n\n///\n\n/// Note that this is only really suitable when you want to println a full\n\n/// packet, but nonetheless it is helpful for testing.\n\npub fn _dump(input: &Bytes) -> String {\n\n if input.len() < HEADER_SIZE {\n\n return \"Received less bytes than a KV header, invalid data?\".into();\n\n }\n\n\n\n let mut slice = input.slice(0..input.len());\n\n\n\n let mut output = String::new();\n\n output.push_str(\"--- Packet Dump Info --\\n\");\n\n let magic = slice.get_u8();\n\n output.push_str(&format!(\n\n \" Magic: 0x{:x} ({:?})\\n\",\n\n magic,\n\n Magic::from(magic)\n\n ));\n\n let opcode = slice.get_u8();\n\n output.push_str(&format!(\n\n \" Opcode: 0x{:x} ({:?})\\n\",\n\n opcode,\n\n Opcode::try_from(opcode).unwrap()\n", "file_path": "src/client/protocol.rs", "rank": 55, "score": 187521.42394671845 }, { "content": "/// Takes a full packet and extracts the body as a slice if possible.\n\npub fn _body(input: &Bytes) -> Option<Bytes> {\n\n let mut slice = input.slice(0..input.len());\n\n\n\n let flexible = Magic::from(slice.get_u8()).is_flexible();\n\n\n\n let flexible_extras_len = if flexible {\n\n slice.advance(1);\n\n slice.get_u8()\n\n } else {\n\n 0\n\n } as usize;\n\n let key_len = if flexible {\n\n 
slice.get_u8() as u16\n\n } else {\n\n slice.advance(1);\n\n slice.get_u16()\n\n } as usize;\n\n let extras_len = slice.get_u8() as usize;\n\n slice.advance(3);\n\n let total_body_len = slice.get_u32() as usize;\n\n let body_len = total_body_len - key_len - extras_len - flexible_extras_len;\n\n\n\n if body_len > 0 {\n\n Some(input.slice((HEADER_SIZE + flexible_extras_len + extras_len + key_len)..))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/client/protocol.rs", "rank": 56, "score": 180331.7155444498 }, { "content": "fn grab_bucket_names(\n\n cluster: &RemoteCluster,\n\n ctrl_c: Arc<AtomicBool>,\n\n) -> Result<Vec<String>, ShellError> {\n\n let response = cluster.cluster().http_client().management_request(\n\n ManagementRequest::GetBuckets,\n\n Instant::now().add(cluster.timeouts().management_timeout()),\n\n ctrl_c,\n\n )?;\n\n let resp: Vec<BucketInfo> = serde_json::from_str(response.content())?;\n\n Ok(resp.into_iter().map(|b| b.name).collect::<Vec<_>>())\n\n}\n\n\n", "file_path": "src/cli/clusters_health.rs", "rank": 57, "score": 177928.98095543822 }, { "content": "// Sad panda noises\n\npub fn tests(config: Arc<TestConfig>) -> Vec<TestFn> {\n\n vec![\n\n TestFn::new(\n\n \"test_get_a_document\",\n\n Box::pin(doc_get::test_get_a_document(config.clone())),\n\n ),\n\n TestFn::new(\n\n \"test_get_a_document_not_found\",\n\n Box::pin(doc_get::test_get_a_document_not_found(config.clone())),\n\n ),\n\n TestFn::new(\n\n \"test_upserts_a_document\",\n\n Box::pin(doc_upsert::test_upserts_a_document(config.clone())),\n\n ),\n\n ]\n\n}\n\n\n\npub struct TestFn {\n\n pub name: String,\n\n pub func: Pin<Box<dyn Future<Output = bool> + Send + 'static>>,\n", "file_path": "tests/integration/test_functions.rs", "rank": 58, "score": 176880.1785111238 }, { "content": "pub fn namespace_from_args(\n\n bucket_flag: Option<String>,\n\n scope_flag: Option<String>,\n\n collection_flag: Option<String>,\n\n active_cluster: &RemoteCluster,\n\n) -> Result<(String, String, 
String), ShellError> {\n\n let bucket = match bucket_flag.or_else(|| active_cluster.active_bucket()) {\n\n Some(v) => Ok(v),\n\n None => Err(ShellError::unexpected(\n\n \"Could not auto-select a bucket - please use --bucket instead\".to_string(),\n\n )),\n\n }?;\n\n\n\n let scope = match scope_flag {\n\n Some(s) => s,\n\n None => match active_cluster.active_scope() {\n\n Some(s) => s,\n\n None => \"\".into(),\n\n },\n\n };\n", "file_path": "src/cli/util.rs", "rank": 59, "score": 175202.43288773706 }, { "content": "pub fn cluster_identifiers_from(\n\n state: &Arc<Mutex<State>>,\n\n args: &CommandArgs,\n\n default_active: bool,\n\n) -> Result<Vec<String>, ShellError> {\n\n let state = state.lock().unwrap();\n\n let identifier_arg: String = match args.get_flag(\"clusters\")? {\n\n Some(arg) => arg,\n\n None => {\n\n if default_active {\n\n return Ok(vec![state.active()]);\n\n }\n\n \"\".into()\n\n }\n\n };\n\n\n\n let re = match Regex::new(identifier_arg.as_str()) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n return Err(ShellError::unexpected(format!(\n", "file_path": "src/cli/util.rs", "rank": 60, "score": 175175.33753952273 }, { "content": "// Adapted from https://github.com/nushell/nushell/blob/main/crates/nu-command/src/commands/formats/to/json.rs\n\npub fn convert_nu_value_to_json_value(v: &Value) -> Result<serde_json::Value, ShellError> {\n\n Ok(match &v.value {\n\n UntaggedValue::Primitive(Primitive::Boolean(b)) => serde_json::Value::Bool(*b),\n\n UntaggedValue::Primitive(Primitive::Filesize(b)) => serde_json::Value::Number(\n\n serde_json::Number::from(b.to_u64().expect(\"What about really big numbers\")),\n\n ),\n\n UntaggedValue::Primitive(Primitive::Duration(i)) => {\n\n serde_json::Value::String(i.to_string())\n\n }\n\n UntaggedValue::Primitive(Primitive::Date(d)) => serde_json::Value::String(d.to_string()),\n\n UntaggedValue::Primitive(Primitive::EndOfStream) => serde_json::Value::Null,\n\n UntaggedValue::Primitive(Primitive::BeginningOfStream) => 
serde_json::Value::Null,\n\n UntaggedValue::Primitive(Primitive::Decimal(f)) => {\n\n if let Some(f) = f.to_f64() {\n\n if let Some(num) = serde_json::Number::from_f64(\n\n f.to_f64().expect(\"TODO: What about really big decimals?\"),\n\n ) {\n\n serde_json::Value::Number(num)\n\n } else {\n\n return Err(ShellError::labeled_error(\n", "file_path": "src/cli/util.rs", "rank": 61, "score": 174287.4564345782 }, { "content": "fn collections_get(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n\n\n let scope: Option<String> = args.get_flag(\"scope\")?;\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n", "file_path": "src/cli/collections.rs", "rank": 62, "score": 161791.12583758993 }, { "content": "fn buckets_get_all(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n\n\n debug!(\"Running buckets\");\n\n\n\n let guard = state.lock().unwrap();\n\n let mut results = vec![];\n\n for identifier in cluster_identifiers {\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n", "file_path": "src/cli/buckets.rs", "rank": 63, "score": 161536.94209249923 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct SearchResultData {\n\n hits: Vec<SearchResultHit>,\n\n}\n\n\n", "file_path": "src/cli/search.rs", "rank": 64, "score": 151524.2334930274 }, { 
"content": "#[derive(Debug, Deserialize)]\n\nstruct BucketStats {\n\n op: BucketStatsOp,\n\n}\n\n\n", "file_path": "src/cli/clusters_health.rs", "rank": 65, "score": 151346.74139657448 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct BucketInfo {\n\n name: String,\n\n}\n\n\n", "file_path": "src/cli/clusters_health.rs", "rank": 66, "score": 151346.74139657448 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct CollectionManifestScope {\n\n uid: String,\n\n name: String,\n\n collections: Vec<CollectionManifestCollection>,\n\n}\n\n\n", "file_path": "src/client/kv_client.rs", "rank": 67, "score": 147045.10271692142 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct BucketStatsSamples {\n\n #[serde(rename = \"vb_active_resident_items_ratio\")]\n\n active_resident_ratios: Vec<u32>,\n\n}\n", "file_path": "src/cli/clusters_health.rs", "rank": 68, "score": 146773.4890530506 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct BucketStatsOp {\n\n samples: BucketStatsSamples,\n\n}\n\n\n", "file_path": "src/cli/clusters_health.rs", "rank": 69, "score": 146773.4890530506 }, { "content": "fn validate_hostnames(hostnames: Vec<String>) -> Vec<String> {\n\n let mut validated = vec![];\n\n for hostname in hostnames {\n\n let host = if let Some(stripped_couchbase) = hostname.strip_prefix(\"couchbase://\") {\n\n if let Some(stripped_port) = stripped_couchbase.strip_suffix(\":11210\") {\n\n stripped_port.to_string()\n\n } else if stripped_couchbase.contains(':') {\n\n error!(\"Couchbase scheme and non-default port detected, http scheme must be used with custom port (management port)\");\n\n std::process::exit(1);\n\n } else {\n\n stripped_couchbase.to_string()\n\n }\n\n } else if let Some(stripped_couchbase) = hostname.strip_prefix(\"couchbases://\") {\n\n if let Some(stripped_port) = stripped_couchbase.strip_suffix(\":11211\") {\n\n stripped_port.to_string()\n\n } else if stripped_couchbase.contains(':') {\n\n error!(\"Couchbases scheme and non-default port 
detected, http scheme must be used with custom port (management port)\");\n\n std::process::exit(1);\n\n } else {\n\n stripped_couchbase.to_string()\n", "file_path": "src/main.rs", "rank": 70, "score": 139294.01043150618 }, { "content": "/// Creates a regular, non-flex response with all fields necessary.\n\npub fn _response(\n\n opcode: Opcode,\n\n datatype: u8,\n\n status: u16,\n\n opaque: u32,\n\n cas: u64,\n\n key: Option<Bytes>,\n\n extras: Option<Bytes>,\n\n body: Option<Bytes>,\n\n) -> BytesMut {\n\n let key_size = key.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let extras_size = extras.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let total_body_size =\n\n key_size + extras_size + body.as_ref().map(|b| b.len()).unwrap_or_default();\n\n\n\n let mut builder = BytesMut::with_capacity(HEADER_SIZE + total_body_size);\n\n builder.put_u8(Magic::Response.encoded());\n\n builder.put_u8(opcode.encoded());\n\n builder.put_u16(key_size as u16);\n\n builder.put_u8(extras_size as u8);\n", "file_path": "src/client/protocol.rs", "rank": 71, "score": 139030.78663344754 }, { "content": "/// Creates a regular, non-flex request with all fields necessary.\n\npub fn request(req: KvRequest, collections_enabled: bool) -> BytesMut {\n\n let key = match req.key {\n\n Some(k) => {\n\n if collections_enabled {\n\n let cid = make_uleb128_32(k, req.collection_id);\n\n Some(cid)\n\n } else {\n\n Some(k)\n\n }\n\n }\n\n None => None,\n\n };\n\n\n\n let key_size = key.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let extras_size = req.extras.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let total_body_size =\n\n key_size + extras_size + req.body.as_ref().map(|b| b.len()).unwrap_or_default();\n\n\n\n let mut builder = BytesMut::with_capacity(HEADER_SIZE + total_body_size);\n\n builder.put_u8(Magic::Request.encoded());\n", "file_path": "src/client/protocol.rs", "rank": 72, "score": 137834.65456832587 }, { "content": "// Creates a flexible request with optional framing 
extras\n\npub fn _flexible_request(\n\n opcode: Opcode,\n\n datatype: u8,\n\n partition: u16,\n\n opaque: u32,\n\n cas: u64,\n\n key: Option<Bytes>,\n\n framing_extras: Option<Bytes>,\n\n extras: Option<Bytes>,\n\n body: Option<Bytes>,\n\n) -> BytesMut {\n\n let key_size = key.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let extras_size = extras.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let framing_extras_size = framing_extras.as_ref().map(|b| b.len()).unwrap_or_default();\n\n let total_body_size = key_size\n\n + extras_size\n\n + framing_extras_size\n\n + body.as_ref().map(|b| b.len()).unwrap_or_default();\n\n\n\n let mut builder = BytesMut::with_capacity(HEADER_SIZE + total_body_size);\n", "file_path": "src/client/protocol.rs", "rank": 73, "score": 135726.94582561968 }, { "content": "pub fn convert_row_to_nu_value(\n\n v: &serde_json::Value,\n\n tag: impl Into<Tag>,\n\n cluster_identifier: String,\n\n) -> Result<Value, ShellError> {\n\n let tag = tag.into();\n\n\n\n match v {\n\n serde_json::Value::Object(o) => {\n\n let mut collected = TaggedDictBuilder::new(&tag);\n\n for (k, v) in o.iter() {\n\n collected.insert_value(k.clone(), convert_json_value_to_nu_value(v, &tag)?);\n\n }\n\n collected.insert_value(\"cluster\", cluster_identifier);\n\n\n\n Ok(collected.into_value())\n\n }\n\n _ => Err(ShellError::unexpected(\n\n \"row not an object - malformed response\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/cli/util.rs", "rank": 74, "score": 129857.08015514538 }, { "content": "pub fn convert_json_value_to_nu_value(\n\n v: &serde_json::Value,\n\n tag: impl Into<Tag>,\n\n) -> Result<Value, ShellError> {\n\n let tag = tag.into();\n\n let span = tag.span;\n\n\n\n let result = match v {\n\n serde_json::Value::Null => UntaggedValue::Primitive(Primitive::Nothing).into_value(&tag),\n\n serde_json::Value::Bool(b) => UntaggedValue::boolean(*b).into_value(&tag),\n\n serde_json::Value::Number(n) => {\n\n if n.is_i64() {\n\n if let Some(nas) = n.as_i64() {\n\n 
UntaggedValue::int(nas).into_value(&tag)\n\n } else {\n\n return Err(ShellError::unexpected(format!(\n\n \"Could not get value as number {}\",\n\n v\n\n )));\n\n }\n", "file_path": "src/cli/util.rs", "rank": 75, "score": 127238.1029988701 }, { "content": "pub fn binaries() -> PathBuf {\n\n std::env::var(\"CARGO_TARGET_DIR\")\n\n .ok()\n\n .map(|target_dir| PathBuf::from(target_dir).join(\"debug\"))\n\n .unwrap_or_else(|| root().join(\"target/debug\"))\n\n}\n\n\n", "file_path": "tests/integration/util/fs.rs", "rank": 76, "score": 127139.0716546211 }, { "content": "pub fn root() -> PathBuf {\n\n let manifest_dir = if let Ok(manifest_dir) = std::env::var(\"CARGO_MANIFEST_DIR\") {\n\n PathBuf::from(manifest_dir)\n\n } else {\n\n PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n };\n\n\n\n let test_path = manifest_dir.join(\"Cargo.lock\");\n\n if test_path.exists() {\n\n manifest_dir\n\n } else {\n\n manifest_dir\n\n .parent()\n\n .expect(\"Couldn't find the debug binaries directory\")\n\n .parent()\n\n .expect(\"Couldn't find the debug binaries directory\")\n\n .to_path_buf()\n\n }\n\n}\n\n\n", "file_path": "tests/integration/util/fs.rs", "rank": 77, "score": 127139.0716546211 }, { "content": "fn build_req(key: String, value: Vec<u8>, expiry: u32) -> KeyValueRequest {\n\n KeyValueRequest::Insert { key, value, expiry }\n\n}\n\n\n", "file_path": "src/cli/doc_insert.rs", "rank": 78, "score": 127041.42832479897 }, { "content": "fn build_req(key: String, value: Vec<u8>, expiry: u32) -> KeyValueRequest {\n\n KeyValueRequest::Set { key, value, expiry }\n\n}\n\n\n", "file_path": "src/cli/doc_upsert.rs", "rank": 79, "score": 127041.42832479897 }, { "content": "fn build_req(key: String, value: Vec<u8>, expiry: u32) -> KeyValueRequest {\n\n KeyValueRequest::Replace { key, value, expiry }\n\n}\n\n\n", "file_path": "src/cli/doc_replace.rs", "rank": 80, "score": 127041.42832479897 }, { "content": "pub fn executable_path() -> PathBuf {\n\n let mut path = binaries();\n\n 
path.push(\"cbsh\");\n\n path\n\n}\n\n\n", "file_path": "tests/integration/util/fs.rs", "rank": 81, "score": 124520.09449834582 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let mut logger_builder = env_logger::Builder::from_env(\n\n Env::default().default_filter_or(\"info,isahc=error,surf=error\"),\n\n );\n\n logger_builder.format(|buf, record| {\n\n let mut style = buf.style();\n\n style.set_intense(true);\n\n style.set_bold(true);\n\n writeln!(\n\n buf,\n\n \"{}: {}\",\n\n buf.default_styled_level(record.level()),\n\n style.value(record.args())\n\n )\n\n });\n\n\n\n const DEFAULT_PASSWORD: &str = \"password\";\n\n const DEFAULT_HOSTNAME: &str = \"localhost\";\n\n const DEFAULT_USERNAME: &str = \"Administrator\";\n\n\n", "file_path": "src/main.rs", "rank": 82, "score": 123549.06974728026 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct CollectionManifestCollection {\n\n uid: String,\n\n name: String,\n\n #[serde(alias = \"maxTTL\")]\n\n max_ttl: Option<u32>,\n\n}\n\n\n", "file_path": "src/client/kv_client.rs", "rank": 83, "score": 121641.06130671746 }, { "content": "fn make_uleb128_32(key: Bytes, collection_id: u32) -> Bytes {\n\n let mut cid = collection_id;\n\n let mut builder = BytesMut::with_capacity(key.len() + 5);\n\n loop {\n\n let mut c: u8 = (cid & 0x7f) as u8;\n\n cid >>= 7;\n\n if cid != 0 {\n\n c |= 0x80;\n\n }\n\n\n\n builder.put_u8(c);\n\n if c & 0x80 == 0 {\n\n break;\n\n }\n\n }\n\n for k in key {\n\n builder.put_u8(k);\n\n }\n\n\n\n builder.freeze()\n\n}\n", "file_path": "src/client/protocol.rs", "rank": 84, "score": 119437.36150649066 }, { "content": "fn collections_drop(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n let collection: String = args.req(0)?;\n\n\n\n for identifier in cluster_identifiers {\n\n let 
active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n\n \"collections drop cannot be run against cloud clusters\",\n", "file_path": "src/cli/collections_drop.rs", "rank": 85, "score": 117587.99388106704 }, { "content": "fn collections_create(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let guard = state.lock().unwrap();\n\n let collection: String = args.req(0)?;\n\n let expiry = args.get_flag(\"max-expiry\")?.unwrap_or(0);\n\n\n\n for identifier in cluster_identifiers {\n\n let active_cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n validate_is_not_cloud(\n\n active_cluster,\n", "file_path": "src/cli/collections_create.rs", "rank": 86, "score": 117587.99388106704 }, { "content": "fn clusters_unregister(\n\n args: CommandArgs,\n\n state: Arc<Mutex<State>>,\n\n) -> Result<OutputStream, ShellError> {\n\n let identifier: String = args.req(0)?;\n\n let save = args.get_flag(\"save\")?.unwrap_or(false);\n\n\n\n let mut guard = state.lock().unwrap();\n\n if guard.active() == identifier.clone() {\n\n return Err(ShellError::unexpected(\n\n \"Cannot unregister the active cluster\",\n\n ));\n\n }\n\n\n\n if guard.remove_cluster(identifier).is_none() {\n\n return Err(ShellError::unexpected(\n\n \"identifier is not registered to a cluster\",\n\n ));\n\n };\n\n\n\n if save {\n\n update_config_file(&mut guard)?;\n\n };\n\n\n\n Ok(OutputStream::empty())\n\n}\n", "file_path": "src/cli/clusters_unregister.rs", "rank": 87, "score": 117495.7799751299 }, { "content": "fn clusters_register(\n\n args: CommandArgs,\n\n state: Arc<Mutex<State>>,\n\n) 
-> Result<OutputStream, ShellError> {\n\n let identifier: String = args.req(0)?;\n\n\n\n let hostnames = args\n\n .req::<String>(1)?\n\n .split(',')\n\n .map(|v| v.to_owned())\n\n .collect();\n\n let username = args.req(2)?;\n\n let password = args.req(3)?;\n\n let bucket = args.get_flag(\"default-bucket\")?;\n\n let scope = args.get_flag(\"default-scope\")?;\n\n let collection = args.get_flag(\"default-collection\")?;\n\n let tls_enabled = args.get_flag(\"tls-enabled\")?.unwrap_or(true);\n\n let tls_accept_all_certs = args.get_flag(\"tls-accept-all-certs\")?.unwrap_or(true);\n\n let tls_accept_all_hosts = args.get_flag(\"tls-validate-hosts\")?.unwrap_or(true);\n\n let cert_path = args.get_flag(\"tls-cert-path\")?;\n", "file_path": "src/cli/clusters_register.rs", "rank": 88, "score": 117495.7799751299 }, { "content": "// duration_to_golang_string creates a golang formatted string to use with timeouts. Unlike Golang\n\n// strings it does not deal with fracational seconds, we do not need that accuracy.\n\npub fn duration_to_golang_string(duration: Duration) -> String {\n\n let mut total_secs = duration.as_secs();\n\n let secs = total_secs % 60;\n\n total_secs = total_secs / 60;\n\n let mut golang_string = format!(\"{}s\", secs);\n\n if total_secs > 0 {\n\n let minutes = total_secs % 60;\n\n total_secs = total_secs / 60;\n\n golang_string = format!(\"{}m{}\", minutes, golang_string);\n\n if total_secs > 0 {\n\n golang_string = format!(\"{}h{}\", total_secs, golang_string)\n\n }\n\n }\n\n\n\n golang_string\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::cli::util::duration_to_golang_string;\n", "file_path": "src/cli/util.rs", "rank": 89, "score": 117362.40208213581 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct SearchResult {\n\n data: SearchResultData,\n\n}\n", "file_path": "src/cli/search.rs", "rank": 90, "score": 115319.41284789064 }, { "content": "fn clusters_create(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, 
ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n let definition: String = args.req(0)?;\n\n\n\n debug!(\"Running clouds clusters create for {}\", &definition);\n\n\n\n let guard = state.lock().unwrap();\n\n let control = guard.active_cloud_org()?;\n\n let client = control.client();\n\n\n\n let deadline = Instant::now().add(control.timeout());\n\n let cloud = guard.active_cloud()?;\n\n let cloud_name = guard.active_cloud_name().unwrap();\n\n let project_name = match cloud.active_project() {\n\n Some(p) => p,\n\n None => return Err(ShellError::unexpected(\"Could not auto-select a project\")),\n\n };\n", "file_path": "src/cli/clouds_clusters_create.rs", "rank": 91, "score": 114641.27941855026 }, { "content": "fn update_bucket_settings(\n\n settings: &mut BucketSettings,\n\n ram: Option<u64>,\n\n replicas: Option<u64>,\n\n flush: bool,\n\n durability: Option<String>,\n\n expiry: Option<u64>,\n\n) -> Result<(), ShellError> {\n\n if let Some(r) = ram {\n\n settings.set_ram_quota_mb(r);\n\n }\n\n if let Some(r) = replicas {\n\n settings.set_num_replicas(match u32::try_from(r) {\n\n Ok(bt) => bt,\n\n Err(e) => {\n\n return Err(ShellError::unexpected(format!(\n\n \"Failed to parse durability level {}\",\n\n e\n\n )));\n\n }\n", "file_path": "src/cli/buckets_update.rs", "rank": 92, "score": 114490.02360054287 }, { "content": "fn load_sample_bucket(\n\n state: Arc<Mutex<State>>,\n\n args: CommandArgs,\n\n) -> Result<OutputStream, ShellError> {\n\n let ctrl_c = args.ctrl_c();\n\n\n\n let cluster_identifiers = cluster_identifiers_from(&state, &args, true)?;\n\n let bucket_name: String = args.req(0)?;\n\n\n\n let mut results: Vec<Value> = vec![];\n\n for identifier in cluster_identifiers {\n\n let guard = state.lock().unwrap();\n\n let cluster = match guard.clusters().get(&identifier) {\n\n Some(c) => c,\n\n None => {\n\n return Err(ShellError::unexpected(\"Cluster not found\"));\n\n }\n\n };\n\n\n\n validate_is_not_cloud(\n", "file_path": "src/cli/buckets_sample.rs", "rank": 
93, "score": 114490.02360054287 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct CollectionManifest {\n\n uid: String,\n\n scopes: Vec<CollectionManifestScope>,\n\n}\n\n\n", "file_path": "src/client/kv_client.rs", "rank": 94, "score": 112086.64248096323 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct ClusterConfig {\n\n rev: u64,\n\n #[serde(alias = \"nodesExt\")]\n\n nodes_ext: Vec<NodeConfig>,\n\n loaded_from: Option<String>,\n\n}\n\n\n\nimpl ClusterConfig {\n\n pub fn management_seeds(&self, tls: bool) -> Vec<(String, u32)> {\n\n let key = if tls { \"mgmtSSL\" } else { \"mgmt\" };\n\n\n\n self.seeds(key)\n\n }\n\n\n\n pub fn query_seeds(&self, tls: bool) -> Vec<(String, u32)> {\n\n let key = if tls { \"n1qlSSL\" } else { \"n1ql\" };\n\n\n\n self.seeds(key)\n\n }\n\n\n", "file_path": "src/client/http_client.rs", "rank": 95, "score": 112016.47865720728 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct LimitedClusterSummary {\n\n id: String,\n\n name: String,\n\n}\n\n\n\npub struct CloudClient {\n\n secret_key: String,\n\n access_key: String,\n\n}\n\n\n\nimpl CloudClient {\n\n pub fn new(secret_key: String, access_key: String) -> Self {\n\n Self {\n\n secret_key,\n\n access_key,\n\n }\n\n }\n\n\n\n fn http_do(\n\n &self,\n", "file_path": "src/client/cloud.rs", "rank": 96, "score": 112016.47865720728 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct SearchResultHit {\n\n score: f32,\n\n index: String,\n\n id: String,\n\n}\n\n\n", "file_path": "src/cli/search.rs", "rank": 97, "score": 112011.0428624413 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct AdviseResult {\n\n query: String,\n\n advice: Advice,\n\n}\n\n\n", "file_path": "src/cli/query_advise.rs", "rank": 98, "score": 112011.0428624413 }, { "content": "struct CtrlcState {\n\n interrupt: Arc<AtomicBool>,\n\n waker: Option<Waker>,\n\n halt: Arc<AtomicBool>,\n\n}\n\n\n\nimpl CtrlcFuture {\n\n pub fn new(ctrl_c: Arc<AtomicBool>) -> CtrlcFuture {\n\n let state = 
Arc::new(Mutex::new(CtrlcState {\n\n interrupt: ctrl_c,\n\n waker: None,\n\n halt: Arc::new(AtomicBool::new(false)),\n\n }));\n\n\n\n let state_clone = state.clone();\n\n thread::spawn(move || loop {\n\n let mut state = state_clone.lock().unwrap();\n\n if state.halt.load(Ordering::SeqCst) {\n\n return;\n\n }\n", "file_path": "src/cli/ctrlc_future.rs", "rank": 99, "score": 111972.83412163286 } ]
Rust
crates/interledger-settlement-engines/src/stores/redis_ethereum_ledger/store.rs
pensivej/interledger-rs
f86937f11ee4557887b66c2a7dc935a6475bbd3a
use futures::{ future::{err, ok}, Future, }; use ethereum_tx_sign::web3::types::{Address as EthAddress, H256, U256}; use interledger_service::Account as AccountTrait; use std::collections::HashMap; use crate::engines::ethereum_ledger::{EthereumAccount, EthereumAddresses, EthereumStore}; use redis::{self, cmd, r#async::SharedConnection, ConnectionInfo, PipelineCommands, Value}; use log::{debug, error}; use crate::stores::redis_store_common::{EngineRedisStore, EngineRedisStoreBuilder}; static RECENTLY_OBSERVED_BLOCK_KEY: &str = "recently_observed_block"; static SAVED_TRANSACTIONS_KEY: &str = "transactions"; static SETTLEMENT_ENGINES_KEY: &str = "settlement"; static LEDGER_KEY: &str = "ledger"; static ETHEREUM_KEY: &str = "eth"; #[derive(Clone, Debug, Serialize)] pub struct Account { pub(crate) id: u64, pub(crate) own_address: EthAddress, pub(crate) token_address: Option<EthAddress>, } impl AccountTrait for Account { type AccountId = u64; fn id(&self) -> Self::AccountId { self.id } } fn ethereum_transactions_key(tx_hash: H256) -> String { format!( "{}:{}:{}:{}", ETHEREUM_KEY, LEDGER_KEY, SAVED_TRANSACTIONS_KEY, tx_hash, ) } fn ethereum_ledger_key(account_id: u64) -> String { format!( "{}:{}:{}:{}", ETHEREUM_KEY, LEDGER_KEY, SETTLEMENT_ENGINES_KEY, account_id ) } impl EthereumAccount for Account { fn token_address(&self) -> Option<EthAddress> { self.token_address } fn own_address(&self) -> EthAddress { self.own_address } } pub struct EthereumLedgerRedisStoreBuilder { redis_store_builder: EngineRedisStoreBuilder, } impl EthereumLedgerRedisStoreBuilder { pub fn new(redis_uri: ConnectionInfo) -> Self { EthereumLedgerRedisStoreBuilder { redis_store_builder: EngineRedisStoreBuilder::new(redis_uri), } } pub fn connect(&self) -> impl Future<Item = EthereumLedgerRedisStore, Error = ()> { self.redis_store_builder .connect() .and_then(move |redis_store| { let connection = redis_store.connection.clone(); Ok(EthereumLedgerRedisStore { redis_store, connection, }) }) } } 
#[derive(Clone)] pub struct EthereumLedgerRedisStore { redis_store: EngineRedisStore, connection: SharedConnection, } impl EthereumLedgerRedisStore { pub fn new(redis_store: EngineRedisStore) -> Self { let connection = redis_store.connection.clone(); EthereumLedgerRedisStore { redis_store, connection, } } } impl EthereumStore for EthereumLedgerRedisStore { type Account = Account; fn load_account_addresses( &self, account_ids: Vec<<Self::Account as AccountTrait>::AccountId>, ) -> Box<dyn Future<Item = Vec<EthereumAddresses>, Error = ()> + Send> { debug!("Loading account addresses {:?}", account_ids); let mut pipe = redis::pipe(); for account_id in account_ids.iter() { pipe.hgetall(ethereum_ledger_key(*account_id)); } Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| { error!( "Error the addresses for accounts: {:?} {:?}", account_ids, err ) }) .and_then( move |(_conn, addresses): (_, Vec<HashMap<String, Vec<u8>>>)| { debug!("Loaded account addresses {:?}", addresses); let mut ret = Vec::with_capacity(addresses.len()); for addr in &addresses { let own_address = if let Some(own_address) = addr.get("own_address") { own_address } else { return err(()); }; let own_address = EthAddress::from(&own_address[..]); let token_address = if let Some(token_address) = addr.get("token_address") { token_address } else { return err(()); }; let token_address = if token_address.len() == 20 { Some(EthAddress::from(&token_address[..])) } else { None }; ret.push(EthereumAddresses { own_address, token_address, }); } ok(ret) }, ), ) } fn save_account_addresses( &self, data: HashMap<<Self::Account as AccountTrait>::AccountId, EthereumAddresses>, ) -> Box<dyn Future<Item = (), Error = ()> + Send> { let mut pipe = redis::pipe(); for (account_id, d) in data { let token_address = if let Some(token_address) = d.token_address { token_address.to_vec() } else { vec![] }; let acc_id = ethereum_ledger_key(account_id); let addrs = &[ ("own_address", d.own_address.to_vec()), 
("token_address", token_address), ]; pipe.hset_multiple(acc_id, addrs).ignore(); pipe.set(addrs_to_key(d), account_id).ignore(); } Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| error!("Error saving account data: {:?}", err)) .and_then(move |(_conn, _ret): (_, Value)| Ok(())), ) } fn save_recently_observed_block( &self, block: U256, ) -> Box<dyn Future<Item = (), Error = ()> + Send> { let mut pipe = redis::pipe(); pipe.set(RECENTLY_OBSERVED_BLOCK_KEY, block.low_u64()) .ignore(); Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| { error!("Error saving last observed block {:?}: {:?}", block, err) }) .and_then(move |(_conn, _ret): (_, Value)| Ok(())), ) } fn load_recently_observed_block(&self) -> Box<dyn Future<Item = U256, Error = ()> + Send> { let mut pipe = redis::pipe(); pipe.get(RECENTLY_OBSERVED_BLOCK_KEY); Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| error!("Error loading last observed block: {:?}", err)) .and_then(move |(_conn, block): (_, Vec<u64>)| { if !block.is_empty() { let block = U256::from(block[0]); ok(block) } else { ok(U256::from(0)) } }), ) } fn load_account_id_from_address( &self, eth_address: EthereumAddresses, ) -> Box<dyn Future<Item = <Self::Account as AccountTrait>::AccountId, Error = ()> + Send> { let mut pipe = redis::pipe(); pipe.get(addrs_to_key(eth_address)); Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| error!("Error loading account data: {:?}", err)) .and_then(move |(_conn, account_id): (_, Vec<u64>)| ok(account_id[0])), ) } fn check_if_tx_processed( &self, tx_hash: H256, ) -> Box<dyn Future<Item = bool, Error = ()> + Send> { Box::new( cmd("EXISTS") .arg(ethereum_transactions_key(tx_hash)) .query_async(self.connection.clone()) .map_err(move |err| error!("Error loading account data: {:?}", err)) .and_then(move |(_conn, ret): (_, bool)| Ok(ret)), ) } fn mark_tx_processed(&self, tx_hash: H256) -> Box<dyn Future<Item = (), Error = ()> + 
Send> { Box::new( cmd("SETNX") .arg(ethereum_transactions_key(tx_hash)) .arg(true) .query_async(self.connection.clone()) .map_err(move |err| error!("Error loading account data: {:?}", err)) .and_then( move |(_conn, ret): (_, bool)| { if ret { ok(()) } else { err(()) } }, ), ) } } fn addrs_to_key(address: EthereumAddresses) -> String { let token_address = if let Some(token_address) = address.token_address { token_address.to_string() } else { "null".to_string() }; format!( "account:{}:{}", address.own_address.to_string(), token_address ) } #[cfg(test)] mod tests { use super::super::super::test_helpers::store_helpers::{ block_on, test_eth_store as test_store, }; use super::*; use std::iter::FromIterator; use std::str::FromStr; #[test] fn saves_and_loads_ethereum_addreses_properly() { block_on(test_store().and_then(|(store, context)| { let account_ids = vec![30, 42]; let account_addresses = vec![ EthereumAddresses { own_address: EthAddress::from_str("3cdb3d9e1b74692bb1e3bb5fc81938151ca64b02") .unwrap(), token_address: Some( EthAddress::from_str("c92be489639a9c61f517bd3b955840fa19bc9b7c").unwrap(), ), }, EthereumAddresses { own_address: EthAddress::from_str("2fcd07047c209c46a767f8338cb0b14955826826") .unwrap(), token_address: None, }, ]; let input = HashMap::from_iter(vec![ (account_ids[0], account_addresses[0]), (account_ids[1], account_addresses[1]), ]); store .save_account_addresses(input) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |_| { store .load_account_addresses(account_ids.clone()) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |data| { assert_eq!(data[0], account_addresses[0]); assert_eq!(data[1], account_addresses[1]); let _ = context; store .load_account_id_from_address(account_addresses[0]) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |acc_id| { assert_eq!(acc_id, account_ids[0]); let _ = context; store .load_account_id_from_address(account_addresses[1]) .map_err(|err| eprintln!("Redis error: 
{:?}", err)) .and_then(move |acc_id| { assert_eq!(acc_id, account_ids[1]); let _ = context; Ok(()) }) }) }) }) })) .unwrap() } #[test] fn saves_and_loads_last_observed_data_properly() { block_on(test_store().and_then(|(store, context)| { let block = U256::from(2); store .save_recently_observed_block(block) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |_| { store .load_recently_observed_block() .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |data| { assert_eq!(data, block); let _ = context; Ok(()) }) }) })) .unwrap() } #[test] fn saves_tx_hashes_properly() { block_on(test_store().and_then(|(store, context)| { let tx_hash = H256::from("0xb28675771f555adf614f1401838b9fffb43bc285387679bcbd313a8dc5bdc00e"); store .mark_tx_processed(tx_hash) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |_| { store .check_if_tx_processed(tx_hash) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |seen2| { assert_eq!(seen2, true); let _ = context; Ok(()) }) }) })) .unwrap() } }
use futures::{ future::{err, ok}, Future, }; use ethereum_tx_sign::web3::types::{Address as EthAddress, H256, U256}; use interledger_service::Account as AccountTrait; use std::collections::HashMap; use crate::engines::ethereum_ledger::{EthereumAccount, EthereumAddresses, EthereumStore}; use redis::{self, cmd, r#async::SharedConnection, ConnectionInfo, PipelineCommands, Value}; use log::{debug, error}; use crate::stores::redis_store_common::{EngineRedisStore, EngineRedisStoreBuilder}; static RECENTLY_OBSERVED_BLOCK_KEY: &str = "recently_observed_block"; static SAVED_TRANSACTIONS_KEY: &str = "transactions"; static SETTLEMENT_ENGINES_KEY: &str = "settlement"; static LEDGER_KEY: &str = "ledger"; static ETHEREUM_KEY: &str = "eth"; #[derive(Clone, Debug, Serialize)] pub struct Account { pub(crate) id: u64, pub(crate) own_address: EthAddress, pub(crate) token_address: Option<EthAddress>, } impl AccountTrait for Account { type AccountId = u64; fn id(&self) -> Self::AccountId { self.id } } fn ethereum_transactions_key(tx_hash: H256) -> String { format!( "{}:{}:{}:{}", ETHEREUM_KEY, LEDGER_KEY, SAVED_TRANSACTIONS_KEY, tx_hash, ) } fn ethereum_ledger_key(account_id: u64) -> String { format!( "{}:{}:{}:{}", ETHEREUM_KEY, LEDGER_KEY, SETTLEMENT_ENGINES_KEY, account_id ) } impl EthereumAccount for Account { fn token_address(&self) -> Option<EthAddress> { self.token_address } fn own_address(&self) -> EthAddress { self.own_address } } pub struct EthereumLedgerRedisStoreBuilder { redis_store_builder: EngineRedisStoreBuilder, } impl EthereumLedgerRedisStoreBuilder { pub fn new(redis_uri: ConnectionInfo) -> Self { EthereumLedgerRedisStoreBuilder { redis_store_builder: EngineRedisStoreBuilder::new(redis_uri), } } pub fn connect(&self) -> impl Future<Item = EthereumLedgerRedisStore, Error = ()> { self.redis_store_builder .connect() .and_then(move |redis_store| { let connection = redis_store.connection.clone(); Ok(EthereumLedgerRedisStore { redis_store, connection, }) }) } } 
#[derive(Clone)] pub struct EthereumLedgerRedisStore { redis_store: EngineRedisStore, connection: SharedConnection, } impl EthereumLedgerRedisStore { pub fn new(redis_store: EngineRedisStore) -> Self { let connection = redis_store.connection.clone(); EthereumLedgerRedisStore { redis_store, connection, } } } impl EthereumStore for EthereumLedgerRedisStore { type Account = Account; fn load_account_addresses( &self, account_ids: Vec<<Self::Account as AccountTrait>::AccountId>, ) -> Box<dyn Future<Item = Vec<EthereumAddresses>, Error = ()> + Send> { debug!("Loading account addresses {:?}", account_ids); let mut pipe = redis::pipe(); for account_id in account_ids.iter() { pipe.hgetall(ethereum_ledger_key(*account_id)); } Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| { error!( "Error the addresses for accounts: {:?} {:?}", account_ids, err ) }) .and_then( move |(_conn, addresses): (_, Vec<HashMap<String, Vec<u8>>>)| { debug!("Loaded account addresses {:?}", addresses); let mut ret = Vec::with_capacity(addresses.len()); for addr in &addresses { let own_address = if let Some(own_address) = addr.get("own_address") { own_address } else { return err(()); }; let own_address = EthAddress::from(&own_address[..]); let token_address = if let Some(token_address) = addr.get("token_address") { token_address } else { return err(()); }; let token_address = if token_address.len() == 20 { Some(EthAddress::from(&token_address[..])) } else { None }; ret.push(EthereumAddresses { own_address, token_address, }); } ok(ret) }, ), ) } fn save_account_addresses( &self, data: HashMap<<Self::Account as AccountTrait>::AccountId, EthereumAddresses>, ) -> Box<dyn Future<Item = (), Error = ()> + Send> { let mut pipe = redis::pipe(); for (account_id, d) in data { let token_address = if let Some(token_address) = d.token_address { token_address.to_vec() } else { vec![] }; let acc_id = ethereum_ledger_key(account_id); let addrs = &[ ("own_address", d.own_address.to_vec()), 
("token_address", token_address), ]; pipe.hset_multiple(acc_id, addrs).ignore(); pipe.set(addrs_to_key(d), account_id).ignore(); } Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| error!("Error saving account data: {:?}", err)) .and_then(move |(_conn, _ret): (_, Value)| Ok(())), ) } fn save_recently_observed_block( &self, block: U256, ) -> Box<dyn Future<Item = (), Error = ()> + Send> { let mut pipe = redis::pipe(); pipe.set(RECENTLY_OBSERVED_BLOCK_KEY, block.low_u64()) .ignore(); Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| { error!("Error saving last observed block {:?}: {:?}", block, err) }) .and_then(move |(_conn, _ret): (_, Value)| Ok(())), ) } fn load_recently_observed_block(&self) -> Box<dyn Future<Item = U256, Error = ()> + Send> { let mut pipe = redis::pipe(); pipe.get(RECENTLY_OBSERVED_BLOCK_KEY); Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| error!("Error loading last observed block: {:?}", err)) .and_then(move |(_conn, block): (_, Vec<u64>)| { if !block.is_empty() { let block = U256::from(block[0]); ok(block) } else { ok(U256::from(0)) } }), ) } fn load_account_id_from_address( &self, eth_address: EthereumAddresses, ) -> Box<dyn Future<Item = <Self::Account as AccountTrait>::AccountId, Error = ()> + Send> { let mut pipe = redis::pipe(); pipe.get(addrs_to_key(eth_address)); Box::new( pipe.query_async(self.connection.clone()) .map_err(move |err| error!("Error loading account data: {:?}", err)) .and_then(move |(_conn, account_id): (_, Vec<u64>)| ok(account_id[0])), ) } fn check_if_tx_processed( &self, tx_hash: H256, ) -> Box<dyn Future<Item = bool, Error = ()> + Send> { Box::new( cmd("EXISTS") .arg(ethereum_transactions_key(tx_hash)) .query_async(self.connection.clone()) .map_err(move |err| error!("Error loading account data: {:?}", err)) .and_then(move |(_conn, ret): (_, bool)| Ok(ret)), ) } fn mark_tx_processed(&self, tx_hash: H256) -> Box<dyn Future<Item = (), Error = ()> + 
Send> { Box::new( cmd("SETNX") .arg(ethereum_transactions_key(tx_hash)) .arg(true) .query_async(self.connection.clone()) .map_err(move |err| error!("Error loading account data: {:?}", err)) .and_then( move |(_conn, ret): (_, bool)| { if ret { ok(()) } else { err(()) } }, ), ) } } fn addrs_to_key(address: EthereumAddresses) -> String { let token_address = if let Some(token_address) = address.token_address { token_address.to_string() } else { "null".to_string() }; format!( "account:{}:{}", address.own_address.to_string(), token_address ) } #[cfg(test)] mod tests { use super::super::super::test_helpers::store_helpers::{ block_on, test_eth_store as test_store, }; use super::*; use std::iter::FromIterator; use std::str::FromStr; #[test] fn saves_and_loads_ethereum_addreses_properly() {
.unwrap() } #[test] fn saves_and_loads_last_observed_data_properly() { block_on(test_store().and_then(|(store, context)| { let block = U256::from(2); store .save_recently_observed_block(block) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |_| { store .load_recently_observed_block() .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |data| { assert_eq!(data, block); let _ = context; Ok(()) }) }) })) .unwrap() } #[test] fn saves_tx_hashes_properly() { block_on(test_store().and_then(|(store, context)| { let tx_hash = H256::from("0xb28675771f555adf614f1401838b9fffb43bc285387679bcbd313a8dc5bdc00e"); store .mark_tx_processed(tx_hash) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |_| { store .check_if_tx_processed(tx_hash) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |seen2| { assert_eq!(seen2, true); let _ = context; Ok(()) }) }) })) .unwrap() } }
block_on(test_store().and_then(|(store, context)| { let account_ids = vec![30, 42]; let account_addresses = vec![ EthereumAddresses { own_address: EthAddress::from_str("3cdb3d9e1b74692bb1e3bb5fc81938151ca64b02") .unwrap(), token_address: Some( EthAddress::from_str("c92be489639a9c61f517bd3b955840fa19bc9b7c").unwrap(), ), }, EthereumAddresses { own_address: EthAddress::from_str("2fcd07047c209c46a767f8338cb0b14955826826") .unwrap(), token_address: None, }, ]; let input = HashMap::from_iter(vec![ (account_ids[0], account_addresses[0]), (account_ids[1], account_addresses[1]), ]); store .save_account_addresses(input) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |_| { store .load_account_addresses(account_ids.clone()) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |data| { assert_eq!(data[0], account_addresses[0]); assert_eq!(data[1], account_addresses[1]); let _ = context; store .load_account_id_from_address(account_addresses[0]) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |acc_id| { assert_eq!(acc_id, account_ids[0]); let _ = context; store .load_account_id_from_address(account_addresses[1]) .map_err(|err| eprintln!("Redis error: {:?}", err)) .and_then(move |acc_id| { assert_eq!(acc_id, account_ids[1]); let _ = context; Ok(()) }) }) }) }) }))
call_expression
[ { "content": "pub fn delay(ms: u64) -> impl Future<Item = (), Error = ()> {\n\n Delay::new(Instant::now() + Duration::from_millis(ms)).map_err(|err| panic!(err))\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/tests/redis_helpers.rs", "rank": 0, "score": 401299.29343494965 }, { "content": "// Helper function which is used to construct an Ethereum transaction sending\n\n// `value` tokens to `to`. If a `token_address` is provided, then an ERC20\n\n// transaction is created instead for that token. The `nonce`, `gas` and\n\n// `gas_price` fields are set to 0 and are expected to be set with the values\n\n// returned by the corresponding `eth_getTransactionCount`, `eth_estimateGas`,\n\n// `eth_gasPrice` calls to an Ethereum node.\n\npub fn make_tx(to: Address, value: U256, token_address: Option<Address>) -> RawTransaction {\n\n if let Some(token_address) = token_address {\n\n // Ethereum contract transactions format:\n\n // [transfer function selector][`to` padded ][`value` padded]\n\n // transfer function selector: sha3(\"transfer(to,address)\")[0:8] =\n\n // \"a9059cbb\"\n\n // The actual receiver of the transaction is the ERC20 `token_address`\n\n // The value of the transaction is 0 wei since we are transferring an ERC20\n\n let mut data = hex::decode(\"a9059cbb\").unwrap();\n\n data.extend(ethabi::encode(&[Token::Address(to), Token::Uint(value)]));\n\n RawTransaction {\n\n to: Some(token_address),\n\n nonce: U256::from(0),\n\n data,\n\n gas: U256::from(0),\n\n gas_price: U256::from(0),\n\n value: U256::zero(),\n\n }\n\n } else {\n\n // Ethereum account transaction:\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/utils.rs", "rank": 1, "score": 389675.2607022585 }, { "content": "pub fn test_eth_store() -> impl Future<Item = (EthereumLedgerRedisStore, TestContext), Error = ()> {\n\n let context = TestContext::new();\n\n EngineRedisStoreBuilder::new(context.get_client_connection_info())\n\n .connect()\n\n 
.and_then(|redis_store| Ok((EthereumLedgerRedisStore::new(redis_store), context)))\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/src/stores/test_helpers/store_helpers.rs", "rank": 2, "score": 383592.1162949122 }, { "content": "pub fn delay(ms: u64) -> impl Future<Item = (), Error = ()> {\n\n Delay::new(Instant::now() + Duration::from_millis(ms)).map_err(|err| panic!(err))\n\n}\n\n\n", "file_path": "crates/interledger/tests/redis_helpers.rs", "rank": 3, "score": 376032.6060365944 }, { "content": "pub fn sent_to_us(tx: Transaction, our_address: Address) -> (Address, U256, Option<Address>) {\n\n if let Some(to) = tx.to {\n\n if tx.value > U256::from(0) && to == our_address {\n\n return (tx.from, tx.value, None);\n\n }\n\n }\n\n (tx.from, U256::from(0), None) // if it's not for us the amount is 0\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_erc20_make_tx() {\n\n // https://etherscan.io/tx/0x6fd1b68f02f4201a38662647b7f09170b159faec6af4825ae509beefeb8e8130\n\n let to = \"c92be489639a9c61f517bd3b955840fa19bc9b7c\".parse().unwrap();\n\n let value = \"16345785d8a0000\".into();\n\n let token_address = Some(\"B8c77482e45F1F44dE1745F52C74426C631bDD52\".into());\n\n let tx = make_tx(to, value, token_address);\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/utils.rs", "rank": 5, "score": 348291.4001432056 }, { "content": "fn address_to_string<S>(address: &Address, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n serializer.serialize_str(str::from_utf8(address.as_ref()).unwrap_or(\"\"))\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/src/account.rs", "rank": 6, "score": 347432.4762892856 }, { "content": "pub fn query(server: &str) -> impl Future<Item = SpspResponse, Error = Error> {\n\n let server = payment_pointer_to_url(server);\n\n trace!(\"Querying receiver: {}\", server);\n\n\n\n let client = Client::new();\n\n client\n\n .get(&server)\n\n 
.header(\"Accept\", \"application/spsp4+json\")\n\n .send()\n\n .map_err(|err| Error::HttpError(format!(\"Error querying SPSP receiver: {:?}\", err)))\n\n .and_then(|mut res| {\n\n res.json::<SpspResponse>()\n\n .map_err(|err| Error::InvalidResponseError(format!(\"{:?}\", err)))\n\n })\n\n}\n\n\n", "file_path": "crates/interledger-spsp/src/client.rs", "rank": 7, "score": 336656.8026440439 }, { "content": "pub fn test_store() -> impl Future<Item = (EngineRedisStore, TestContext), Error = ()> {\n\n let context = TestContext::new();\n\n EngineRedisStoreBuilder::new(context.get_client_connection_info())\n\n .connect()\n\n .and_then(|store| Ok((store, context)))\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/src/stores/test_helpers/store_helpers.rs", "rank": 8, "score": 328708.87616002164 }, { "content": "fn get_bool(key: &str, map: &HashMap<String, Value>) -> bool {\n\n if let Some(ref value) = map.get(key) {\n\n if let Ok(value) = from_redis_value(value) as Result<String, RedisError> {\n\n if value.to_lowercase() == \"true\" {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n\nimpl AccountTrait for Account {\n\n type AccountId = u64;\n\n\n\n fn id(&self) -> Self::AccountId {\n\n self.id\n\n }\n\n}\n\n\n\nimpl IldcpAccount for Account {\n", "file_path": "crates/interledger-store-redis/src/account.rs", "rank": 10, "score": 316575.06855860143 }, { "content": "pub fn test_store(store_fails: bool, account_has_engine: bool) -> TestStore {\n\n let mut acc = TEST_ACCOUNT_0.clone();\n\n acc.no_details = !account_has_engine;\n\n\n\n TestStore::new(vec![acc], store_fails)\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/test_helpers.rs", "rank": 11, "score": 315011.1218241779 }, { "content": "// Futures helper taken from the store_helpers in interledger-store-redis.\n\npub fn block_on<F>(f: F) -> Result<F::Item, F::Error>\n\nwhere\n\n F: Future + Send + 'static,\n\n F::Item: Send,\n\n F::Error: Send,\n\n{\n\n let _ = 
env_logger::try_init();\n\n let mut runtime = Runtime::new().unwrap();\n\n runtime.block_on(f)\n\n}\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/test_helpers.rs", "rank": 12, "score": 314611.57715996954 }, { "content": "fn account_details_key(account_id: u64) -> String {\n\n format!(\"accounts:{}\", account_id)\n\n}\n\n\n\npub struct RedisStoreBuilder {\n\n redis_uri: ConnectionInfo,\n\n secret: [u8; 32],\n\n poll_interval: u64,\n\n}\n\n\n\nimpl RedisStoreBuilder {\n\n pub fn new(redis_uri: ConnectionInfo, secret: [u8; 32]) -> Self {\n\n RedisStoreBuilder {\n\n redis_uri,\n\n secret,\n\n poll_interval: DEFAULT_POLL_INTERVAL,\n\n }\n\n }\n\n\n\n pub fn poll_interval(&mut self, poll_interval: u64) -> &mut Self {\n", "file_path": "crates/interledger-store-redis/src/store.rs", "rank": 13, "score": 312555.4466972386 }, { "content": "pub fn test_store() -> impl Future<Item = (RedisStore, TestContext), Error = ()> {\n\n let context = TestContext::new();\n\n RedisStoreBuilder::new(context.get_client_connection_info(), [0; 32])\n\n .connect()\n\n .and_then(|store| {\n\n let store_clone = store.clone();\n\n store\n\n .clone()\n\n .insert_account(ACCOUNT_DETAILS_0.clone())\n\n .and_then(move |_| store_clone.insert_account(ACCOUNT_DETAILS_1.clone()))\n\n .and_then(|_| Ok((store, context)))\n\n })\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/tests/common/store_helpers.rs", "rank": 15, "score": 307465.2986169406 }, { "content": "// Futures helper taken from the store_helpers in interledger-store-redis.\n\npub fn block_on<F>(f: F) -> Result<F::Item, F::Error>\n\nwhere\n\n F: Future + Send + 'static,\n\n F::Item: Send,\n\n F::Error: Send,\n\n{\n\n // Only run one test at a time\n\n let _ = env_logger::try_init();\n\n let mut runtime = Runtime::new().unwrap();\n\n runtime.block_on(f)\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/test_helpers.rs", "rank": 16, "score": 296605.22438113095 }, { "content": "/// An 
Ethereum account is associated with an address. We additionally require\n\n/// that an optional `token_address` is implemented. If the `token_address` of an\n\n/// Ethereum Account is not `None`, than that account is used with the ERC20 token\n\n/// associated with that `token_address`.\n\npub trait EthereumAccount: Account {\n\n fn own_address(&self) -> Address;\n\n\n\n fn token_address(&self) -> Option<Address> {\n\n None\n\n }\n\n}\n\n\n\n#[derive(Debug, Extract, Serialize, Deserialize, Clone, PartialEq, Eq, Hash, Copy)]\n\npub struct Addresses {\n\n pub own_address: Address,\n\n pub token_address: Option<Address>,\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/types.rs", "rank": 17, "score": 290467.4754618389 }, { "content": "#[derive(serde::Deserialize)]\n\nstruct DeliveryData {\n\n delivered_amount: u64,\n\n}\n\n\n\n#[test]\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 18, "score": 288171.6860609696 }, { "content": "// TODO replace this with pubsub when async pubsub is added upstream: https://github.com/mitsuhiko/redis-rs/issues/183\n\ntype RouteVec = Vec<(String, u64)>;\n\n\n", "file_path": "crates/interledger-store-redis/src/store.rs", "rank": 19, "score": 287019.88718455686 }, { "content": "pub fn block_on<F>(f: F) -> Result<F::Item, F::Error>\n\nwhere\n\n F: Future + Send + 'static,\n\n F::Item: Send,\n\n F::Error: Send,\n\n{\n\n // Only run one test at a time\n\n let _ = env_logger::try_init();\n\n let mut runtime = Runtime::new().unwrap();\n\n runtime.block_on(f)\n\n}\n", "file_path": "crates/interledger-settlement-engines/src/stores/test_helpers/store_helpers.rs", "rank": 20, "score": 280815.5739929375 }, { "content": "fn get_value<V>(key: &str, map: &HashMap<String, Value>) -> Result<V, RedisError>\n\nwhere\n\n V: FromRedisValue,\n\n{\n\n if let Some(ref value) = map.get(key) {\n\n from_redis_value(value)\n\n } else {\n\n Err(RedisError::from((\n\n 
ErrorKind::TypeError,\n\n \"Account is missing field\",\n\n key.to_string(),\n\n )))\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/src/account.rs", "rank": 21, "score": 269632.7035020954 }, { "content": "#[test]\n\nfn saves_and_loads_idempotency_key_data_properly() {\n\n block_on(test_store().and_then(|(store, context)| {\n\n let input_hash: [u8; 32] = Default::default();\n\n store\n\n .save_idempotent_data(\n\n IDEMPOTENCY_KEY.clone(),\n\n input_hash,\n\n StatusCode::OK,\n\n Bytes::from(\"TEST\"),\n\n )\n\n .map_err(|err| eprintln!(\"Redis error: {:?}\", err))\n\n .and_then(move |_| {\n\n store\n\n .load_idempotent_data(IDEMPOTENCY_KEY.clone())\n\n .map_err(|err| eprintln!(\"Redis error: {:?}\", err))\n\n .and_then(move |data1| {\n\n assert_eq!(data1, (StatusCode::OK, Bytes::from(\"TEST\"), input_hash));\n\n let _ = context;\n\n\n\n store\n", "file_path": "crates/interledger-store-redis/tests/settlement_test.rs", "rank": 22, "score": 267487.26398705394 }, { "content": "/// In this test we have Alice and Bob who have peered with each other and run\n\n/// Ethereum ledger settlement engines. Alice proceeds to make SPSP payments to\n\n/// Bob, until she eventually reaches Bob's `settle_threshold`. Once that's\n\n/// exceeded, her engine makes a settlement request to Bob. Alice's connector\n\n/// immediately applies the balance change. 
Bob's engine listens for incoming\n\n/// transactions, and once the transaction has sufficient confirmations it\n\n/// lets Bob's connector know about it, so that it adjusts their credit.\n\nfn eth_ledger_settlement() {\n\n // Nodes 1 and 2 are peers, Node 2 is the parent of Node 3\n\n let _ = env_logger::try_init();\n\n let context = TestContext::new();\n\n\n\n let mut ganache_pid = start_ganache();\n\n\n\n // Each node will use its own DB within the redis instance\n\n let mut connection_info1 = context.get_client_connection_info();\n\n connection_info1.db = 1;\n\n let mut connection_info2 = context.get_client_connection_info();\n\n connection_info2.db = 2;\n\n\n\n let node1_http = get_open_port(Some(3010));\n\n let node1_settlement = get_open_port(Some(3011));\n\n let node1_engine = get_open_port(Some(3012));\n\n let alice_key = \"380eb0f3d505f087e438eca80bc4df9a7faa24f868e69fc0440261a0fc0567dc\".to_string();\n\n let node2_http = get_open_port(Some(3020));\n\n let node2_settlement = get_open_port(Some(3021));\n\n let node2_engine = get_open_port(Some(3022));\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 23, "score": 263473.59050673665 }, { "content": "fn get_value_option<V>(key: &str, map: &HashMap<String, Value>) -> Result<Option<V>, RedisError>\n\nwhere\n\n V: FromRedisValue,\n\n{\n\n if let Some(ref value) = map.get(key) {\n\n from_redis_value(value).map(Some)\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/src/account.rs", "rank": 24, "score": 260492.37256332455 }, { "content": "/// The base trait that Account types from other Services extend.\n\n/// This trait only assumes that the account has an ID that can be compared with others.\n\n///\n\n/// Each service can extend the Account type to include additional details they require.\n\n/// Store implementations will implement these Account traits for a concrete type that\n\n/// they will load from the database.\n\npub 
trait Account: Clone + Send + Sized + Debug {\n\n type AccountId: Eq + Hash + Debug + Display + Default + FromStr + Send + Sync + Copy + Serialize;\n\n\n\n fn id(&self) -> Self::AccountId;\n\n}\n\n\n\n/// A struct representing an incoming ILP Prepare packet or an outgoing one before the next hop is set.\n\n#[derive(Debug, Clone)]\n\npub struct IncomingRequest<A: Account> {\n\n pub from: A,\n\n pub prepare: Prepare,\n\n}\n\n\n\n/// A struct representing an ILP Prepare packet with the incoming and outgoing accounts set.\n\n#[derive(Debug, Clone)]\n\npub struct OutgoingRequest<A: Account> {\n\n pub from: A,\n\n pub to: A,\n\n pub original_amount: u64,\n\n pub prepare: Prepare,\n", "file_path": "crates/interledger-service/src/lib.rs", "rank": 25, "score": 260076.59198358032 }, { "content": "pub fn test_store(\n\n account: TestAccount,\n\n store_fails: bool,\n\n account_has_engine: bool,\n\n initialize: bool,\n\n) -> TestStore {\n\n let mut acc = account.clone();\n\n acc.no_details = !account_has_engine;\n\n TestStore::new(vec![acc], store_fails, initialize)\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/test_helpers.rs", "rank": 26, "score": 255875.43698769237 }, { "content": "fn optional_url_to_string<S>(url: &Option<Url>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n if let Some(ref url) = url {\n\n serializer.serialize_str(url.as_ref())\n\n } else {\n\n serializer.serialize_none()\n\n }\n\n}\n\n\n\n// This needs to be pass by ref because serde expects this function to take a ref\n", "file_path": "crates/interledger-store-redis/src/account.rs", "rank": 27, "score": 255189.22499352382 }, { "content": "pub fn block_on<F>(f: F) -> Result<F::Item, F::Error>\n\nwhere\n\n F: Future + Send + 'static,\n\n F::Item: Send,\n\n F::Error: Send,\n\n{\n\n // Only run one test at a time\n\n let _ = env_logger::try_init();\n\n let lock = TEST_MUTEX.lock();\n\n let mut runtime = Runtime::new().unwrap();\n\n 
let result = runtime.block_on(f);\n\n drop(lock);\n\n result\n\n}\n", "file_path": "crates/interledger-store-redis/tests/common/store_helpers.rs", "rank": 28, "score": 254083.3219191786 }, { "content": "fn get_bytes_option(key: &str, map: &HashMap<String, Value>) -> Result<Option<Bytes>, RedisError> {\n\n if let Some(ref value) = map.get(key) {\n\n let vec: Vec<u8> = from_redis_value(value)?;\n\n Ok(Some(Bytes::from(vec)))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/src/account.rs", "rank": 29, "score": 253959.26044806326 }, { "content": "fn get_url_option(key: &str, map: &HashMap<String, Value>) -> Result<Option<Url>, RedisError> {\n\n if let Some(ref value) = map.get(key) {\n\n let value: String = from_redis_value(value)?;\n\n if let Ok(url) = Url::parse(&value) {\n\n Ok(Some(url))\n\n } else {\n\n Err(RedisError::from((ErrorKind::TypeError, \"Invalid URL\")))\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/src/account.rs", "rank": 30, "score": 253959.26044806326 }, { "content": "fn read_protocol_data<T>(reader: &mut T) -> Result<Vec<ProtocolData>, ParseError>\n\nwhere\n\n T: ReadOerExt,\n\n{\n\n let mut protocol_data = Vec::new();\n\n\n\n let num_entries = reader.read_var_uint()?;\n\n let mut i = BigUint::from(0 as u32);\n\n while i < num_entries {\n\n i = i.add(BigUint::from(1 as u8)); // this is probably slow\n\n let protocol_name = String::from_utf8(reader.read_var_octet_string()?)?;\n\n let content_type = ContentType::from(reader.read_u8()?);\n\n let data = reader.read_var_octet_string()?;\n\n protocol_data.push(ProtocolData {\n\n protocol_name,\n\n content_type,\n\n data,\n\n });\n\n }\n\n Ok(protocol_data)\n\n}\n\n\n", "file_path": "crates/interledger-btp/src/packet.rs", "rank": 31, "score": 247955.83609515446 }, { "content": "fn prefixed_mesage(challenge: Vec<u8>) -> Vec<u8> {\n\n let mut ret = ETH_CREATE_ACCOUNT_PREFIX.to_vec();\n\n ret.extend(challenge);\n\n 
ret\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/eth_engine.rs", "rank": 32, "score": 242828.06004590957 }, { "content": "fn get_auth<C>(connection: C) -> impl Future<Item = (Auth, C), Error = ()>\n\nwhere\n\n C: Stream<Item = Message> + Sink<SinkItem = Message>,\n\n{\n\n connection\n\n .into_future()\n\n .map_err(|_err| ())\n\n .and_then(move |(message, connection)| {\n\n // The first packet sent on the connection MUST be the auth packet\n\n result(parse_auth(message).map(|auth| (auth, connection)).ok_or(()))\n\n })\n\n}\n\n\n", "file_path": "crates/interledger-btp/src/server.rs", "rank": 33, "score": 239471.17855452723 }, { "content": "// Helper to create a new engine and spin a new ganache instance.\n\npub fn test_engine<Si, S, A>(\n\n store: S,\n\n key: Si,\n\n confs: u8,\n\n connector_url: &str,\n\n watch_incoming: bool,\n\n) -> EthereumLedgerSettlementEngine<S, Si, A>\n\nwhere\n\n Si: EthereumLedgerTxSigner + Clone + Send + Sync + 'static,\n\n S: EthereumStore<Account = A> + IdempotentStore + Clone + Send + Sync + 'static,\n\n A: EthereumAccount + Send + Sync + 'static,\n\n{\n\n EthereumLedgerSettlementEngineBuilder::new(store, key)\n\n .connector_url(connector_url)\n\n .confirmations(confs)\n\n .watch_incoming(watch_incoming)\n\n .poll_frequency(1000)\n\n .connect()\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/test_helpers.rs", "rank": 34, "score": 239028.02606193762 }, { "content": "pub fn test_api(\n\n test_store: TestStore,\n\n should_fulfill: bool,\n\n) -> SettlementApi<TestStore, impl OutgoingService<TestAccount> + Clone + Send + Sync, TestAccount>\n\n{\n\n let outgoing = outgoing_service_fn(move |_| {\n\n Box::new(if should_fulfill {\n\n ok(FulfillBuilder {\n\n fulfillment: &[0; 32],\n\n data: b\"hello!\",\n\n }\n\n .build())\n\n } else {\n\n err(RejectBuilder {\n\n code: ErrorCode::F02_UNREACHABLE,\n\n message: b\"No other outgoing handler!\",\n\n data: 
&[],\n\n triggered_by: Some(&SERVICE_ADDRESS),\n\n }\n\n .build())\n\n })\n\n });\n\n SettlementApi::new(test_store, outgoing)\n\n}\n", "file_path": "crates/interledger-settlement/src/test_helpers.rs", "rank": 35, "score": 230665.30195314222 }, { "content": "pub fn test_service(\n\n) -> SettlementMessageService<impl IncomingService<TestAccount> + Clone, TestAccount> {\n\n SettlementMessageService::new(\n\n SERVICE_ADDRESS.clone(),\n\n incoming_service_fn(|_request| {\n\n Box::new(err(RejectBuilder {\n\n code: ErrorCode::F02_UNREACHABLE,\n\n message: b\"No other incoming handler!\",\n\n data: &[],\n\n triggered_by: Some(&SERVICE_ADDRESS),\n\n }\n\n .build()))\n\n }),\n\n )\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/test_helpers.rs", "rank": 36, "score": 230665.30195314222 }, { "content": "#[cfg(test)]\n\n#[derive(Clone, Debug)]\n\nstruct TestAccount(u64);\n\n#[cfg(test)]\n\nimpl Account for TestAccount {\n\n type AccountId = u64;\n\n\n\n fn id(&self) -> u64 {\n\n self.0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod incoming {\n\n use super::*;\n\n use interledger_packet::*;\n\n use interledger_service::incoming_service_fn;\n\n use std::str::FromStr;\n\n use std::{\n\n sync::{Arc, Mutex},\n\n time::SystemTime,\n\n };\n", "file_path": "crates/interledger-service-util/src/validator_service.rs", "rank": 37, "score": 227945.68548044332 }, { "content": "#[doc(hidden)]\n\n#[allow(clippy::all)]\n\npub fn run_ethereum_engine<R, Si>(\n\n redis_uri: R,\n\n ethereum_endpoint: String,\n\n settlement_port: u16,\n\n secret_seed: &[u8; 32],\n\n private_key: Si,\n\n chain_id: u8,\n\n confirmations: u8,\n\n asset_scale: u8,\n\n poll_frequency: u64,\n\n connector_url: String,\n\n token_address: Option<Address>,\n\n watch_incoming: bool,\n\n) -> impl Future<Item = (), Error = ()>\n\nwhere\n\n R: IntoConnectionInfo,\n\n Si: EthereumLedgerTxSigner + Clone + Send + Sync + 'static,\n\n{\n\n let redis_secret = generate_redis_secret(secret_seed);\n\n let redis_uri = 
redis_uri.into_connection_info().unwrap();\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/eth_engine.rs", "rank": 38, "score": 227423.08830973023 }, { "content": "pub fn extract_var_octet_string(mut buffer: BytesMut) -> Result<BytesMut> {\n\n let buffer_length = buffer.len();\n\n let mut reader = &buffer[..];\n\n let content_length = reader.read_var_octet_string_length()?;\n\n let content_offset = buffer_length - reader.len();\n\n\n\n let mut remaining = buffer.split_off(content_offset);\n\n if remaining.len() < content_length {\n\n Err(Error::new(ErrorKind::UnexpectedEof, \"buffer too small\"))\n\n } else {\n\n Ok(remaining.split_to(content_length))\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-packet/src/oer.rs", "rank": 39, "score": 225317.2950358555 }, { "content": "pub fn start_ganache() -> std::process::Child {\n\n let mut ganache = Command::new(\"ganache-cli\");\n\n let ganache = ganache.stdout(std::process::Stdio::null()).arg(\"-m\").arg(\n\n \"abstract vacuum mammal awkward pudding scene penalty purchase dinner depart evoke puzzle\",\n\n );\n\n let ganache_pid = ganache.spawn().expect(\"couldnt start ganache-cli\");\n\n // wait a couple of seconds for ganache to boot up\n\n sleep(Duration::from_secs(5));\n\n ganache_pid\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/test_helpers.rs", "rank": 40, "score": 224008.57064698995 }, { "content": "fn validate_auth<U, C, A>(store: U, connection: C) -> impl Future<Item = (A, C), Error = ()>\n\nwhere\n\n U: BtpStore<Account = A> + 'static,\n\n C: Stream<Item = Message> + Sink<SinkItem = Message>,\n\n A: BtpAccount + 'static,\n\n{\n\n get_auth(connection).and_then(move |(auth, connection)| {\n\n let token = auth.token.clone();\n\n store\n\n .get_account_from_btp_token(&auth.token)\n\n .map_err(move |_| warn!(\"Got unauthorized connection with token: {}\", token))\n\n .and_then(move |account| {\n\n let auth_response = 
Message::Binary(\n\n BtpResponse {\n\n request_id: auth.request_id,\n\n protocol_data: Vec::new(),\n\n }\n\n .to_bytes(),\n\n );\n\n connection\n\n .send(auth_response)\n\n .map_err(|_err| error!(\"Error sending auth response\"))\n\n .and_then(|connection| Ok((account, connection)))\n\n })\n\n })\n\n}\n\n\n", "file_path": "crates/interledger-btp/src/server.rs", "rank": 41, "score": 223857.8661473184 }, { "content": "fn start_ganache() -> std::process::Child {\n\n let mut ganache = Command::new(\"ganache-cli\");\n\n let ganache = ganache.stdout(std::process::Stdio::null()).arg(\"-m\").arg(\n\n \"abstract vacuum mammal awkward pudding scene penalty purchase dinner depart evoke puzzle\",\n\n );\n\n let ganache_pid = ganache.spawn().expect(\"couldnt start ganache-cli\");\n\n // wait a couple of seconds for ganache to boot up\n\n sleep(Duration::from_secs(5));\n\n ganache_pid\n\n}\n\nuse interledger_settlement_engines::engines::ethereum_ledger::run_ethereum_engine;\n\n\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 42, "score": 222860.34098717495 }, { "content": "fn optional_bytes_to_utf8<S>(bytes: &Option<Bytes>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n if let Some(bytes) = bytes {\n\n serializer.serialize_some(str::from_utf8(bytes.as_ref()).unwrap_or(\"\"))\n\n } else {\n\n serializer.serialize_none()\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/src/account.rs", "rank": 43, "score": 221594.13769625392 }, { "content": "/// A trait for Store implmentations that have ILP routing tables.\n\npub trait RouterStore: AccountStore + Clone + Send + Sync + 'static {\n\n /// **Synchronously** return a copy of the routing table.\n\n /// Note that this is synchronous because it assumes that Stores should\n\n /// keep the routing table in memory and use PubSub or polling to keep it updated.\n\n /// This ensures that individual packets can be routed without hitting the 
underlying store.\n\n // TODO avoid using HashMap because it means it'll be cloned a lot\n\n fn routing_table(&self) -> HashMap<Bytes, <Self::Account as Account>::AccountId>;\n\n}\n", "file_path": "crates/interledger-router/src/lib.rs", "rank": 44, "score": 221023.70484933376 }, { "content": "fn deserialize_string_to_address<'de, D>(deserializer: D) -> Result<Address, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n Address::from_str(&String::deserialize(deserializer)?)\n\n .map_err(|err| DeserializeError::custom(format!(\"Invalid address: {:?}\", err)))\n\n}\n\n\n", "file_path": "crates/interledger/src/node.rs", "rank": 45, "score": 220577.20819566108 }, { "content": "pub fn parse_btp_url(uri: &str) -> Result<Url, ParseError> {\n\n let uri = if uri.starts_with(\"btp+\") {\n\n uri.split_at(4).1\n\n } else {\n\n uri\n\n };\n\n Url::parse(uri)\n\n}\n\n\n", "file_path": "crates/interledger-btp/src/client.rs", "rank": 46, "score": 212682.09694301675 }, { "content": "#[test]\n\nfn errors_for_unknown_accounts() {\n\n let result = block_on(test_store().and_then(|(store, context)| {\n\n store.get_accounts(vec![0, 5]).then(move |result| {\n\n let _ = context;\n\n result\n\n })\n\n }));\n\n assert!(result.is_err());\n\n}\n", "file_path": "crates/interledger-store-redis/tests/accounts_test.rs", "rank": 47, "score": 211914.4054065036 }, { "content": "#[doc(hidden)]\n\npub fn random_token() -> String {\n\n let mut bytes: [u8; 18] = [0; 18];\n\n SystemRandom::new().fill(&mut bytes).unwrap();\n\n base64::encode_config(&bytes, base64::URL_SAFE_NO_PAD)\n\n}\n\n\n", "file_path": "crates/interledger/src/cli.rs", "rank": 48, "score": 210205.70504446328 }, { "content": "type OutgoingRequests = Arc<Mutex<Vec<OutgoingRequest<TestAccount>>>>;\n\n\n", "file_path": "crates/interledger-ccp/src/test_helpers.rs", "rank": 49, "score": 209555.91121178545 }, { "content": "struct SendMoneyFuture<S: IncomingService<A>, A: Account> {\n\n state: SendMoneyFutureState,\n\n next: Option<S>,\n\n 
from_account: A,\n\n source_account: Address,\n\n destination_account: Address,\n\n shared_secret: Bytes,\n\n source_amount: u64,\n\n congestion_controller: CongestionController,\n\n pending_requests: Cell<Vec<PendingRequest>>,\n\n delivered_amount: u64,\n\n should_send_source_account: bool,\n\n sequence: u64,\n\n rejected_packets: u64,\n\n error: Option<Error>,\n\n}\n\n\n", "file_path": "crates/interledger-stream/src/client.rs", "rank": 50, "score": 207654.48373595398 }, { "content": "/// Filters out transactions where the `from` and `to` fields match the provides\n\n/// addreses.\n\npub fn filter_transfer_logs(\n\n web3: Web3<Http>,\n\n contract_address: Address,\n\n from: Option<Address>,\n\n to: Option<Address>,\n\n from_block: BlockNumber,\n\n to_block: BlockNumber,\n\n) -> impl Future<Item = Vec<ERC20Transfer>, Error = ()> {\n\n let from = if let Some(from) = from {\n\n Some(vec![H256::from(from)])\n\n } else {\n\n None\n\n };\n\n let to = if let Some(to) = to {\n\n Some(vec![H256::from(to)])\n\n } else {\n\n None\n\n };\n\n\n\n // create a filter for Transfer events from `from_block` until `to_block\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/utils.rs", "rank": 51, "score": 207010.1910738848 }, { "content": "pub trait SettlementAccount: Account {\n\n fn settlement_engine_details(&self) -> Option<SettlementEngineDetails> {\n\n None\n\n }\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/lib.rs", "rank": 52, "score": 203245.60281849044 }, { "content": "#[allow(dead_code)]\n\npub fn mock_settlement(status_code: usize) -> mockito::Mock {\n\n mock(\"POST\", SETTLEMENT_API.clone())\n\n // The settlement API receives json data\n\n .match_header(\"Content-Type\", \"application/json\")\n\n .with_status(status_code)\n\n .with_body(BODY)\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/test_helpers.rs", "rank": 53, "score": 202725.5001383836 }, { "content": "fn payment_pointer_to_url(payment_pointer: &str) 
-> String {\n\n let mut url: String = if payment_pointer.starts_with('$') {\n\n let mut url = \"https://\".to_string();\n\n url.push_str(&payment_pointer[1..]);\n\n url\n\n } else {\n\n payment_pointer.to_string()\n\n };\n\n\n\n let num_slashes = url.matches('/').count();\n\n if num_slashes == 2 {\n\n url.push_str(\"/.well-known/pay\");\n\n } else if num_slashes == 1 && url.ends_with('/') {\n\n url.push_str(\".well-known/pay\");\n\n }\n\n trace!(\n\n \"Converted payment pointer: {} to URL: {}\",\n\n payment_pointer,\n\n url\n\n );\n", "file_path": "crates/interledger-spsp/src/client.rs", "rank": 54, "score": 197811.6735813315 }, { "content": "#[test]\n\nfn gets_accounts_to_send_routes_to() {\n\n block_on(test_store().and_then(|(store, context)| {\n\n store\n\n .get_accounts_to_send_routes_to()\n\n .and_then(move |accounts| {\n\n assert_eq!(accounts[0].id(), 1);\n\n assert_eq!(accounts.len(), 1);\n\n let _ = context;\n\n Ok(())\n\n })\n\n }))\n\n .unwrap()\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/tests/routing_test.rs", "rank": 55, "score": 197681.16669668994 }, { "content": "pub fn is_ildcp_request(prepare: &Prepare) -> bool {\n\n prepare.execution_condition() == PEER_PROTOCOL_CONDITION\n\n && prepare.destination() == *ILDCP_DESTINATION\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct IldcpRequest {}\n\n\n\nimpl IldcpRequest {\n\n pub fn new() -> Self {\n\n IldcpRequest {}\n\n }\n\n\n\n pub fn to_prepare(&self) -> Prepare {\n\n PrepareBuilder {\n\n destination: (*ILDCP_DESTINATION).clone(),\n\n amount: 0,\n\n execution_condition: &PEER_PROTOCOL_CONDITION,\n\n expires_at: SystemTime::now() + *PEER_PROTOCOL_EXPIRY_DURATION,\n\n data: &[],\n", "file_path": "crates/interledger-ildcp/src/packet.rs", "rank": 56, "score": 192917.82047083313 }, { "content": "/// The interface for Stores that can be used with the HttpServerService.\n\n// TODO do we need all of these constraints?\n\npub trait HttpStore: Clone + Send + Sync + 'static {\n\n type Account: 
HttpAccount;\n\n\n\n /// Load account details based on the full HTTP Authorization header\n\n /// received on the incoming HTTP request.\n\n fn get_account_from_http_token(\n\n &self,\n\n token: &str,\n\n ) -> Box<dyn Future<Item = Self::Account, Error = ()> + Send>;\n\n}\n", "file_path": "crates/interledger-http/src/lib.rs", "rank": 57, "score": 192262.99664450242 }, { "content": "pub trait NodeStore: Clone + Send + Sync + 'static {\n\n type Account: AccountTrait;\n\n\n\n fn insert_account(\n\n &self,\n\n account: AccountDetails,\n\n ) -> Box<dyn Future<Item = Self::Account, Error = ()> + Send>;\n\n\n\n // TODO limit the number of results and page through them\n\n fn get_all_accounts(&self) -> Box<dyn Future<Item = Vec<Self::Account>, Error = ()> + Send>;\n\n\n\n fn set_rates<R>(&self, rates: R) -> Box<dyn Future<Item = (), Error = ()> + Send>\n\n where\n\n R: IntoIterator<Item = (String, f64)>;\n\n\n\n fn set_static_routes<R>(&self, routes: R) -> Box<dyn Future<Item = (), Error = ()> + Send>\n\n where\n\n R: IntoIterator<Item = (String, <Self::Account as AccountTrait>::AccountId)>;\n\n\n\n fn set_static_route(\n", "file_path": "crates/interledger-api/src/lib.rs", "rank": 58, "score": 192258.2037684894 }, { "content": "fn default_settlement_address() -> SocketAddr {\n\n SocketAddr::from(([127, 0, 0, 1], 7771))\n\n}\n", "file_path": "crates/interledger/src/node.rs", "rank": 59, "score": 191481.5450642481 }, { "content": "pub fn mock_message(status_code: usize) -> mockito::Mock {\n\n mock(\"POST\", MESSAGES_API.clone())\n\n // The messages API receives raw data\n\n .match_header(\"Content-Type\", \"application/octet-stream\")\n\n .with_status(status_code)\n\n .with_body(BODY)\n\n}\n\n\n", "file_path": "crates/interledger-settlement/src/test_helpers.rs", "rank": 60, "score": 191384.118291836 }, { "content": "/// Implement this trait for datatypes which can be used to sign an Ethereum\n\n/// Transaction, e.g. 
an HSM, Ledger, Trezor connection, or a private key\n\n/// string.\n\n/// TODO: All methods should be converted to return a Future, since an HSM\n\n/// connection is asynchronous\n\npub trait EthereumLedgerTxSigner {\n\n /// Takes a transaction and returns an RLP encoded signed version of it\n\n fn sign_raw_tx(&self, tx: RawTransaction, chain_id: u8) -> Vec<u8>;\n\n\n\n /// Takes a message and returns a signature on it\n\n fn sign_message(&self, message: &[u8]) -> Signature;\n\n\n\n /// Returns the Ethereum address associated with the signer\n\n fn address(&self) -> Address;\n\n}\n\n\n\nimpl EthereumLedgerTxSigner for String {\n\n fn sign_raw_tx(&self, tx: RawTransaction, chain_id: u8) -> Vec<u8> {\n\n tx.sign(&H256::from_str(self).unwrap(), &chain_id)\n\n }\n\n\n\n fn sign_message(&self, message: &[u8]) -> Signature {\n\n let private_key: PrivateKey = self.parse().unwrap();\n\n let hash = Sha3::digest(message);\n\n private_key.sign_hash(&hash)\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/types.rs", "rank": 61, "score": 189763.07683523392 }, { "content": "pub fn test_service_with_routes() -> (\n\n CcpRouteManager<\n\n impl IncomingService<TestAccount, Future = BoxedIlpFuture> + Clone,\n\n impl OutgoingService<TestAccount, Future = BoxedIlpFuture> + Clone,\n\n TestStore,\n\n TestAccount,\n\n >,\n\n OutgoingRequests,\n\n) {\n\n let local_routes = HashMap::from_iter(vec![\n\n (\n\n Bytes::from(\"example.local.1\"),\n\n TestAccount::new(1, \"example.local.1\"),\n\n ),\n\n (\n\n Bytes::from(\"example.connector.other-local\"),\n\n TestAccount {\n\n id: 3,\n\n ilp_address: Address::from_str(\"example.connector.other-local\").unwrap(),\n\n send_routes: false,\n", "file_path": "crates/interledger-ccp/src/test_helpers.rs", "rank": 62, "score": 187786.28889840303 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize)]\n\nstruct PaymentDetailsResponse {\n\n to: Addresses,\n\n sig: Signature,\n\n}\n\n\n\nimpl 
PaymentDetailsResponse {\n\n fn new(to: Addresses, sig: Signature) -> Self {\n\n PaymentDetailsResponse { to, sig }\n\n }\n\n}\n\n\n\n/// # Ethereum Ledger Settlement Engine\n\n///\n\n/// Settlement Engine compliant to [RFC536](https://github.com/interledger/rfcs/pull/536/)\n\n///\n\n/// The engine connects to an Ethereum node (over HTTP) as well as the connector. Its\n\n/// functions are exposed via the Settlement Engine API.\n\n///\n\n/// It requires a `confirmations` security parameter which is used to ensure\n\n/// that all transactions that get sent to the connector have sufficient\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/eth_engine.rs", "rank": 63, "score": 186685.12674806506 }, { "content": "pub fn get_open_port(try_port: Option<u16>) -> u16 {\n\n if let Some(port) = try_port {\n\n let listener = net2::TcpBuilder::new_v4().unwrap();\n\n listener.reuse_address(true).unwrap();\n\n if let Ok(listener) = listener.bind(&format!(\"127.0.0.1:{}\", port)) {\n\n return listener.listen(1).unwrap().local_addr().unwrap().port();\n\n }\n\n }\n\n\n\n for _i in 0..1000 {\n\n let listener = net2::TcpBuilder::new_v4().unwrap();\n\n listener.reuse_address(true).unwrap();\n\n if let Ok(listener) = listener.bind(\"127.0.0.1:0\") {\n\n return listener.listen(1).unwrap().local_addr().unwrap().port();\n\n }\n\n }\n\n panic!(\"Cannot find open port!\");\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/tests/redis_helpers.rs", "rank": 64, "score": 185747.92943173286 }, { "content": "/// Trait used to store Ethereum account addresses, as well as any data related\n\n/// to the connector notifier service such as the most recently observed block\n\n/// and account balance\n\npub trait EthereumStore {\n\n type Account: EthereumAccount;\n\n\n\n /// Saves the Ethereum address associated with this account\n\n /// called when creating an account on the API.\n\n fn save_account_addresses(\n\n &self,\n\n data: HashMap<<Self::Account as 
Account>::AccountId, Addresses>,\n\n ) -> Box<dyn Future<Item = (), Error = ()> + Send>;\n\n\n\n /// Loads the Ethereum address associated with this account\n\n fn load_account_addresses(\n\n &self,\n\n account_ids: Vec<<Self::Account as Account>::AccountId>,\n\n ) -> Box<dyn Future<Item = Vec<Addresses>, Error = ()> + Send>;\n\n\n\n /// Saves the latest block number, up to which all\n\n /// transactions have been communicated to the connector\n\n fn save_recently_observed_block(\n\n &self,\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/types.rs", "rank": 65, "score": 184319.28012471698 }, { "content": "pub fn decrypt(shared_secret: &[u8], mut ciphertext: BytesMut) -> Result<BytesMut, ()> {\n\n let key = hmac_sha256(shared_secret, &ENCRYPTION_KEY_STRING);\n\n let key = aead::OpeningKey::new(&aead::AES_256_GCM, &key)\n\n .expect(\"Failed to create a new opening key for decrypting data!\");\n\n\n\n let mut nonce: [u8; NONCE_LENGTH] = [0; NONCE_LENGTH];\n\n nonce.copy_from_slice(&ciphertext.split_to(NONCE_LENGTH));\n\n\n\n let auth_tag = ciphertext.split_to(AUTH_TAG_LENGTH);\n\n let additional_data: &[u8] = &[];\n\n\n\n // Ring expects the tag to come after the data\n\n ciphertext.unsplit(auth_tag);\n\n\n\n let length = aead::open_in_place(\n\n &key,\n\n aead::Nonce::assume_unique_for_key(nonce),\n\n aead::Aad::from(additional_data),\n\n 0,\n\n ciphertext.as_mut(),\n", "file_path": "crates/interledger-stream/src/crypto.rs", "rank": 66, "score": 183636.20246926975 }, { "content": "fn deserialize_redis_connection<'de, D>(deserializer: D) -> Result<ConnectionInfo, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n Url::parse(&String::deserialize(deserializer)?)\n\n .map_err(|err| DeserializeError::custom(format!(\"Invalid URL: {:?}\", err)))?\n\n .into_connection_info()\n\n .map_err(|err| {\n\n DeserializeError::custom(format!(\n\n \"Error converting into Redis connection info: {:?}\",\n\n err\n\n ))\n\n })\n\n}\n\n\n\n/// An 
all-in-one Interledger node that includes sender and receiver functionality,\n\n/// a connector, and a management API. The node uses Redis for persistence.\n\n#[derive(Deserialize, Clone)]\n\npub struct InterledgerNode {\n\n /// ILP address of the node\n", "file_path": "crates/interledger/src/node.rs", "rank": 67, "score": 181857.53078058304 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\npub fn main() {\n\n env_logger::init();\n\n\n\n let mut app = App::new(\"interledger-settlement-engines\")\n\n .about(\"Interledger Settlement Engines CLI\")\n\n .subcommands(vec![\n\n SubCommand::with_name(\"ethereum-ledger\")\n\n .about(\"Ethereum settlement engine which performs ledger (layer 1) transactions\")\n\n .args(&[\n\n Arg::with_name(\"port\")\n\n .long(\"port\")\n\n .help(\"Port to listen for settlement requests on\")\n\n .default_value(\"3000\"),\n\n Arg::with_name(\"key\")\n\n .long(\"key\")\n\n .help(\"private key for settlement account\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::with_name(\"ethereum_endpoint\")\n\n .long(\"ethereum_endpoint\")\n", "file_path": "crates/interledger-settlement-engines/src/main.rs", "rank": 68, "score": 181841.86844556668 }, { "content": "#[doc(hidden)]\n\npub fn send_spsp_payment_btp(\n\n btp_server: &str,\n\n receiver: &str,\n\n amount: u64,\n\n quiet: bool,\n\n) -> impl Future<Item = (), Error = ()> {\n\n let receiver = receiver.to_string();\n\n let btp_server = parse_btp_url(btp_server).unwrap();\n\n let account = AccountBuilder::new(LOCAL_ILP_ADDRESS.clone())\n\n .additional_routes(&[&b\"\"[..]])\n\n .btp_outgoing_token(btp_server.password().unwrap_or_default().to_string())\n\n .btp_uri(btp_server)\n\n .build();\n\n connect_client(\n\n vec![account.clone()],\n\n true,\n\n outgoing_service_fn(|request: OutgoingRequest<Account>| {\n\n Err(RejectBuilder {\n\n code: ErrorCode::F02_UNREACHABLE,\n\n message: &format!(\n", "file_path": "crates/interledger/src/cli.rs", "rank": 69, "score": 180479.50964327692 }, { 
"content": "#[doc(hidden)]\n\npub fn send_spsp_payment_http(\n\n http_server: &str,\n\n receiver: &str,\n\n amount: u64,\n\n quiet: bool,\n\n) -> impl Future<Item = (), Error = ()> {\n\n let receiver = receiver.to_string();\n\n let url = Url::parse(http_server).expect(\"Cannot parse HTTP URL\");\n\n let account = if let Some(token) = url.password() {\n\n AccountBuilder::new(LOCAL_ILP_ADDRESS.clone())\n\n .additional_routes(&[&b\"\"[..]])\n\n .http_endpoint(Url::parse(http_server).unwrap())\n\n .http_outgoing_token(token.to_string())\n\n .build()\n\n } else {\n\n AccountBuilder::new(LOCAL_ILP_ADDRESS.clone())\n\n .additional_routes(&[&b\"\"[..]])\n\n .http_endpoint(Url::parse(http_server).unwrap())\n\n .build()\n\n };\n", "file_path": "crates/interledger/src/cli.rs", "rank": 70, "score": 180479.50964327692 }, { "content": "pub fn test_service() -> CcpRouteManager<\n\n impl IncomingService<TestAccount, Future = BoxedIlpFuture> + Clone,\n\n impl OutgoingService<TestAccount, Future = BoxedIlpFuture> + Clone,\n\n TestStore,\n\n TestAccount,\n\n> {\n\n let addr = Address::from_str(\"example.connector\").unwrap();\n\n CcpRouteManagerBuilder::new(\n\n addr.clone(),\n\n TestStore::new(),\n\n outgoing_service_fn(|_request| {\n\n Box::new(err(RejectBuilder {\n\n code: ErrorCode::F02_UNREACHABLE,\n\n message: b\"No other outgoing handler!\",\n\n data: &[],\n\n triggered_by: Some(&EXAMPLE_CONNECTOR),\n\n }\n\n .build()))\n\n }),\n\n incoming_service_fn(|_request| {\n", "file_path": "crates/interledger-ccp/src/test_helpers.rs", "rank": 71, "score": 179424.40732386857 }, { "content": " runtime.spawn(\n\n run_ethereum_engine(\n\n connection_info2,\n\n \"http://localhost:8545\".to_string(),\n\n node2_engine,\n\n &node2_secret,\n\n bob_key,\n\n 1,\n\n 0,\n\n 18,\n\n 1000,\n\n format!(\"http://127.0.0.1:{}\", node2_settlement),\n\n None,\n\n true,\n\n )\n\n .and_then(move |_| {\n\n node2\n\n .insert_account(AccountDetails {\n\n ilp_address: 
Address::from_str(\"example.bob\").unwrap(),\n\n asset_code: \"ETH\".to_string(),\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 72, "score": 178151.2617642873 }, { "content": " \"receiver\": format!(\"http://localhost:{}/.well-known/pay\", to),\n\n \"source_amount\": amount,\n\n }))\n\n .send()\n\n .map_err(|err| {\n\n eprintln!(\"Error sending SPSP payment: {:?}\", err);\n\n err\n\n })\n\n .and_then(|res| res.error_for_status())\n\n .and_then(|res| res.into_body().concat2())\n\n .and_then(move |body| {\n\n let ret: DeliveryData = serde_json::from_slice(&body).unwrap();\n\n assert_eq!(ret.delivered_amount, amount);\n\n Ok(())\n\n })\n\n };\n\n\n\n let create1 = create_account(node1_engine, \"1\");\n\n let create2 = create_account(node2_engine, \"1\");\n\n\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 73, "score": 178147.20134372165 }, { "content": " runtime.spawn(\n\n run_ethereum_engine(\n\n connection_info1,\n\n \"http://localhost:8545\".to_string(),\n\n node1_engine,\n\n &node1_secret,\n\n alice_key,\n\n 1,\n\n 0,\n\n 18,\n\n 1000,\n\n format!(\"http://127.0.0.1:{}\", node1_settlement),\n\n None,\n\n true,\n\n )\n\n .and_then(move |_| {\n\n // TODO insert the accounts via HTTP request\n\n node1_clone\n\n .insert_account(AccountDetails {\n\n ilp_address: Address::from_str(\"example.alice\").unwrap(),\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 74, "score": 178146.14485972383 }, { "content": " })\n\n })\n\n // Up to here, Alice's balance should be -70 and Bob's\n\n // balance should be 70. 
Once we make 1 more payment, we\n\n // exceed the settle_threshold and thus a settlement is made\n\n .and_then(move |_| send4)\n\n .and_then(move |_| {\n\n // Wait a few seconds so that the receiver's engine\n\n // gets the data\n\n sleep(Duration::from_secs(5));\n\n // Since the credit connection reached -71, and the\n\n // settle_to is -10, a 61 Wei transaction is made.\n\n get_balance(1, node1_http, \"bob\").and_then(move |ret| {\n\n let ret = str::from_utf8(&ret).unwrap();\n\n assert_eq!(ret, \"{\\\"balance\\\":\\\"10\\\"}\");\n\n get_balance(1, node2_http, \"alice\").and_then(move |ret| {\n\n let ret = str::from_utf8(&ret).unwrap();\n\n assert_eq!(ret, \"{\\\"balance\\\":\\\"-10\\\"}\");\n\n ganache_pid.kill().unwrap();\n\n Ok(())\n\n })\n\n })\n\n })\n\n }),\n\n )\n\n .unwrap();\n\n}\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 75, "score": 178145.3719654929 }, { "content": " settlement_engine_url: Some(format!(\"http://localhost:{}\", node1_engine)),\n\n settlement_engine_asset_scale: Some(18),\n\n })\n\n })\n\n .and_then(move |_| node1.serve())\n\n }),\n\n );\n\n\n\n let node2_secret = cli::random_secret();\n\n let node2 = InterledgerNode {\n\n ilp_address: Address::from_str(\"example.bob\").unwrap(),\n\n default_spsp_account: Some(0),\n\n admin_auth_token: \"admin\".to_string(),\n\n redis_connection: connection_info2.clone(),\n\n btp_address: ([127, 0, 0, 1], get_open_port(None)).into(),\n\n http_address: ([127, 0, 0, 1], node2_http).into(),\n\n settlement_address: ([127, 0, 0, 1], node2_settlement).into(),\n\n secret_seed: node2_secret,\n\n route_broadcast_interval: Some(200),\n\n };\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 76, "score": 178145.21191120008 }, { "content": " .and_then(move |_| {\n\n node1_clone.insert_account(AccountDetails {\n\n ilp_address: Address::from_str(\"example.bob\").unwrap(),\n\n asset_code: \"ETH\".to_string(),\n\n 
asset_scale: 18,\n\n btp_incoming_token: None,\n\n btp_uri: None,\n\n http_endpoint: Some(format!(\"http://localhost:{}/ilp\", node2_http)),\n\n http_incoming_token: Some(\"bob\".to_string()),\n\n http_outgoing_token: Some(\"alice\".to_string()),\n\n max_packet_amount: 10,\n\n min_balance: Some(-100),\n\n settle_threshold: Some(70),\n\n settle_to: Some(10),\n\n send_routes: false,\n\n receive_routes: false,\n\n routing_relation: None,\n\n round_trip_time: None,\n\n packets_per_minute_limit: None,\n\n amount_per_minute_limit: None,\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 77, "score": 178144.569574594 }, { "content": " let bob_key = \"cc96601bc52293b53c4736a12af9130abf347669b3813f9ec4cafdf6991b087e\".to_string();\n\n\n\n let mut runtime = RuntimeBuilder::new()\n\n .panic_handler(|_| panic!(\"Tokio worker panicked\"))\n\n .build()\n\n .unwrap();\n\n\n\n let node1_secret = cli::random_secret();\n\n let node1 = InterledgerNode {\n\n ilp_address: Address::from_str(\"example.alice\").unwrap(),\n\n default_spsp_account: Some(0),\n\n admin_auth_token: \"hi_alice\".to_string(),\n\n redis_connection: connection_info1.clone(),\n\n btp_address: ([127, 0, 0, 1], get_open_port(None)).into(),\n\n http_address: ([127, 0, 0, 1], node1_http).into(),\n\n settlement_address: ([127, 0, 0, 1], node1_settlement).into(),\n\n secret_seed: node1_secret,\n\n route_broadcast_interval: Some(200),\n\n };\n\n let node1_clone = node1.clone();\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 78, "score": 178144.34209840657 }, { "content": " node2\n\n .insert_account(AccountDetails {\n\n ilp_address: Address::from_str(\"example.alice\").unwrap(),\n\n asset_code: \"ETH\".to_string(),\n\n asset_scale: 18,\n\n btp_incoming_token: None,\n\n btp_uri: None,\n\n http_endpoint: Some(format!(\"http://localhost:{}/ilp\", node1_http)),\n\n http_incoming_token: Some(\"alice\".to_string()),\n\n 
http_outgoing_token: Some(\"bob\".to_string()),\n\n max_packet_amount: 10,\n\n min_balance: Some(-100),\n\n settle_threshold: Some(70),\n\n settle_to: Some(-10),\n\n send_routes: false,\n\n receive_routes: false,\n\n routing_relation: None,\n\n round_trip_time: None,\n\n packets_per_minute_limit: None,\n\n amount_per_minute_limit: None,\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 79, "score": 178141.74765471247 }, { "content": "#![recursion_limit = \"128\"]\n\n\n\nuse env_logger;\n\nuse futures::{Future, Stream};\n\nuse interledger::{\n\n cli,\n\n node::{AccountDetails, InterledgerNode},\n\n};\n\nuse interledger_packet::Address;\n\nuse serde_json::json;\n\nuse std::str;\n\nuse std::str::FromStr;\n\nuse std::thread::sleep;\n\nuse std::time::Duration;\n\nuse tokio::runtime::Builder as RuntimeBuilder;\n\n\n\nmod redis_helpers;\n\nuse redis_helpers::*;\n\nuse std::process::Command;\n\n\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 80, "score": 178139.80674619743 }, { "content": " // Make 4 subsequent payments (we could also do a 71 payment\n\n // directly)\n\n let send1 = send_money(node1_http, node2_http, 10);\n\n let send2 = send_money(node1_http, node2_http, 20);\n\n let send3 = send_money(node1_http, node2_http, 40);\n\n let send4 = send_money(node1_http, node2_http, 1);\n\n\n\n let get_balance = |account_id, node_port, admin_token| {\n\n let client = reqwest::r#async::Client::new();\n\n client\n\n .get(&format!(\n\n \"http://localhost:{}/accounts/{}/balance\",\n\n node_port, account_id\n\n ))\n\n .header(\"Authorization\", format!(\"Bearer {}\", admin_token))\n\n .send()\n\n .map_err(|err| {\n\n eprintln!(\"Error getting account data: {:?}\", err);\n\n err\n\n })\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 81, "score": 178139.38016244295 }, { "content": " // accounts via the Accounts API.\n\n let client = 
reqwest::r#async::Client::new();\n\n\n\n let create_account = |engine_port, account_id| {\n\n client\n\n .post(&format!(\"http://localhost:{}/accounts\", engine_port))\n\n .json(&json!({ \"id\": account_id }))\n\n .send()\n\n .map_err(|err| {\n\n eprintln!(\"Error creating account: {:?}\", err);\n\n err\n\n })\n\n .and_then(|res| res.error_for_status())\n\n };\n\n\n\n let send_money = |from, to, amount| {\n\n client\n\n .post(&format!(\"http://localhost:{}/pay\", from))\n\n .header(\"Authorization\", \"Bearer in_alice\")\n\n .json(&json!({\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 82, "score": 178138.72750508544 }, { "content": " settlement_engine_url: Some(format!(\n\n \"http://localhost:{}\",\n\n node2_engine\n\n )),\n\n settlement_engine_asset_scale: Some(18),\n\n })\n\n .and_then(move |_| node2.serve())\n\n })\n\n }),\n\n );\n\n\n\n runtime\n\n .block_on(\n\n // Wait for the nodes to spin up\n\n delay(500)\n\n .map_err(|_| panic!(\"Something strange happened\"))\n\n .and_then(move |_| {\n\n // The 2 nodes are peered, we make a POST to the engine's\n\n // create account endpoint so that they trade addresses.\n\n // This would happen automatically if we inserted the\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 83, "score": 178138.53969318885 }, { "content": " .and_then(|res| res.error_for_status())\n\n .and_then(|res| res.into_body().concat2())\n\n };\n\n\n\n create1\n\n .and_then(move |_| create2)\n\n .and_then(move |_| send1)\n\n .and_then(move |_| {\n\n get_balance(1, node1_http, \"bob\").and_then(move |ret| {\n\n let ret = str::from_utf8(&ret).unwrap();\n\n assert_eq!(ret, \"{\\\"balance\\\":\\\"10\\\"}\");\n\n get_balance(1, node2_http, \"alice\").and_then(move |ret| {\n\n let ret = str::from_utf8(&ret).unwrap();\n\n assert_eq!(ret, \"{\\\"balance\\\":\\\"-10\\\"}\");\n\n Ok(())\n\n })\n\n })\n\n })\n\n .and_then(move |_| send2)\n\n .and_then(move |_| 
{\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 84, "score": 178137.78067175407 }, { "content": " asset_code: \"ETH\".to_string(),\n\n asset_scale: 18,\n\n btp_incoming_token: None,\n\n btp_uri: None,\n\n http_endpoint: None,\n\n http_incoming_token: Some(\"in_alice\".to_string()),\n\n http_outgoing_token: Some(\"out_alice\".to_string()),\n\n max_packet_amount: 10,\n\n min_balance: None,\n\n settle_threshold: None,\n\n settle_to: Some(-10),\n\n send_routes: false,\n\n receive_routes: false,\n\n routing_relation: None,\n\n round_trip_time: None,\n\n packets_per_minute_limit: None,\n\n amount_per_minute_limit: None,\n\n settlement_engine_url: None,\n\n settlement_engine_asset_scale: None,\n\n })\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 85, "score": 178135.79982473337 }, { "content": " get_balance(1, node1_http, \"bob\").and_then(move |ret| {\n\n let ret = str::from_utf8(&ret).unwrap();\n\n assert_eq!(ret, \"{\\\"balance\\\":\\\"30\\\"}\");\n\n get_balance(1, node2_http, \"alice\").and_then(move |ret| {\n\n let ret = str::from_utf8(&ret).unwrap();\n\n assert_eq!(ret, \"{\\\"balance\\\":\\\"-30\\\"}\");\n\n Ok(())\n\n })\n\n })\n\n })\n\n .and_then(move |_| send3)\n\n .and_then(move |_| {\n\n get_balance(1, node1_http, \"bob\").and_then(move |ret| {\n\n let ret = str::from_utf8(&ret).unwrap();\n\n assert_eq!(ret, \"{\\\"balance\\\":\\\"70\\\"}\");\n\n get_balance(1, node2_http, \"alice\").and_then(move |ret| {\n\n let ret = str::from_utf8(&ret).unwrap();\n\n assert_eq!(ret, \"{\\\"balance\\\":\\\"-70\\\"}\");\n\n Ok(())\n\n })\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 86, "score": 178135.70264712235 }, { "content": " asset_scale: 18,\n\n btp_incoming_token: None,\n\n btp_uri: None,\n\n http_endpoint: None,\n\n http_incoming_token: Some(\"in_bob\".to_string()),\n\n http_outgoing_token: 
Some(\"out_bob\".to_string()),\n\n max_packet_amount: 10,\n\n min_balance: None,\n\n settle_threshold: None,\n\n settle_to: None,\n\n send_routes: false,\n\n receive_routes: false,\n\n routing_relation: None,\n\n round_trip_time: None,\n\n packets_per_minute_limit: None,\n\n amount_per_minute_limit: None,\n\n settlement_engine_url: None,\n\n settlement_engine_asset_scale: None,\n\n })\n\n .and_then(move |_| {\n", "file_path": "crates/interledger-settlement-engines/tests/eth_ledger_settlement.rs", "rank": 87, "score": 178135.33702617482 }, { "content": "fn benchmark_serialize(c: &mut Criterion) {\n\n let prepare_bytes = BytesMut::from(PREPARE.build());\n\n c.bench_function(\"Prepare (serialize)\", move |b| {\n\n b.iter(|| {\n\n assert_eq!(BytesMut::from(PREPARE.build()), prepare_bytes);\n\n });\n\n });\n\n\n\n let fulfill_bytes = BytesMut::from(FULFILL.build());\n\n c.bench_function(\"Fulfill (serialize)\", move |b| {\n\n b.iter(|| {\n\n assert_eq!(BytesMut::from(FULFILL.build()), fulfill_bytes);\n\n });\n\n });\n\n\n\n let reject_bytes = BytesMut::from(REJECT.build());\n\n c.bench_function(\"Reject (serialize)\", move |b| {\n\n b.iter(|| {\n\n assert_eq!(BytesMut::from(REJECT.build()), reject_bytes);\n\n });\n\n });\n\n}\n\n\n", "file_path": "crates/interledger-packet/benches/packets.rs", "rank": 88, "score": 176908.4058253242 }, { "content": "#[doc(hidden)]\n\npub fn insert_account_redis<R>(\n\n redis_uri: R,\n\n secret_seed: &[u8; 32],\n\n account: AccountDetails,\n\n) -> impl Future<Item = (), Error = ()>\n\nwhere\n\n R: IntoConnectionInfo,\n\n{\n\n let redis_secret = generate_redis_secret(secret_seed);\n\n result(redis_uri.into_connection_info())\n\n .map_err(|err| error!(\"Invalid Redis connection details: {:?}\", err))\n\n .and_then(move |redis_uri| RedisStoreBuilder::new(redis_uri, redis_secret).connect())\n\n .map_err(|err| error!(\"Error connecting to Redis: {:?}\", err))\n\n .and_then(move |store| {\n\n store\n\n .insert_account(account)\n\n .map_err(|_| 
error!(\"Unable to create account\"))\n\n .and_then(|account| {\n\n debug!(\"Created account: {}\", account.id());\n\n Ok(())\n\n })\n\n })\n\n}\n\n\n", "file_path": "crates/interledger/src/node.rs", "rank": 89, "score": 176438.11054661425 }, { "content": "fn parse_body_into_payment_details(\n\n resp: HttpResponse,\n\n) -> impl Future<Item = PaymentDetailsResponse, Error = ApiResponse> {\n\n resp.into_body()\n\n .concat2()\n\n .map_err(|err| {\n\n let err = format!(\"Couldn't retrieve body {:?}\", err);\n\n error!(\"{}\", err);\n\n (StatusCode::from_u16(500).unwrap(), err)\n\n })\n\n .and_then(move |body| {\n\n serde_json::from_slice::<PaymentDetailsResponse>(&body).map_err(|err| {\n\n let err = format!(\n\n \"Couldn't parse body {:?} into payment details {:?}\",\n\n body, err\n\n );\n\n error!(\"{}\", err);\n\n (StatusCode::from_u16(500).unwrap(), err)\n\n })\n\n })\n\n}\n\n\n", "file_path": "crates/interledger-settlement-engines/src/engines/ethereum_ledger/eth_engine.rs", "rank": 90, "score": 176323.46032885194 }, { "content": "pub fn generate_condition(shared_secret: &[u8], data: &[u8]) -> [u8; 32] {\n\n let fulfillment = generate_fulfillment(&shared_secret, &data);\n\n hash_sha256(&fulfillment)\n\n}\n\n\n", "file_path": "crates/interledger-stream/src/crypto.rs", "rank": 91, "score": 174942.77248493832 }, { "content": "pub fn generate_fulfillment(shared_secret: &[u8], data: &[u8]) -> [u8; 32] {\n\n let key = hmac_sha256(&shared_secret[..], &FULFILLMENT_GENERATION_STRING);\n\n hmac_sha256(&key[..], &data[..])\n\n}\n\n\n", "file_path": "crates/interledger-stream/src/crypto.rs", "rank": 92, "score": 174942.77248493832 }, { "content": "/// Create a BtpOutgoingService wrapping BTP connections to the accounts specified.\n\n/// Calling `handle_incoming` with an `IncomingService` will turn the returned\n\n/// BtpOutgoingService into a bidirectional handler.\n\npub fn connect_client<A, S>(\n\n accounts: Vec<A>,\n\n error_on_unavailable: bool,\n\n next_outgoing: S,\n\n) 
-> impl Future<Item = BtpOutgoingService<S, A>, Error = ()>\n\nwhere\n\n S: OutgoingService<A> + Clone + 'static,\n\n A: BtpAccount + 'static,\n\n{\n\n join_all(accounts.into_iter().map(move |account| {\n\n let account_id = account.id();\n\n let mut url = account\n\n .get_btp_uri()\n\n .expect(\"Accounts must have BTP URLs\")\n\n .clone();\n\n if url.scheme().starts_with(\"btp+\") {\n\n url.set_scheme(&url.scheme().replace(\"btp+\", \"\")).unwrap();\n\n }\n\n let token = account\n\n .get_btp_token()\n", "file_path": "crates/interledger-btp/src/client.rs", "rank": 93, "score": 173644.25314811856 }, { "content": "/// Send a given amount of money using the STREAM transport protocol.\n\n///\n\n/// This returns the amount delivered, as reported by the receiver and in the receiver's asset's units.\n\npub fn send_money<S, A>(\n\n service: S,\n\n from_account: &A,\n\n destination_account: Address,\n\n shared_secret: &[u8],\n\n source_amount: u64,\n\n) -> impl Future<Item = (u64, S), Error = Error>\n\nwhere\n\n S: IncomingService<A> + Clone,\n\n A: Account,\n\n{\n\n let shared_secret = Bytes::from(shared_secret);\n\n let from_account = from_account.clone();\n\n // TODO can/should we avoid cloning the account?\n\n get_ildcp_info(&mut service.clone(), from_account.clone())\n\n .map_err(|_err| Error::ConnectionError(\"Unable to get ILDCP info: {:?}\".to_string()))\n\n .and_then(move |account_details| SendMoneyFuture {\n\n state: SendMoneyFutureState::SendMoney,\n\n next: Some(service),\n\n from_account,\n", "file_path": "crates/interledger-stream/src/client.rs", "rank": 94, "score": 173622.65750767273 }, { "content": "#[test]\n\nfn insert_accounts() {\n\n block_on(test_store().and_then(|(store, context)| {\n\n store\n\n .insert_account(ACCOUNT_DETAILS_2.clone())\n\n .and_then(move |account| {\n\n assert_eq!(account.id(), 2);\n\n let _ = context;\n\n Ok(())\n\n })\n\n }))\n\n .unwrap();\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/tests/accounts_test.rs", "rank": 
95, "score": 173462.90982897035 }, { "content": "#[test]\n\nfn get_all_accounts() {\n\n block_on(test_store().and_then(|(store, context)| {\n\n store.get_all_accounts().and_then(move |accounts| {\n\n assert_eq!(accounts.len(), 2);\n\n let _ = context;\n\n Ok(())\n\n })\n\n }))\n\n .unwrap();\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/tests/accounts_test.rs", "rank": 96, "score": 173462.90982897035 }, { "content": "#[test]\n\nfn gets_single_account() {\n\n block_on(test_store().and_then(|(store, context)| {\n\n store.get_accounts(vec![1]).and_then(move |accounts| {\n\n assert_eq!(\n\n accounts[0].client_address(),\n\n &Address::from_str(\"example.bob\").unwrap()\n\n );\n\n let _ = context;\n\n Ok(())\n\n })\n\n }))\n\n .unwrap();\n\n}\n\n\n", "file_path": "crates/interledger-store-redis/tests/accounts_test.rs", "rank": 97, "score": 171146.05271923452 }, { "content": "fn put_protocol_data<T>(buf: &mut T, protocol_data: &[ProtocolData])\n\nwhere\n\n T: BufMut,\n\n{\n\n let length = BigUint::from(protocol_data.len());\n\n buf.put_var_uint(&length);\n\n for entry in protocol_data {\n\n buf.put_var_octet_string(entry.protocol_name.as_bytes());\n\n buf.put_u8(entry.content_type.clone() as u8);\n\n buf.put_var_octet_string(&entry.data);\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct BtpMessage {\n\n pub request_id: u32,\n\n pub protocol_data: Vec<ProtocolData>,\n\n}\n\nimpl Serializable<BtpMessage> for BtpMessage {\n\n fn from_bytes(bytes: &[u8]) -> Result<BtpMessage, ParseError> {\n", "file_path": "crates/interledger-btp/src/packet.rs", "rank": 98, "score": 169992.42742047837 }, { "content": "#[test]\n\nfn idempotent_settlement_calls() {\n\n block_on(test_store().and_then(|(store, context)| {\n\n context.async_connection().and_then(move |conn| {\n\n store\n\n .update_balance_for_incoming_settlement(0, 100, Some(IDEMPOTENCY_KEY.clone()))\n\n .and_then(move |_| {\n\n cmd(\"HMGET\")\n\n .arg(\"accounts:0\")\n\n .arg(\"balance\")\n\n 
.arg(\"prepaid_amount\")\n\n .query_async(conn)\n\n .map_err(|err| eprintln!(\"Redis error: {:?}\", err))\n\n .and_then(move |(conn, (balance, prepaid_amount)): (_, (i64, i64))| {\n\n assert_eq!(balance, 0);\n\n assert_eq!(prepaid_amount, 100);\n\n\n\n store\n\n .update_balance_for_incoming_settlement(\n\n 0,\n\n 100,\n", "file_path": "crates/interledger-store-redis/tests/settlement_test.rs", "rank": 99, "score": 169864.83912443655 } ]
Rust
kernel-hal/src/bare/arch/riscv/vm.rs
SummerVibes/zCore
09c69b2adc920b6edc78a7d45d9237bfd8b43d40
use core::fmt::{Debug, Formatter, Result}; use core::slice; use riscv::{asm, register::satp}; use spin::Mutex; use crate::addr::{align_down, align_up}; use crate::utils::page_table::{GenericPTE, PageTableImpl, PageTableLevel3}; use crate::{mem::phys_to_virt, MMUFlags, PhysAddr, VirtAddr, KCONFIG, PAGE_SIZE}; lazy_static! { static ref KERNEL_PT: Mutex<PageTable> = Mutex::new(init_kernel_page_table().unwrap()); } fn init_kernel_page_table() -> PagingResult<PageTable> { extern "C" { fn stext(); fn etext(); fn srodata(); fn erodata(); fn sdata(); fn edata(); fn sbss(); fn ebss(); fn bootstack(); fn bootstacktop(); fn end(); } let mut pt = PageTable::new(); let mut map_range = |start: VirtAddr, end: VirtAddr, flags: MMUFlags| -> PagingResult { pt.map_cont( start, crate::addr::align_up(end - start), start - KCONFIG.phys_to_virt_offset, flags | MMUFlags::HUGE_PAGE, ) }; map_range( stext as usize, etext as usize, MMUFlags::READ | MMUFlags::EXECUTE, )?; map_range(srodata as usize, erodata as usize, MMUFlags::READ)?; map_range( sdata as usize, edata as usize, MMUFlags::READ | MMUFlags::WRITE, )?; map_range( sbss as usize, ebss as usize, MMUFlags::READ | MMUFlags::WRITE, )?; map_range( bootstack as usize, bootstacktop as usize, MMUFlags::READ | MMUFlags::WRITE, )?; map_range( align_up(end as usize + PAGE_SIZE), phys_to_virt(align_down(KCONFIG.phys_mem_end)), MMUFlags::READ | MMUFlags::WRITE, )?; info!("initialized kernel page table @ {:#x}", pt.table_phys()); Ok(pt) } pub(super) fn kernel_page_table() -> &'static Mutex<PageTable> { &KERNEL_PT } pub(super) fn init() { unsafe { KERNEL_PT.lock().activate() }; } hal_fn_impl! 
{ impl mod crate::hal_fn::vm { fn activate_paging(vmtoken: PhysAddr) { let old_token = current_vmtoken(); if old_token != vmtoken { #[cfg(target_arch = "riscv64")] let mode = satp::Mode::Sv39; debug!("switch table {:x?} -> {:x?}", old_token, vmtoken); unsafe { satp::set(mode, 0, vmtoken >> 12); asm::sfence_vma_all(); } } } fn current_vmtoken() -> PhysAddr { satp::read().ppn() << 12 } fn flush_tlb(vaddr: Option<VirtAddr>) { unsafe { if let Some(vaddr) = vaddr { asm::sfence_vma(0, vaddr) } else { asm::sfence_vma_all(); } } } fn pt_clone_kernel_space(dst_pt_root: PhysAddr, src_pt_root: PhysAddr) { let entry_range = 0x100..0x200; let dst_table = unsafe { slice::from_raw_parts_mut(phys_to_virt(dst_pt_root) as *mut Rv64PTE, 512) }; let src_table = unsafe { slice::from_raw_parts(phys_to_virt(src_pt_root) as *const Rv64PTE, 512) }; for i in entry_range { dst_table[i] = src_table[i]; if !dst_table[i].is_unused() { dst_table[i].0 |= PTF::GLOBAL.bits() as u64; } } } } } bitflags::bitflags! { pub struct PTF: usize { const VALID = 1 << 0; const READABLE = 1 << 1; const WRITABLE = 1 << 2; const EXECUTABLE = 1 << 3; const USER = 1 << 4; const GLOBAL = 1 << 5; const ACCESSED = 1 << 6; const DIRTY = 1 << 7; const RESERVED1 = 1 << 8; const RESERVED2 = 1 << 9; } } impl From<MMUFlags> for PTF { fn from(f: MMUFlags) -> Self { if f.is_empty() { return PTF::empty(); } let mut flags = PTF::VALID; if f.contains(MMUFlags::READ) { flags |= PTF::READABLE; } if f.contains(MMUFlags::WRITE) { flags |= PTF::READABLE | PTF::WRITABLE; } if f.contains(MMUFlags::EXECUTE) { flags |= PTF::EXECUTABLE; } if f.contains(MMUFlags::USER) { flags |= PTF::USER; } flags } } impl From<PTF> for MMUFlags { fn from(f: PTF) -> Self { let mut ret = Self::empty(); if f.contains(PTF::READABLE) { ret |= Self::READ; } if f.contains(PTF::WRITABLE) { ret |= Self::WRITE; } if f.contains(PTF::EXECUTABLE) { ret |= Self::EXECUTE; } if f.contains(PTF::USER) { ret |= Self::USER; } ret } } const PHYS_ADDR_MASK: u64 = 
0x003f_ffff_ffff_fc00; #[derive(Clone, Copy)] #[repr(transparent)] pub struct Rv64PTE(u64); impl GenericPTE for Rv64PTE { fn addr(&self) -> PhysAddr { ((self.0 & PHYS_ADDR_MASK) << 2) as _ } fn flags(&self) -> MMUFlags { PTF::from_bits_truncate(self.0 as usize).into() } fn is_unused(&self) -> bool { self.0 == 0 } fn is_present(&self) -> bool { PTF::from_bits_truncate(self.0 as usize).contains(PTF::VALID) } fn is_leaf(&self) -> bool { PTF::from_bits_truncate(self.0 as usize).intersects(PTF::READABLE | PTF::EXECUTABLE) } fn set_addr(&mut self, paddr: PhysAddr) { self.0 = (self.0 & !PHYS_ADDR_MASK) | ((paddr as u64 >> 2) & PHYS_ADDR_MASK); } fn set_flags(&mut self, flags: MMUFlags, _is_huge: bool) { let flags = PTF::from(flags) | PTF::ACCESSED | PTF::DIRTY; debug_assert!(flags.contains(PTF::READABLE | PTF::EXECUTABLE)); self.0 = (self.0 & PHYS_ADDR_MASK) | flags.bits() as u64; } fn set_table(&mut self, paddr: PhysAddr) { self.0 = ((paddr as u64 >> 2) & PHYS_ADDR_MASK) | PTF::VALID.bits() as u64; } fn clear(&mut self) { self.0 = 0 } } impl Debug for Rv64PTE { fn fmt(&self, f: &mut Formatter) -> Result { let mut f = f.debug_struct("Rv64PTE"); f.field("raw", &self.0); f.field("addr", &self.addr()); f.field("flags", &self.flags()); f.finish() } } pub type PageTable = PageTableImpl<PageTableLevel3, Rv64PTE>;
use core::fmt::{Debug, Formatter, Result}; use core::slice; use riscv::{asm, register::satp}; use spin::Mutex; use crate::addr::{align_down, align_up}; use crate::utils::page_table::{GenericPTE, PageTableImpl, PageTableLevel3}; use crate::{mem::phys_to_virt, MMUFlags, PhysAddr, VirtAddr, KCONFIG, PAGE_SIZE}; lazy_static! { static ref KERNEL_PT: Mutex<PageTable> = Mutex::new(init_kernel_page_table().unwrap()); } fn init_kernel_page_table() -> PagingResult<PageTable> { extern "C" { fn stext(); fn etext(); fn srodata(); fn erodata(); fn sdata(); fn edata(); fn sbss(); fn ebss(); fn bootstack(); fn bootstacktop(); fn end(); } let mut pt = PageTable::new(); let mut map_range = |start: VirtAddr, end: VirtAddr, flags: MMUFlags| -> PagingResult { pt.map_cont( start, crate::addr::align_up(end - start), start - KCONFIG.phys_to_virt_offset, flags | MMUFlags::HUGE_PAGE, ) }; map_range( stext as usize, etext as usize, MMUFlags::READ | MMUFlags::EXECUTE, )?; map_range(srodata as usize, erodata as usize, MMUFlags::READ)?; map_range( sdata as usize, edata as usize, MMUFlags::READ | MMUFlags::WRITE, )?; map_range( sbss as usize, ebss as usize, MMUFlags::READ | MMUFlags::WRITE, )?; map_range( bootstack as usize, bootstacktop as usize, MMUFlags::READ | MMUFlags::WRITE, )?; map_range( align_up(end as usize + PAGE_SIZE), phys_to_virt(align_down(KCONFIG.phys_mem_end)), MMUFlags::READ | MMUFlags::WRITE, )?; info!("initialized kernel page table @ {:#x}", pt.table_phys()); Ok(pt) } pub(super) fn kernel_page_table() -> &'static Mutex<PageTable> { &KERNEL_PT } pub(super) fn init() { unsafe { KERNEL_PT.lock().activate() }; } hal_fn_impl! 
{ impl mod crate::hal_fn::vm { fn activate_paging(vmtoken: PhysAddr) { let old_token = current_vmtoken(); if old_token != vmtoken { #[cfg(target_arch = "riscv64")] let mode = satp::Mode::Sv39; debug!("switch table {:x?} -> {:x?}", old_token, vmtoken); unsafe { satp::set(mode, 0, vmtoken >> 12); asm::sfence_vma_all(); } } } fn current_vmtoken() -> PhysAddr { satp::read().ppn() << 12 } fn flush_tlb(vaddr: Option<VirtAddr>) { unsafe { if let Some(vaddr) = vaddr { asm::sfence_vma(0, vaddr) } else { asm::sfence_vma_all(); } } } fn pt_clone_kernel_space(dst_pt_root: PhysAddr, src_pt_root: PhysAddr) { let entry_range = 0x100..0x200; let dst_table = unsafe { slice::from_raw_parts_mut(phys_to_virt(dst_pt_root) as *mut Rv64PTE, 512) }; let src_table = unsafe { slice::from_raw_parts(phys_to_virt(src_pt_root) as *const Rv64PTE, 512) }; for i in entry_range { dst_table[i] = src_table[i]; if !dst_table[i].is_unused() { dst_table[i].0 |= PTF::GLOBAL.bits() as u64; } } } } } bitflags::bitflags! { pub struct PTF: usize { const VALID = 1 << 0; const READABLE = 1 << 1; const WRITABLE = 1 << 2; const EXECUTABLE = 1 << 3; const USER = 1 << 4; const GLOBAL = 1 << 5; const ACCESSED = 1 << 6; const DIRTY = 1 << 7; const RESERVED1 = 1 << 8; const RESERVED2 = 1 << 9; } } impl From<MMUFlags> for PTF { fn from(
} impl From<PTF> for MMUFlags { fn from(f: PTF) -> Self { let mut ret = Self::empty(); if f.contains(PTF::READABLE) { ret |= Self::READ; } if f.contains(PTF::WRITABLE) { ret |= Self::WRITE; } if f.contains(PTF::EXECUTABLE) { ret |= Self::EXECUTE; } if f.contains(PTF::USER) { ret |= Self::USER; } ret } } const PHYS_ADDR_MASK: u64 = 0x003f_ffff_ffff_fc00; #[derive(Clone, Copy)] #[repr(transparent)] pub struct Rv64PTE(u64); impl GenericPTE for Rv64PTE { fn addr(&self) -> PhysAddr { ((self.0 & PHYS_ADDR_MASK) << 2) as _ } fn flags(&self) -> MMUFlags { PTF::from_bits_truncate(self.0 as usize).into() } fn is_unused(&self) -> bool { self.0 == 0 } fn is_present(&self) -> bool { PTF::from_bits_truncate(self.0 as usize).contains(PTF::VALID) } fn is_leaf(&self) -> bool { PTF::from_bits_truncate(self.0 as usize).intersects(PTF::READABLE | PTF::EXECUTABLE) } fn set_addr(&mut self, paddr: PhysAddr) { self.0 = (self.0 & !PHYS_ADDR_MASK) | ((paddr as u64 >> 2) & PHYS_ADDR_MASK); } fn set_flags(&mut self, flags: MMUFlags, _is_huge: bool) { let flags = PTF::from(flags) | PTF::ACCESSED | PTF::DIRTY; debug_assert!(flags.contains(PTF::READABLE | PTF::EXECUTABLE)); self.0 = (self.0 & PHYS_ADDR_MASK) | flags.bits() as u64; } fn set_table(&mut self, paddr: PhysAddr) { self.0 = ((paddr as u64 >> 2) & PHYS_ADDR_MASK) | PTF::VALID.bits() as u64; } fn clear(&mut self) { self.0 = 0 } } impl Debug for Rv64PTE { fn fmt(&self, f: &mut Formatter) -> Result { let mut f = f.debug_struct("Rv64PTE"); f.field("raw", &self.0); f.field("addr", &self.addr()); f.field("flags", &self.flags()); f.finish() } } pub type PageTable = PageTableImpl<PageTableLevel3, Rv64PTE>;
f: MMUFlags) -> Self { if f.is_empty() { return PTF::empty(); } let mut flags = PTF::VALID; if f.contains(MMUFlags::READ) { flags |= PTF::READABLE; } if f.contains(MMUFlags::WRITE) { flags |= PTF::READABLE | PTF::WRITABLE; } if f.contains(MMUFlags::EXECUTE) { flags |= PTF::EXECUTABLE; } if f.contains(MMUFlags::USER) { flags |= PTF::USER; } flags }
function_block-function_prefixed
[]
Rust
build/main.rs
cdecompilador/rust-xcb
d83de0035743766c4503b9e699fb22cc1a6d2986
extern crate quick_xml; mod ast; mod codegen; mod output; mod parse; use std::env; use std::fs; use std::path::{Path, PathBuf}; use ast::{Event, ExtInfo, OpCopy, OpCopyMap}; use codegen::{CodeGen, DepInfo}; use output::Output; use parse::{Parser, Result}; fn xcb_mod_map(name: &str) -> &str { match name { "bigreq" => "big_requests", "ge" => "genericevent", "xselinux" => "selinux", "xprint" => "x_print", "xtest" => "test", _ => name, } } fn is_always(name: &str) -> bool { matches!(name, "xproto" | "big_requests" | "xc_misc") } fn has_feature(name: &str) -> bool { env::var(format!("CARGO_FEATURE_{}", name.to_ascii_uppercase())).is_ok() } fn main() { let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string()); let xml_dir = Path::new(&root).join("xml"); let out_dir = env::var("OUT_DIR").unwrap_or_else(|_| "./gen/current".to_string()); let out_dir = Path::new(&out_dir); let rustfmt = env::var("RXCB_RUSTFMT").ok().and_then(|var| { if var == "1" || var == "y" || var == "Y" { find_exe("rustfmt") } else { None } }); let gen_all = env::var("RXCB_GENALL").is_ok(); let mut dep_info = Vec::new(); for xml_file in iter_xml(&xml_dir) { if xml_file.file_stem().unwrap().to_str().unwrap() == "xinput" { continue; } process_xcb_gen(&xml_file, out_dir, &rustfmt, gen_all, &mut dep_info).unwrap_or_else( |err| { panic!( "Error during processing of {}: {:?}", xml_file.display(), err ) }, ); } #[cfg(target_os = "freebsd")] println!("cargo:rustc-link-search=/usr/local/lib"); } fn iter_xml(xml_dir: &Path) -> impl Iterator<Item = PathBuf> { fs::read_dir(xml_dir) .unwrap() .map(|e| e.unwrap().path()) .filter(|p| match p.extension() { Some(e) => e == "xml", _ => false, }) } fn find_exe<P>(exe_name: P) -> Option<PathBuf> where P: AsRef<Path>, { env::var_os("PATH").and_then(|paths| { env::split_paths(&paths) .filter_map(|dir| { let full_path = dir.join(&exe_name); if full_path.is_file() { Some(full_path) } else { None } }) .next() }) } fn process_xcb_gen( xml_file: &Path, out_dir: 
&Path, rustfmt: &Option<PathBuf>, gen_all: bool, dep_info: &mut Vec<DepInfo>, ) -> Result<()> { let xcb_mod = xml_file.file_stem().unwrap(); let xcb_mod = xcb_mod.to_str().unwrap(); let xcb_mod = xcb_mod_map(xcb_mod); if dep_info.iter().any(|di| di.xcb_mod == xcb_mod) { return Ok(()); } if !gen_all && !is_always(xcb_mod) && !has_feature(xcb_mod) { return Ok(()); } let ffi_file = out_dir.join("ffi").join(&xcb_mod).with_extension("rs"); let rs_file = out_dir.join(&xcb_mod).with_extension("rs"); let ffi = Output::new(rustfmt, &ffi_file) .unwrap_or_else(|_| panic!("cannot create FFI output file: {}", ffi_file.display())); let rs = Output::new(rustfmt, &rs_file) .unwrap_or_else(|_| panic!("cannot create Rust output file: {}", rs_file.display())); let mut parser = Parser::from_file(xml_file); let mut imports = Vec::new(); let mut events = Vec::new(); let mut evcopies: OpCopyMap = OpCopyMap::new(); let mut info: Option<(String, Option<ExtInfo>)> = None; for e in &mut parser { match e? { Event::Ignore => {} Event::Info(mod_name, ext_info) => { info = Some((mod_name, ext_info)); } Event::Import(imp) => imports.push(imp), Event::Event { number, stru, no_seq_number, xge, } => { evcopies.insert(stru.name.clone(), Vec::new()); events.push(Event::Event { number, stru, no_seq_number, xge, }); } Event::EventCopy { name, number, ref_ } => { if let Some(copies) = evcopies.get_mut(&ref_) { copies.push(OpCopy { name, number }); } else { events.push(Event::EventCopy { name, number, ref_ }); } } ev => { events.push(ev); } } } let info = info.expect("no xcb protocol opening"); let deps = { let mut deps = Vec::new(); for i in imports.iter() { let xml_file = xml_file.with_file_name(&format!("{}.xml", i)); process_xcb_gen(&xml_file, out_dir, rustfmt, gen_all, dep_info).unwrap_or_else(|err| { panic!( "Error during processing of {}: {:?}", xml_file.display(), err ) }); let i = xcb_mod_map(i); deps.push( dep_info .iter() .find(|di| di.xcb_mod == i) .unwrap_or_else(|| panic!("can't find 
dependency {} of {}", i, xcb_mod)) .clone(), ); } deps }; let mut cg = CodeGen::new(xcb_mod, ffi, rs, deps, evcopies); cg.prologue(imports, &info.1)?; for ev in events { cg.event(ev)?; } cg.epilogue()?; dep_info.push(cg.into_depinfo()); Ok(()) }
extern crate quick_xml; mod ast; mod codegen; mod output; mod parse; use std::env; use std::fs; use std::path::{Path, PathBuf}; use ast::{Event, ExtInfo, OpCopy, OpCopyMap}; use codegen::{CodeGen, DepInfo}; use output::Output; use parse::{Parser, Result}; fn xcb_mod_map(name: &str) -> &str {
} fn is_always(name: &str) -> bool { matches!(name, "xproto" | "big_requests" | "xc_misc") } fn has_feature(name: &str) -> bool { env::var(format!("CARGO_FEATURE_{}", name.to_ascii_uppercase())).is_ok() } fn main() { let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string()); let xml_dir = Path::new(&root).join("xml"); let out_dir = env::var("OUT_DIR").unwrap_or_else(|_| "./gen/current".to_string()); let out_dir = Path::new(&out_dir); let rustfmt = env::var("RXCB_RUSTFMT").ok().and_then(|var| { if var == "1" || var == "y" || var == "Y" { find_exe("rustfmt") } else { None } }); let gen_all = env::var("RXCB_GENALL").is_ok(); let mut dep_info = Vec::new(); for xml_file in iter_xml(&xml_dir) { if xml_file.file_stem().unwrap().to_str().unwrap() == "xinput" { continue; } process_xcb_gen(&xml_file, out_dir, &rustfmt, gen_all, &mut dep_info).unwrap_or_else( |err| { panic!( "Error during processing of {}: {:?}", xml_file.display(), err ) }, ); } #[cfg(target_os = "freebsd")] println!("cargo:rustc-link-search=/usr/local/lib"); } fn iter_xml(xml_dir: &Path) -> impl Iterator<Item = PathBuf> { fs::read_dir(xml_dir) .unwrap() .map(|e| e.unwrap().path()) .filter(|p| match p.extension() { Some(e) => e == "xml", _ => false, }) } fn find_exe<P>(exe_name: P) -> Option<PathBuf> where P: AsRef<Path>, { env::var_os("PATH").and_then(|paths| { env::split_paths(&paths) .filter_map(|dir| { let full_path = dir.join(&exe_name); if full_path.is_file() { Some(full_path) } else { None } }) .next() }) } fn process_xcb_gen( xml_file: &Path, out_dir: &Path, rustfmt: &Option<PathBuf>, gen_all: bool, dep_info: &mut Vec<DepInfo>, ) -> Result<()> { let xcb_mod = xml_file.file_stem().unwrap(); let xcb_mod = xcb_mod.to_str().unwrap(); let xcb_mod = xcb_mod_map(xcb_mod); if dep_info.iter().any(|di| di.xcb_mod == xcb_mod) { return Ok(()); } if !gen_all && !is_always(xcb_mod) && !has_feature(xcb_mod) { return Ok(()); } let ffi_file = 
out_dir.join("ffi").join(&xcb_mod).with_extension("rs"); let rs_file = out_dir.join(&xcb_mod).with_extension("rs"); let ffi = Output::new(rustfmt, &ffi_file) .unwrap_or_else(|_| panic!("cannot create FFI output file: {}", ffi_file.display())); let rs = Output::new(rustfmt, &rs_file) .unwrap_or_else(|_| panic!("cannot create Rust output file: {}", rs_file.display())); let mut parser = Parser::from_file(xml_file); let mut imports = Vec::new(); let mut events = Vec::new(); let mut evcopies: OpCopyMap = OpCopyMap::new(); let mut info: Option<(String, Option<ExtInfo>)> = None; for e in &mut parser { match e? { Event::Ignore => {} Event::Info(mod_name, ext_info) => { info = Some((mod_name, ext_info)); } Event::Import(imp) => imports.push(imp), Event::Event { number, stru, no_seq_number, xge, } => { evcopies.insert(stru.name.clone(), Vec::new()); events.push(Event::Event { number, stru, no_seq_number, xge, }); } Event::EventCopy { name, number, ref_ } => { if let Some(copies) = evcopies.get_mut(&ref_) { copies.push(OpCopy { name, number }); } else { events.push(Event::EventCopy { name, number, ref_ }); } } ev => { events.push(ev); } } } let info = info.expect("no xcb protocol opening"); let deps = { let mut deps = Vec::new(); for i in imports.iter() { let xml_file = xml_file.with_file_name(&format!("{}.xml", i)); process_xcb_gen(&xml_file, out_dir, rustfmt, gen_all, dep_info).unwrap_or_else(|err| { panic!( "Error during processing of {}: {:?}", xml_file.display(), err ) }); let i = xcb_mod_map(i); deps.push( dep_info .iter() .find(|di| di.xcb_mod == i) .unwrap_or_else(|| panic!("can't find dependency {} of {}", i, xcb_mod)) .clone(), ); } deps }; let mut cg = CodeGen::new(xcb_mod, ffi, rs, deps, evcopies); cg.prologue(imports, &info.1)?; for ev in events { cg.event(ev)?; } cg.epilogue()?; dep_info.push(cg.into_depinfo()); Ok(()) }
match name { "bigreq" => "big_requests", "ge" => "genericevent", "xselinux" => "selinux", "xprint" => "x_print", "xtest" => "test", _ => name, }
if_condition
[ { "content": "fn main() {\n\n let dpy = \":0\";\n\n if let Ok((_, _)) = xcb::Connection::connect(Some(&dpy)) {\n\n println!(\"Connected to X on display \\\"{}\\\"!\", dpy);\n\n } else {\n\n println!(\"Could not connect to X!\");\n\n }\n\n}\n", "file_path": "examples/connect_str.rs", "rank": 0, "score": 68672.83320659264 }, { "content": "fn check_glx_extension(glx_exts: &str, ext_name: &str) -> bool {\n\n for glx_ext in glx_exts.split(\" \") {\n\n if glx_ext == ext_name {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n\nstatic mut ctx_error_occurred: bool = false;\n\nunsafe extern \"C\" fn ctx_error_handler(\n\n _dpy: *mut xlib::Display,\n\n _ev: *mut xlib::XErrorEvent,\n\n) -> i32 {\n\n ctx_error_occurred = true;\n\n 0\n\n}\n\n\n\nunsafe fn check_gl_error() {\n\n let err = gl::GetError();\n\n if err != gl::NO_ERROR {\n\n println!(\"got gl error {}\", err);\n\n }\n\n}\n\n\n", "file_path": "examples/opengl_window.rs", "rank": 1, "score": 68438.0048901057 }, { "content": "type GlXCreateContextAttribsARBProc = unsafe extern \"C\" fn(\n\n dpy: *mut xlib::Display,\n\n fbc: GLXFBConfig,\n\n share_context: GLXContext,\n\n direct: xlib::Bool,\n\n attribs: *const c_int,\n\n) -> GLXContext;\n\n\n\nunsafe fn load_gl_func(name: &str) -> *mut c_void {\n\n let cname = CString::new(name).unwrap();\n\n let ptr: *mut c_void = std::mem::transmute(glXGetProcAddress(cname.as_ptr() as *const u8));\n\n if ptr.is_null() {\n\n panic!(\"could not load {}\", name);\n\n }\n\n ptr\n\n}\n\n\n", "file_path": "examples/opengl_window.rs", "rank": 2, "score": 54455.83881888511 }, { "content": "fn main() {\n\n if let Ok((_, screen_num)) = xcb::Connection::connect(None) {\n\n println!(\"Connected to X on screen \\\"{}\\\"!\", screen_num);\n\n } else {\n\n println!(\"Could not connect to X!\");\n\n }\n\n}\n", "file_path": "examples/connect.rs", "rank": 3, "score": 43382.35590054927 }, { "content": "fn main() {\n\n let points: &[xcb::Point] = &[\n\n xcb::Point::new(10, 10),\n\n xcb::Point::new(10, 
20),\n\n xcb::Point::new(20, 10),\n\n xcb::Point::new(20, 20),\n\n ];\n\n let polyline: &[xcb::Point] = &[\n\n xcb::Point::new(50, 10),\n\n xcb::Point::new(5, 20), /* rest of points are relative */\n\n xcb::Point::new(25, -20),\n\n xcb::Point::new(10, 10),\n\n ];\n\n let segments: &[xcb::Segment] = &[\n\n xcb::Segment::new(100, 10, 140, 30),\n\n xcb::Segment::new(110, 25, 130, 60),\n\n ];\n\n let rectangles: &[xcb::Rectangle] = &[\n\n xcb::Rectangle::new(10, 50, 40, 20),\n\n xcb::Rectangle::new(80, 50, 10, 40),\n", "file_path": "examples/drawing.rs", "rank": 4, "score": 43382.35590054927 }, { "content": "fn main() {\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n\n let setup = conn.get_setup();\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n\n\n let window = conn.generate_id();\n\n\n\n let values = [\n\n (xcb::CW_BACK_PIXEL, screen.white_pixel()),\n\n (\n\n xcb::CW_EVENT_MASK,\n\n xcb::EVENT_MASK_EXPOSURE | xcb::EVENT_MASK_KEY_PRESS,\n\n ),\n\n ];\n\n\n\n xcb::create_window(\n\n &conn,\n\n xcb::COPY_FROM_PARENT as u8,\n\n window,\n\n screen.root(),\n", "file_path": "examples/basic_window.rs", "rank": 5, "score": 41852.32007585439 }, { "content": "fn main() {\n\n let (conn, _) = xcb::Connection::connect(None).unwrap();\n\n\n\n conn.prefetch_extension_data(xkb::id());\n\n\n\n // generally useful to retrieve the first event from this\n\n // extension. 
event response_type will be set on this\n\n let _first_ev = match conn.get_extension_data(xkb::id()) {\n\n Some(r) => r.first_event(),\n\n None => {\n\n panic!(\"XKB extension not supported by X server!\");\n\n }\n\n };\n\n\n\n // we need at least xcb-xkb-1.0 to be available on client\n\n // machine\n\n {\n\n let cookie = xkb::use_extension(&conn, 1, 0);\n\n\n\n match cookie.get_reply() {\n", "file_path": "examples/xkb_init.rs", "rank": 6, "score": 41852.32007585439 }, { "content": "fn main() {\n\n let (conn, _) = Connection::connect(None).unwrap();\n\n let setup = conn.get_setup();\n\n println!(\"X Setup:\");\n\n println!(\" - status = {}\", setup.status());\n\n println!(\" - protocol major version = {}\", setup.protocol_major_version());\n\n println!(\" - protocol minor version = {}\", setup.protocol_minor_version());\n\n println!(\" - length = {}\", setup.length());\n\n println!(\" - release number = {}\", setup.release_number());\n\n println!(\" - resource Id base = {}\", setup.resource_id_base());\n\n println!(\" - motion buffer size = {}\", setup.motion_buffer_size());\n\n println!(\" - maximum request length = {}\", setup.maximum_request_length());\n\n println!(\" - image byte order = {:?}\", setup.image_byte_order());\n\n println!(\" - bitmap format bit order = {:?}\", setup.bitmap_format_bit_order());\n\n println!(\" - bitmap format scanline unit = {}\", setup.bitmap_format_scanline_unit());\n\n println!(\" - bitmap format scanline pad = {}\", setup.bitmap_format_scanline_pad());\n\n println!(\" - min keycode = {:?}\", setup.min_keycode());\n\n println!(\" - max keycode = {:?}\", setup.max_keycode());\n\n println!(\" - vendor = {}\", setup.vendor());\n\n println!(\" - pixmap formats = {:#?}\", setup.pixmap_formats());\n\n println!(\" - roots = {:#?}\", setup.roots());\n\n}\n", "file_path": "examples/print_setup.rs", "rank": 7, "score": 41852.32007585439 }, { "content": "fn main() {\n\n unsafe {\n\n let (conn, screen_num) = 
xcb::Connection::connect_with_xlib_display().unwrap();\n\n conn.set_event_queue_owner(xcb::EventQueueOwner::Xcb);\n\n\n\n if glx_dec_version(conn.get_raw_dpy()) < 13 {\n\n panic!(\"glx-1.3 is not supported\");\n\n }\n\n\n\n let fbc = get_glxfbconfig(\n\n conn.get_raw_dpy(),\n\n screen_num,\n\n &[\n\n GLX_X_RENDERABLE,\n\n 1,\n\n GLX_DRAWABLE_TYPE,\n\n GLX_WINDOW_BIT,\n\n GLX_RENDER_TYPE,\n\n GLX_RGBA_BIT,\n\n GLX_X_VISUAL_TYPE,\n", "file_path": "examples/opengl_window.rs", "rank": 8, "score": 41852.32007585439 }, { "content": "fn main() {\n\n let (conn, screen_num) = {\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n\n (Arc::new(conn), screen_num)\n\n };\n\n let setup = conn.get_setup();\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n\n\n let window = conn.generate_id();\n\n\n\n let values = [\n\n (xcb::CW_BACK_PIXEL, screen.black_pixel()),\n\n (\n\n xcb::CW_EVENT_MASK,\n\n xcb::EVENT_MASK_EXPOSURE\n\n | xcb::EVENT_MASK_KEY_PRESS\n\n | xcb::EVENT_MASK_STRUCTURE_NOTIFY\n\n | xcb::EVENT_MASK_PROPERTY_CHANGE,\n\n ),\n\n ];\n", "file_path": "examples/threaded_window.rs", "rank": 9, "score": 41852.32007585439 }, { "content": "fn main() {\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n\n\n\n let setup = conn.get_setup();\n\n\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n\n\n println!(\"\");\n\n println!(\"Informations of screen {}:\", screen.root());\n\n println!(\" width..........: {}\", screen.width_in_pixels());\n\n println!(\" height.........: {}\", screen.height_in_pixels());\n\n println!(\" white pixel....: {:x}\", screen.white_pixel());\n\n println!(\" black pixel....: {:x}\", screen.black_pixel());\n\n}\n", "file_path": "examples/screen_info.rs", "rank": 10, "score": 41852.32007585439 }, { "content": "fn main() {\n\n unsafe {\n\n let mut screen_num: c_int = 0;\n\n let c = xcb_connect(ptr::null(), &mut screen_num);\n\n if c.is_null() {\n\n panic!();\n\n }\n\n\n\n let setup 
= xcb_get_setup(c);\n\n let mut iter = xcb_setup_roots_iterator(setup);\n\n for _ in 0..screen_num {\n\n xcb_screen_next(&mut iter as *mut xcb_screen_iterator_t);\n\n }\n\n let screen = &*iter.data;\n\n println!(\"\");\n\n println!(\"Informations of screen {}:\", screen.root);\n\n println!(\" width..........: {}\", screen.width_in_pixels);\n\n println!(\" height.........: {}\", screen.height_in_pixels);\n\n println!(\" white pixel....: {:x}\", screen.white_pixel);\n\n println!(\" black pixel....: {:x}\", screen.black_pixel);\n\n\n\n xcb_disconnect(c);\n\n }\n\n}\n", "file_path": "examples/ffi_screen_info.rs", "rank": 11, "score": 40482.50444198519 }, { "content": "fn main() {\n\n let (con, screen_num) = xcb::Connection::connect(None).unwrap();\n\n let screen = con.get_setup().roots().nth(screen_num as usize).unwrap();\n\n\n\n let randr_base = con\n\n .get_extension_data(&mut randr::id())\n\n .unwrap()\n\n .first_event();\n\n let _ = randr::select_input(&con, screen.root(), randr::NOTIFY_MASK_CRTC_CHANGE as u16)\n\n .request_check();\n\n\n\n loop {\n\n con.flush();\n\n let event = con.wait_for_event().unwrap();\n\n\n\n if event.response_type() == randr_base + randr::NOTIFY {\n\n let ev: &randr::NotifyEvent = unsafe { xcb::cast_event(&event) };\n\n let d = ev.u().cc();\n\n println!(\n\n \"received CRTC_NOTIFY event:\\n\\\n", "file_path": "examples/randr_crtc_listen.rs", "rank": 12, "score": 40482.50444198519 }, { "content": "fn main() {\n\n unsafe {\n\n let mut screen_num: c_int = 0;\n\n let c = xcb_connect(null(), &mut screen_num);\n\n if c.is_null() {\n\n panic!();\n\n }\n\n\n\n // generally useful to retrieve the first event from this\n\n // extension. 
event response_type will be set on this\n\n let _first_ev = {\n\n xcb_prefetch_extension_data(c, &mut xcb_xkb_id);\n\n\n\n let reply = xcb_get_extension_data(c, &mut xcb_xkb_id);\n\n if reply.is_null() || (*reply).present == 0 {\n\n panic!(\"XKB extension not supported by X server\");\n\n }\n\n\n\n (*reply).first_event\n\n };\n", "file_path": "examples/ffi_xkb_init.rs", "rank": 13, "score": 40482.50444198519 }, { "content": "fn get_glxfbconfig(\n\n dpy: *mut xlib::Display,\n\n screen_num: i32,\n\n visual_attribs: &[i32],\n\n) -> GLXFBConfig {\n\n unsafe {\n\n let mut fbcount: c_int = 0;\n\n let fbcs = glXChooseFBConfig(\n\n dpy,\n\n screen_num,\n\n visual_attribs.as_ptr(),\n\n &mut fbcount as *mut c_int,\n\n );\n\n\n\n if fbcount == 0 {\n\n panic!(\"could not find compatible fb config\");\n\n }\n\n // we pick the first from the list\n\n let fbc = *fbcs;\n\n xlib::XFree(fbcs as *mut c_void);\n\n fbc\n\n }\n\n}\n\n\n", "file_path": "examples/opengl_window.rs", "rank": 14, "score": 40482.50444198519 }, { "content": "fn main() {\n\n let rectangles: &[xcb::Rectangle] = &[xcb::Rectangle::new(200, 200, 400, 400)];\n\n\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n\n let setup = conn.get_setup();\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n\n\n let gc = conn.generate_id();\n\n\n\n xcb::create_gc(\n\n &conn,\n\n gc,\n\n screen.root(),\n\n &[\n\n (xcb::GC_FUNCTION, xcb::GX_XOR),\n\n (xcb::GC_FOREGROUND, screen.white_pixel()),\n\n (xcb::GC_BACKGROUND, screen.black_pixel()),\n\n (xcb::GC_LINE_WIDTH, 1),\n\n (xcb::GC_LINE_STYLE, xcb::LINE_STYLE_ON_OFF_DASH),\n\n (xcb::GC_GRAPHICS_EXPOSURES, 0),\n", "file_path": "examples/draw_root_window.rs", "rank": 15, "score": 40482.50444198519 }, { "content": "fn main() {\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n\n let setup = conn.get_setup();\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n let window_dummy = conn.generate_id();\n\n\n\n 
xcb::create_window(\n\n &conn,\n\n 0,\n\n window_dummy,\n\n screen.root(),\n\n 0,\n\n 0,\n\n 1,\n\n 1,\n\n 0,\n\n 0,\n\n 0,\n\n &[],\n\n );\n", "file_path": "examples/randr_screen_modes.rs", "rank": 16, "score": 40482.50444198519 }, { "content": "fn main() {\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n\n let setup = conn.get_setup();\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n let window_dummy = conn.generate_id();\n\n\n\n xcb::create_window(\n\n &conn,\n\n 0,\n\n window_dummy,\n\n screen.root(),\n\n 0,\n\n 0,\n\n 1,\n\n 1,\n\n 0,\n\n 0,\n\n 0,\n\n &[],\n\n );\n", "file_path": "examples/randr_screen_info.rs", "rank": 17, "score": 40482.50444198519 }, { "content": "fn main() {\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n\n let setup = conn.get_setup();\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n let window_dummy = conn.generate_id();\n\n\n\n xcb::create_window(\n\n &conn,\n\n 0,\n\n window_dummy,\n\n screen.root(),\n\n 0,\n\n 0,\n\n 1,\n\n 1,\n\n 0,\n\n 0,\n\n 0,\n\n &[],\n\n );\n", "file_path": "examples/randr_crtc_info.rs", "rank": 18, "score": 40482.50444198519 }, { "content": "fn main() {\n\n unsafe {\n\n //Open connection to X server\n\n let conn = xcb_connect(ptr::null(), ptr::null_mut());\n\n\n\n //Get the first X screen\n\n let first_screen = xcb_setup_roots_iterator(xcb_get_setup(conn)).data;\n\n\n\n //Generate ID for the X window\n\n let window_dummy = xcb_generate_id(conn);\n\n\n\n //Create dummy X window\n\n xcb_create_window(\n\n conn,\n\n 0,\n\n window_dummy,\n\n (*first_screen).root,\n\n 0,\n\n 0,\n\n 1,\n", "file_path": "examples/ffi_randr_crtc_info.rs", "rank": 19, "score": 39248.99454259242 }, { "content": "fn main() {\n\n\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n\n let setup = conn.get_setup();\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n let window_dummy = conn.generate_id();\n\n\n\n 
xcb::create_window(&conn, 0, window_dummy, screen.root(), 0, 0, 1, 1, 0, 0, 0, &[]);\n\n\n\n conn.flush();\n\n\n\n // must not compile because crtcs is data owned by reply.\n\n // one needs to make screen_res_reply live longer than crtcs, or\n\n // get ownership by calling to_vec().\n\n // randr_crtc_info.rs is the working version\n\n let crtcs;\n\n {\n\n let screen_res_cookie = randr::get_screen_resources(&conn, window_dummy);\n\n let screen_res_reply = screen_res_cookie.get_reply().unwrap();\n\n crtcs = screen_res_reply.crtcs();\n", "file_path": "examples/must_fail_borrow_check__reply.rs", "rank": 20, "score": 38132.40964725308 }, { "content": "fn main() {\n\n\n\n let setup;\n\n let screen_num;\n\n {\n\n let (conn, sn) = xcb::Connection::connect(None).unwrap();\n\n setup = conn.get_setup();\n\n screen_num = sn;\n\n }\n\n\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n\n\n println!(\"\");\n\n println!(\"Informations of screen {}:\", screen.root());\n\n println!(\" width..........: {}\", screen.width_in_pixels());\n\n println!(\" height.........: {}\", screen.height_in_pixels());\n\n println!(\" white pixel....: {:x}\", screen.white_pixel());\n\n println!(\" black pixel....: {:x}\", screen.black_pixel());\n\n}\n", "file_path": "examples/must_fail_borrow_check__setup.rs", "rank": 21, "score": 38132.40964725308 }, { "content": "extern crate xcb;\n\n\n", "file_path": "examples/connect_str.rs", "rank": 22, "score": 28396.297115061883 }, { "content": "// per https://gitlab.freedesktop.org/xorg/app/xrandr/-/blob/master/xrandr.c#L576\n\nfn mode_refresh(mode_info: &randr::ModeInfo) -> f64 {\n\n let flags = mode_info.mode_flags();\n\n let vtotal = {\n\n let mut val = mode_info.vtotal();\n\n if (flags & randr::MODE_FLAG_DOUBLE_SCAN) != 0 {\n\n val *= 2;\n\n }\n\n if (flags & randr::MODE_FLAG_INTERLACE) != 0 {\n\n val /= 2;\n\n }\n\n val\n\n };\n\n\n\n if vtotal != 0 && mode_info.htotal() != 0 {\n\n (mode_info.dot_clock() as f64) / (vtotal as f64 * 
mode_info.htotal() as f64)\n\n } else {\n\n 0.0\n\n }\n\n}\n\n\n", "file_path": "examples/randr_screen_modes.rs", "rank": 23, "score": 27282.510807722127 }, { "content": "// returns the glx version in a decimal form\n\n// eg. 1.3 => 13\n\nfn glx_dec_version(dpy: *mut xlib::Display) -> i32 {\n\n let mut maj: c_int = 0;\n\n let mut min: c_int = 0;\n\n unsafe {\n\n if glXQueryVersion(dpy, &mut maj as *mut c_int, &mut min as *mut c_int) == 0 {\n\n panic!(\"cannot get glx version\");\n\n }\n\n }\n\n (maj * 10 + min) as i32\n\n}\n\n\n", "file_path": "examples/opengl_window.rs", "rank": 24, "score": 27141.59481676949 }, { "content": "pub fn pack_bitfield<T, L>(bf: &mut Vec<(T, L)>) -> (T, Vec<L>)\n\nwhere\n\n T: Ord + Zero + Copy + BitAnd<Output = T> + BitOr<Output = T>,\n\n L: Copy,\n\n{\n\n bf.sort_by(|a, b| {\n\n let &(a, _) = a;\n\n let &(b, _) = b;\n\n if a < b {\n\n Ordering::Less\n\n } else if a > b {\n\n Ordering::Greater\n\n } else {\n\n Ordering::Equal\n\n }\n\n });\n\n\n\n let mut mask = T::zero();\n\n let mut list: Vec<L> = Vec::new();\n\n\n", "file_path": "src/base.rs", "rank": 25, "score": 21553.61260795816 }, { "content": "extern crate gl;\n\nextern crate libc;\n\nextern crate x11;\n\nextern crate xcb;\n\n\n\nuse xcb::dri2;\n\n\n\nuse x11::glx::*;\n\nuse x11::xlib;\n\n\n\nuse std::ffi::{CStr, CString};\n\nuse std::os::raw::{c_int, c_void};\n\nuse std::ptr::null_mut;\n\n\n\nconst GLX_CONTEXT_MAJOR_VERSION_ARB: u32 = 0x2091;\n\nconst GLX_CONTEXT_MINOR_VERSION_ARB: u32 = 0x2092;\n\n\n", "file_path": "examples/opengl_window.rs", "rank": 26, "score": 13.323493560103415 }, { "content": "extern crate libc;\n\nextern crate xcb;\n\n\n\nuse libc::c_int;\n\nuse std::ptr;\n\nuse xcb::ffi::*;\n\n\n", "file_path": "examples/ffi_screen_info.rs", "rank": 27, "score": 11.514594908641191 }, { "content": "extern crate libc;\n\nextern crate xcb;\n\n\n\nuse xcb::ffi::randr::*;\n\nuse xcb::ffi::*;\n\n\n\nuse libc::{c_void, free};\n\nuse std::ptr;\n\n\n", "file_path": 
"examples/ffi_randr_crtc_info.rs", "rank": 28, "score": 11.316623752219067 }, { "content": "\n\nextern crate xcb;\n\nextern crate libc;\n\n\n\nuse xcb::randr;\n\n\n", "file_path": "examples/must_fail_borrow_check__reply.rs", "rank": 29, "score": 11.13975127272816 }, { "content": "extern crate libc;\n\nextern crate xcb;\n\n\n\nuse xcb::xkb;\n\n\n", "file_path": "examples/xkb_init.rs", "rank": 30, "score": 11.13975127272816 }, { "content": "extern crate libc;\n\nextern crate xcb;\n\n\n\nuse xcb::randr;\n\n\n", "file_path": "examples/randr_crtc_info.rs", "rank": 31, "score": 11.13975127272816 }, { "content": "extern crate libc;\n\nextern crate xcb;\n\n\n\nuse xcb::ffi::xkb::*;\n\nuse xcb::ffi::*;\n\n\n\nuse libc::{c_char, c_int, c_void};\n\nuse std::ptr::{null, null_mut};\n\n\n", "file_path": "examples/ffi_xkb_init.rs", "rank": 32, "score": 11.038896855354906 }, { "content": "extern crate xcb;\n\n\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n", "file_path": "examples/draw_root_window.rs", "rank": 33, "score": 10.34543118598518 }, { "content": "extern crate xcb;\n\nuse xcb::{Connection};\n\n\n", "file_path": "examples/print_setup.rs", "rank": 34, "score": 10.26546752055978 }, { "content": "extern crate xcb;\n\n\n\nuse xcb::randr;\n\n\n", "file_path": "examples/randr_crtc_listen.rs", "rank": 35, "score": 10.26546752055978 }, { "content": "extern crate xcb;\n\n\n\nuse xcb::randr;\n\n\n", "file_path": "examples/randr_screen_info.rs", "rank": 36, "score": 10.26546752055978 }, { "content": "extern crate xcb;\n\n\n\nuse std::iter::{Iterator};\n\n\n", "file_path": "examples/must_fail_borrow_check__setup.rs", "rank": 37, "score": 10.122713701510555 }, { "content": "extern crate xcb;\n\n\n\nuse std::iter::Iterator;\n\n\n", "file_path": "examples/screen_info.rs", "rank": 38, "score": 10.122713701510555 }, { "content": "extern crate xcb;\n\n\n\nuse std::iter::Iterator;\n\n\n", "file_path": "examples/basic_window.rs", "rank": 39, "score": 10.122713701510555 }, { "content": 
"extern crate xcb;\n\n\n\nuse std::iter::Iterator;\n\nuse std::sync::Arc;\n\nuse std::{thread, time};\n\n\n", "file_path": "examples/threaded_window.rs", "rank": 40, "score": 10.090786815903941 }, { "content": "//! If this variant is used, the `Reply` is retrieved assuming that there was no error.\n\n//!\n\n//! The server can also communicate with clients by sending `Event`s.\n\n//! The client listens to events with calls such as `Connection::wait_for_event`\n\n//! (blocking) or `Connection::poll_for_event` (non-blocking).\n\n//!\n\n//! API documentation is detailed in modules `base` and `xproto`.\n\n//!\n\n//! - `base`: contains `Connection` and a few utils\n\n//! - `xproto`: X protocol requests and events\n\n//!\n\n//! X protocol extensions are activated with cargo features\n\n\n\nextern crate libc;\n\n#[cfg(feature = \"xlib_xcb\")]\n\nextern crate x11;\n\n\n\n#[macro_use]\n\nextern crate log;\n\n\n", "file_path": "src/lib.rs", "rank": 41, "score": 9.222448760099876 }, { "content": "extern crate xcb;\n\n\n\nuse xcb::randr;\n\n\n\n// per https://gitlab.freedesktop.org/xorg/app/xrandr/-/blob/master/xrandr.c#L576\n", "file_path": "examples/randr_screen_modes.rs", "rank": 42, "score": 8.779697530902194 }, { "content": "extern crate xcb;\n\n\n", "file_path": "examples/drawing.rs", "rank": 43, "score": 7.256211096410141 }, { "content": "extern crate xcb;\n\n\n", "file_path": "examples/connect.rs", "rank": 44, "score": 7.256211096410141 }, { "content": "pub mod base;\n\n\n\npub mod xproto {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/xproto.rs\"));\n\n}\n\npub mod big_requests {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/big_requests.rs\"));\n\n}\n\npub mod xc_misc {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/xc_misc.rs\"));\n\n}\n\n\n\npub use base::*;\n\npub use xproto::*;\n\n\n\n#[cfg(feature = \"composite\")]\n\npub mod composite {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/composite.rs\"));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 45, "score": 
6.7375877935579185 }, { "content": " pub mod ext;\n\n\n\n pub mod xproto {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xproto.rs\"));\n\n }\n\n pub mod big_requests {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/big_requests.rs\"));\n\n }\n\n pub mod xc_misc {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xc_misc.rs\"));\n\n }\n\n\n\n pub use ffi::base::*;\n\n pub use ffi::xproto::*;\n\n\n\n #[cfg(feature = \"xlib_xcb\")]\n\n pub mod xlib_xcb;\n\n\n\n #[cfg(feature = \"composite\")]\n\n pub mod composite {\n", "file_path": "src/lib.rs", "rank": 46, "score": 6.691171609448359 }, { "content": "}\n\n\n\nimpl error::Error for ConnError {\n\n fn description(&self) -> &str {\n\n self.to_str()\n\n }\n\n}\n\n\n\npub type ConnResult<T> = Result<T, ConnError>;\n\n\n\n/// xcb::Connection handles communication with the X server.\n\n/// It wraps an `xcb_connection_t` object and\n\n/// will call `xcb_disconnect` when the `Connection` goes out of scope\n\npub struct Connection {\n\n c: *mut xcb_connection_t,\n\n #[cfg(feature = \"xlib_xcb\")]\n\n dpy: *mut xlib::Display,\n\n}\n\n\n\n#[cfg(feature = \"thread\")]\n", "file_path": "src/base.rs", "rank": 47, "score": 6.141731344109726 }, { "content": " self.dpy,\n\n match owner {\n\n EventQueueOwner::Xcb => XCBOwnsEventQueue,\n\n EventQueueOwner::Xlib => XlibOwnsEventQueue,\n\n },\n\n );\n\n }\n\n }\n\n\n\n /// Connects to the X server.\n\n /// `displayname:` The name of the display.\n\n ///\n\n /// Connects to the X server specified by `displayname.` If\n\n /// `displayname` is `None,` uses the value of the DISPLAY environment\n\n /// variable.\n\n ///\n\n /// Returns Ok(connection object, preferred screen) in case of success, or\n\n /// Err(ConnError) in case of error. 
If no screen is preferred, the second\n\n /// member of the tuple is set to 0.\n\n pub fn connect(displayname: Option<&str>) -> ConnResult<(Connection, i32)> {\n", "file_path": "src/base.rs", "rank": 48, "score": 5.703342716416183 }, { "content": " | xkb::MAP_PART_KEY_ACTIONS\n\n | xkb::MAP_PART_KEY_BEHAVIORS\n\n | xkb::MAP_PART_VIRTUAL_MODS\n\n | xkb::MAP_PART_VIRTUAL_MOD_MAP;\n\n\n\n let events = xkb::EVENT_TYPE_NEW_KEYBOARD_NOTIFY\n\n | xkb::EVENT_TYPE_MAP_NOTIFY\n\n | xkb::EVENT_TYPE_STATE_NOTIFY;\n\n\n\n let cookie = xkb::select_events_checked(\n\n &conn,\n\n xkb::ID_USE_CORE_KBD as u16,\n\n events as u16,\n\n 0,\n\n events as u16,\n\n map_parts as u16,\n\n map_parts as u16,\n\n None,\n\n );\n\n\n\n cookie\n\n .request_check()\n\n .expect(\"failed to select notify events from xcb xkb\");\n\n }\n\n\n\n // proceed with create_window and event loop...\n\n}\n", "file_path": "examples/xkb_init.rs", "rank": 49, "score": 4.269498357575706 }, { "content": " * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\n\n * ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\n\n * TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n\n * PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT\n\n * SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n\n * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n\n * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n\n * IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\n * DEALINGS IN THE SOFTWARE.\n\n */\n\n\n\n#![allow(non_upper_case_globals)]\n\n#![allow(non_snake_case)]\n\n\n\nuse ffi::xcb_connection_t;\n\nuse libc::{c_uint, c_void};\n\n\n\nuse x11::xlib;\n\n\n\npub type XEventQueueOwner = c_uint;\n\npub static XlibOwnsEventQueue: XEventQueueOwner = 0;\n\npub static XCBOwnsEventQueue: XEventQueueOwner = 1;\n\n\n\n#[link(name = \"X11-xcb\")]\n\nextern \"C\" {\n\n pub fn XGetXCBConnection(dpy: *mut xlib::Display) -> *mut xcb_connection_t;\n\n pub fn XSetEventQueueOwner(dpy: *mut xlib::Display, owner: XEventQueueOwner);\n\n}\n", "file_path": "src/ffi/xlib_xcb.rs", "rank": 50, "score": 4.234746558965973 }, { "content": " pub fn connect_with_auth_info(\n\n displayname: Option<&str>,\n\n auth_info: &AuthInfo,\n\n ) -> ConnResult<(Connection, i32)> {\n\n unsafe {\n\n let mut screen_num: c_int = 0;\n\n let displayname = displayname.map(|s| CString::new(s).unwrap());\n\n let cconn = if let Some(display) = displayname {\n\n xcb_connect_to_display_with_auth_info(\n\n display.as_ptr(),\n\n mem::transmute(auth_info),\n\n &mut screen_num,\n\n )\n\n } else {\n\n xcb_connect_to_display_with_auth_info(\n\n null(),\n\n mem::transmute(auth_info),\n\n &mut screen_num,\n\n )\n\n };\n", "file_path": "src/base.rs", "rank": 51, "score": 4.101898982049326 }, { "content": "\n\nuse libc::{self, c_char, c_int, c_void};\n\nuse std::option::Option;\n\n\n\nuse std::error;\n\nuse std::fmt;\n\nuse std::marker::PhantomData;\n\nuse std::mem;\n\nuse std::ptr::{null, null_mut};\n\n// std::num::Zero is unstable in rustc 1.5 => remove the Zero defined\n\n// hereunder as soon as Zero gets stabilized (or replaced by something else)\n\n//use std::num::Zero;\n\nuse std::cmp::Ordering;\n\nuse 
std::ffi::CString;\n\nuse std::ops::{BitAnd, BitOr};\n\nuse std::os::unix::io::{AsRawFd, RawFd};\n\n\n\n/// Current protocol version\n\npub const X_PROTOCOL: u32 = 11;\n\n/// Current minor version\n", "file_path": "src/base.rs", "rank": 52, "score": 4.05936225449337 }, { "content": "#[repr(C)]\n\npub struct xcb_auth_info_t {\n\n /// length of the string name (as returned by strlen)\n\n pub namelen: c_int,\n\n /// String containing the authentication protocol name,\n\n /// such as \"MIT-MAGIC-COOKIE-1\" or \"XDM-AUTHORIZATION-1\".\n\n pub name: *mut c_char,\n\n /// length of the data member\n\n pub datalen: c_int,\n\n /// data interpreted in a protocol specific manner\n\n pub data: *mut c_char,\n\n}\n\n\n\n#[link(name = \"xcb\")]\n\nextern \"C\" {\n\n\n\n /// Forces any buffered output to be written to the server. Blocks\n\n /// until the write is complete.\n\n ///\n\n /// Return > 0 on success, <= 0 otherwise.\n", "file_path": "src/ffi/base.rs", "rank": 53, "score": 4.01509302900711 }, { "content": " {\n\n let map_parts = XCB_XKB_MAP_PART_KEY_TYPES\n\n | XCB_XKB_MAP_PART_KEY_SYMS\n\n | XCB_XKB_MAP_PART_MODIFIER_MAP\n\n | XCB_XKB_MAP_PART_EXPLICIT_COMPONENTS\n\n | XCB_XKB_MAP_PART_KEY_ACTIONS\n\n | XCB_XKB_MAP_PART_KEY_BEHAVIORS\n\n | XCB_XKB_MAP_PART_VIRTUAL_MODS\n\n | XCB_XKB_MAP_PART_VIRTUAL_MOD_MAP;\n\n\n\n let events = XCB_XKB_EVENT_TYPE_NEW_KEYBOARD_NOTIFY\n\n | XCB_XKB_EVENT_TYPE_MAP_NOTIFY\n\n | XCB_XKB_EVENT_TYPE_STATE_NOTIFY;\n\n\n\n let cookie = xcb_xkb_select_events_checked(\n\n c,\n\n XCB_XKB_ID_USE_CORE_KBD as u16,\n\n events as u16,\n\n 0,\n\n events as u16,\n", "file_path": "examples/ffi_xkb_init.rs", "rank": 54, "score": 3.9846784661693118 }, { "content": "#[cfg(feature = \"xkb\")]\n\npub mod xkb {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/xkb.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"xvmc\")]\n\npub mod xvmc {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/xvmc.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"xv\")]\n\npub mod xv {\n\n 
include!(concat!(env!(\"OUT_DIR\"), \"/xv.rs\"));\n\n}\n\n\n\npub mod ffi {\n\n #![allow(non_camel_case_types)]\n\n #![allow(improper_ctypes)]\n\n\n\n pub mod base;\n", "file_path": "src/lib.rs", "rank": 55, "score": 3.9435800238466627 }, { "content": "## Example\n\n\n\nDrawing example (checkout for more [here](https://github.com/rust-x-bindings/rust-xcb/tree/master/examples)\n\nand also [here](https://github.com/rust-x-bindings/toy_xcb))\n\n\n\n```rust\n\nextern crate xcb;\n\n\n\nfn main() {\n\n let points: &[xcb::Point] = &[\n\n xcb::Point::new(10, 10),\n\n xcb::Point::new(10, 20),\n\n xcb::Point::new(20, 10),\n\n xcb::Point::new(20, 20),\n\n ];\n\n let polyline: &[xcb::Point] = &[\n\n xcb::Point::new(50, 10 ),\n\n xcb::Point::new( 5, 20 ), /* rest of points are relative */\n\n xcb::Point::new(25, -20),\n\n xcb::Point::new(10, 10 )\n\n ];\n\n let segments: &[xcb::Segment] = &[\n\n xcb::Segment::new(100, 10, 140, 30),\n\n xcb::Segment::new(110, 25, 130, 60)\n\n ];\n\n let rectangles: &[xcb::Rectangle] = &[\n\n xcb::Rectangle::new(10, 50, 40, 20),\n\n xcb::Rectangle::new(80, 50, 10, 40)\n\n ];\n\n let arcs: &[xcb::Arc] = &[\n\n xcb::Arc::new(10, 100, 60, 40, 0, 90 << 6),\n\n xcb::Arc::new(90, 100, 55, 40, 0, 270 << 6)\n\n ];\n\n\n\n\n\n let (conn, screen_num) = xcb::Connection::connect(None).unwrap();\n\n let setup = conn.get_setup();\n\n let screen = setup.roots().nth(screen_num as usize).unwrap();\n\n\n\n let foreground = conn.generate_id();\n\n\n\n xcb::create_gc(&conn, foreground, screen.root(), &[\n\n (xcb::GC_FOREGROUND, screen.black_pixel()),\n\n (xcb::GC_GRAPHICS_EXPOSURES, 0),\n\n ]);\n\n\n\n let win = conn.generate_id();\n\n xcb::create_window(&conn,\n\n xcb::COPY_FROM_PARENT as u8,\n\n win,\n\n screen.root(),\n\n 0, 0,\n\n 150, 150,\n\n 10,\n\n xcb::WINDOW_CLASS_INPUT_OUTPUT as u16,\n\n screen.root_visual(), &[\n\n (xcb::CW_BACK_PIXEL, screen.white_pixel()),\n\n (xcb::CW_EVENT_MASK,\n\n xcb::EVENT_MASK_EXPOSURE | xcb::EVENT_MASK_KEY_PRESS),\n\n ]\n\n 
);\n\n xcb::map_window(&conn, win);\n", "file_path": "README.md", "rank": 56, "score": 3.9168244068150844 }, { "content": " * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\n\n * ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\n\n * TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n\n * PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT\n\n * SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n\n * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n\n * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n\n * IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\n * DEALINGS IN THE SOFTWARE.\n\n */\n\n\n\nuse ffi::base::*;\n\nuse ffi::ext::xcb_extension_t;\n\n#[cfg(feature = \"xlib_xcb\")]\n\nuse ffi::xlib_xcb::*;\n\nuse ffi::xproto::*;\n\nuse xproto::*;\n\n\n\n#[cfg(feature = \"xlib_xcb\")]\n\nuse x11::xlib;\n", "file_path": "src/base.rs", "rank": 57, "score": 3.8505394624958003 }, { "content": "#[cfg(feature = \"randr\")]\n\npub mod randr {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/randr.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"record\")]\n\npub mod record {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/record.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"render\")]\n\npub mod render {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/render.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"res\")]\n\npub mod res {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/res.rs\"));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 58, "score": 3.778515252885013 }, { "content": "#[cfg(feature = \"xf86dri\")]\n\npub mod xf86dri {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/xf86dri.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"xf86vidmode\")]\n\npub mod xf86vidmode {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/xf86vidmode.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"xfixes\")]\n\npub mod xfixes {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/xfixes.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"xinerama\")]\n\npub mod xinerama {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/xinerama.rs\"));\n\n}\n\n\n", 
"file_path": "src/lib.rs", "rank": 59, "score": 3.778515252885013 }, { "content": "#[cfg(feature = \"genericevent\")]\n\npub mod genericevent {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/genericevent.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"glx\")]\n\npub mod glx {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/glx.rs\"));\n\n}\n\n\n\n// #[cfg(feature = \"input\")]\n\n// pub mod input {\n\n// include!(concat!(env!(\"OUT_DIR\"), \"/input.rs\"));\n\n// }\n\n\n\n#[cfg(feature = \"present\")]\n\npub mod present {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/present.rs\"));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 60, "score": 3.778515252885013 }, { "content": "#[cfg(feature = \"screensaver\")]\n\npub mod screensaver {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/screensaver.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"selinux\")]\n\npub mod selinux {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/selinux.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"shape\")]\n\npub mod shape {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/shape.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"shm\")]\n\npub mod shm {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/shm.rs\"));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 61, "score": 3.778515252885013 }, { "content": "#[cfg(feature = \"sync\")]\n\npub mod sync {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/sync.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"test\")]\n\npub mod test {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/test.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"x_print\")]\n\npub mod x_print {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/x_print.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"xevie\")]\n\npub mod xevie {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/xevie.rs\"));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 62, "score": 3.778515252885013 }, { "content": "#[cfg(feature = \"damage\")]\n\npub mod damage {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/damage.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"dpms\")]\n\npub mod dpms {\n\n include!(concat!(env!(\"OUT_DIR\"), 
\"/dpms.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"dri2\")]\n\npub mod dri2 {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/dri2.rs\"));\n\n}\n\n\n\n#[cfg(feature = \"dri3\")]\n\npub mod dri3 {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/dri3.rs\"));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 63, "score": 3.778515252885013 }, { "content": " include!(concat!(env!(\"OUT_DIR\"), \"/ffi/res.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"screensaver\")]\n\n pub mod screensaver {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/screensaver.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"selinux\")]\n\n pub mod selinux {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/selinux.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"shape\")]\n\n pub mod shape {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/shape.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"shm\")]\n\n pub mod shm {\n", "file_path": "src/lib.rs", "rank": 64, "score": 3.721845297144673 }, { "content": " include!(concat!(env!(\"OUT_DIR\"), \"/ffi/present.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"randr\")]\n\n pub mod randr {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/randr.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"record\")]\n\n pub mod record {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/record.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"render\")]\n\n pub mod render {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/render.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"res\")]\n\n pub mod res {\n", "file_path": "src/lib.rs", "rank": 65, "score": 3.721845297144673 }, { "content": " include!(concat!(env!(\"OUT_DIR\"), \"/ffi/dri3.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"genericevent\")]\n\n pub mod genericevent {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/genericevent.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"glx\")]\n\n pub mod glx {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/glx.rs\"));\n\n }\n\n\n\n // #[cfg(feature = \"input\")]\n\n // pub mod input {\n\n // include!(concat!(env!(\"OUT_DIR\"), \"/ffi/input.rs\"));\n\n // }\n\n\n\n #[cfg(feature = 
\"present\")]\n\n pub mod present {\n", "file_path": "src/lib.rs", "rank": 66, "score": 3.721845297144673 }, { "content": " include!(concat!(env!(\"OUT_DIR\"), \"/ffi/composite.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"damage\")]\n\n pub mod damage {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/damage.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"dpms\")]\n\n pub mod dpms {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/dpms.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"dri2\")]\n\n pub mod dri2 {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/dri2.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"dri3\")]\n\n pub mod dri3 {\n", "file_path": "src/lib.rs", "rank": 67, "score": 3.721845297144673 }, { "content": " include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xevie.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"xf86dri\")]\n\n pub mod xf86dri {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xf86dri.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"xf86vidmode\")]\n\n pub mod xf86vidmode {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xf86vidmode.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"xfixes\")]\n\n pub mod xfixes {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xfixes.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"xinerama\")]\n\n pub mod xinerama {\n", "file_path": "src/lib.rs", "rank": 68, "score": 3.721845297144673 }, { "content": " include!(concat!(env!(\"OUT_DIR\"), \"/ffi/shm.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"sync\")]\n\n pub mod sync {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/sync.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"test\")]\n\n pub mod test {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/test.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"x_print\")]\n\n pub mod x_print {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/x_print.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"xevie\")]\n\n pub mod xevie {\n", "file_path": "src/lib.rs", "rank": 69, "score": 3.721845297144673 }, { "content": " /// The event queue owner defaults to XLib\n\n /// One would need to open an XCB connection with Xlib in order to 
use\n\n /// OpenGL.\n\n #[cfg(feature = \"xlib_xcb\")]\n\n pub fn connect_with_xlib_display() -> ConnResult<(Connection, i32)> {\n\n unsafe {\n\n let dpy = xlib::XOpenDisplay(null());\n\n let cconn = XGetXCBConnection(dpy);\n\n assert!(\n\n !dpy.is_null() && !cconn.is_null(),\n\n \"XLib could not connect to the X server\"\n\n );\n\n\n\n let conn = Connection { c: cconn, dpy: dpy };\n\n\n\n conn.has_error()\n\n .map(|_| (conn, xlib::XDefaultScreen(dpy) as i32))\n\n }\n\n }\n\n\n", "file_path": "src/base.rs", "rank": 70, "score": 3.7049820234852056 }, { "content": " include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xinerama.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"xkb\")]\n\n pub mod xkb {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xkb.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"xvmc\")]\n\n pub mod xvmc {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xvmc.rs\"));\n\n }\n\n\n\n #[cfg(feature = \"xv\")]\n\n pub mod xv {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/ffi/xv.rs\"));\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 71, "score": 3.532111727103942 }, { "content": " * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\n\n * ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\n\n * TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n\n * PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT\n\n * SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n\n * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n\n * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n\n * IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\n * DEALINGS IN THE SOFTWARE.\n\n */\n\n\n\nuse ffi::base::*;\n\nuse ffi::xproto;\n\nuse libc::{c_char, c_int, c_uint, c_void, iovec};\n\n\n\n#[repr(C)]\n\npub struct xcb_extension_t {\n\n name: *const c_char,\n\n global_id: c_int,\n\n}\n", "file_path": "src/ffi/ext.rs", "rank": 72, "score": 3.4492564839013955 }, { "content": " ConnError::ClosedExtNotSupported => \"Connection closed, X extension not supported\",\n\n ConnError::ClosedMemInsufficient => \"Connection closed, insufficient memory\",\n\n ConnError::ClosedReqLenExceed => {\n\n \"Connection closed, exceeded request length that server accepts.\"\n\n }\n\n ConnError::ClosedParseErr => \"Connection closed, error during parsing display string\",\n\n ConnError::ClosedInvalidScreen => {\n\n \"Connection closed, the server does not have a screen matching the display\"\n\n }\n\n ConnError::ClosedFdPassingFailed => {\n\n \"Connection closed, file-descriptor passing operation failed\"\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for ConnError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n self.to_str().fmt(f)\n\n }\n", "file_path": "src/base.rs", "rank": 73, "score": 3.4271996149020416 }, { "content": " * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\n\n * ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\n\n * TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n\n * PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT\n\n * SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n\n * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n\n * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n\n * IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\n * DEALINGS IN THE SOFTWARE.\n\n */\n\n\n\nuse ffi::ext::xcb_extension_t;\n\nuse ffi::xproto::{xcb_query_extension_reply_t, xcb_setup_t};\n\n\n\nuse libc::{c_char, c_int, c_uint, c_void};\n\n\n\n// Pre-defined constants\n\n\n\n/// xcb connection errors because of socket, pipe and other stream errors.\n\npub const XCB_CONN_ERROR: c_int = 1;\n", "file_path": "src/ffi/base.rs", "rank": 74, "score": 3.303873251588798 }, { "content": " /// When finished, use xcb_disconnect() to close the connection and free\n\n /// the structure.\n\n pub fn xcb_connect(displayname: *const c_char, screenp: *mut c_int) -> *mut xcb_connection_t;\n\n\n\n /// Connects to the X server, using an authorization information.\n\n /// display: The name of the display.\n\n /// auth: The authorization information.\n\n /// screen: A pointer to a preferred screen number.\n\n /// Returns A newly allocated xcb_connection_t structure.\n\n ///\n\n /// Connects to the X server specified by displayname, using the\n\n /// authorization auth. 
If a particular screen on that server is\n\n /// preferred, the int pointed to by screenp (if not NULL) will\n\n /// be set to that screen; otherwise screenp will be set to 0.\n\n ///\n\n /// Always returns a non-NULL pointer to a xcb_connection_t, even on failure.\n\n /// Callers need to use xcb_connection_has_error() to check for failure.\n\n /// When finished, use xcb_disconnect() to close the connection and free\n\n /// the structure.\n\n pub fn xcb_connect_to_display_with_auth_info(\n", "file_path": "src/ffi/base.rs", "rank": 75, "score": 3.269375352238934 }, { "content": " &conn,\n\n xcb::PROP_MODE_REPLACE as u8,\n\n win,\n\n wm_protocols,\n\n xcb::ATOM_ATOM,\n\n 32,\n\n &protocols,\n\n );\n\n\n\n xcb::map_window(&conn, win);\n\n conn.flush();\n\n xlib::XSync(conn.get_raw_dpy(), xlib::False);\n\n\n\n let glx_exts = CStr::from_ptr(glXQueryExtensionsString(conn.get_raw_dpy(), screen_num))\n\n .to_str()\n\n .unwrap();\n\n\n\n if !check_glx_extension(&glx_exts, \"GLX_ARB_create_context\") {\n\n panic!(\"could not find GLX extension GLX_ARB_create_context\");\n\n }\n", "file_path": "examples/opengl_window.rs", "rank": 76, "score": 2.798003822546106 }, { "content": "## [0.7.4] - 2016-06 - rtbo\n\n- templating send_event* to take event obj instead of str\n\n- correct iterator attribute lifetime (#16)\n\n\n\n## [0.7.3] - 2016-04-10 - rtbo\n\n- templating some accessors\n\n\n\n## [0.7.2] - 2016-04-02 - rtbo\n\n- fix #14\n\n\n\n## [0.7.1] - 2016-03-29 - rtbo\n\n- module names closer to ffi\n\n- fix #13\n\n\n\n## [0.7.0] - 2016-03-28 - rtbo\n\n- fix connection with strings (#9)\n\n- assign response_type in *Event::new (#10)\n\n- Connection::connect returns Result (#11)\n\n- Some documentation (#12)\n\n\n\n## [0.6.2] - 2016-03-04 - rtbo\n\n- fix: correct names for DRI2 and 3 FFI constants\n\n\n\n## [0.6.1] - 2016-03-02 - rtbo\n\n- fix: correct names for 'xtest' extension\n\n\n\n## [0.6.0] - 2016-02-22 - rtbo\n\n- xlib_xcb: Connection owns the xlib::Display and calls 
XCloseDisplay\n\n- requests accept template slices\n\n- POD types distinction\n\n\n\n## [0.5.0] - 2016-02-07 - rtbo\n\n- adding xlib_xcb\n\n- show how to create an opengl enabled window\n\n\n\n## [0.4.1] - 2016-02-07 - rtbo\n\n- generating union accessors\n\n- handling of bool parameters in the wrapper API\n\n- rewrite of wrappers structures (pub type instead of struct with base field)\n\n- module clean-up and export\n\n- Travis CI\n\n\n\n## [0.4.0] - 2016-03-02 - rtbo/laumann\n\n- first fully functional wrappers\n\n- rewritten rs_client.py\n\n- new examples\n\n- made ffi very close to C\n\n- fixed wrappers segfaults\n\n\n\n ## [0.3.0] - 2013 - Aatch\n", "file_path": "CHANGELOG.md", "rank": 77, "score": 2.689811760836016 }, { "content": "pub const X_PROTOCOL_REVISION: u32 = 0;\n\n/// X_TCP_PORT + display number = server port for TCP transport\n\npub const X_TCP_PORT: u32 = 6000;\n\n\n\n/// Opaque type used as key for `Connection::get_extension_data`\n\npub type Extension = xcb_extension_t;\n\n\n\n/// `xcb::NONE` is the universal null resource or null atom parameter value\n\n/// for many core X requests\n\npub const NONE: u32 = 0;\n\n/// `xcb::COPY_FROM_PARENT` can be used for many `xcb::create_window` parameters\n\npub const COPY_FROM_PARENT: u32 = 0;\n\n/// `xcb::CURRENT_TIME` can be used in most requests that take an `xcb::Timestamp`\n\npub const CURRENT_TIME: u32 = 0;\n\n/// `xcb::NO_SYMBOL` fills in unused entries in `xcb::Keysym` tables\n\npub const NO_SYMBOL: u32 = 0;\n\n\n\n/// `StructPtr` is a wrapper for pointer to struct owned by XCB\n\n/// that must not be freed\n\n/// it is instead bound to the lifetime of its parent that it borrows immutably\n", "file_path": "src/base.rs", "rank": 78, "score": 2.6847110201076765 }, { "content": "\n\n xcb::create_window(\n\n &conn,\n\n xcb::COPY_FROM_PARENT as u8,\n\n window,\n\n screen.root(),\n\n 0,\n\n 0,\n\n 320,\n\n 240,\n\n 10,\n\n xcb::WINDOW_CLASS_INPUT_OUTPUT as u16,\n\n screen.root_visual(),\n\n &values,\n\n 
);\n\n\n\n xcb::map_window(&conn, window);\n\n\n\n {\n\n let conn = conn.clone();\n", "file_path": "examples/threaded_window.rs", "rank": 79, "score": 2.681758126330084 }, { "content": " Connection,\n\n /// xcb connection shutdown because of extension not supported\n\n ClosedExtNotSupported,\n\n /// malloc(), calloc() and realloc() error upon failure, for eg ENOMEM\n\n ClosedMemInsufficient,\n\n /// Connection closed, exceeding request length that server accepts.\n\n ClosedReqLenExceed,\n\n /// Connection closed, error during parsing display string.\n\n ClosedParseErr,\n\n /// Connection closed because the server does not have a screen\n\n /// matching the display.\n\n ClosedInvalidScreen,\n\n /// Connection closed because some FD passing operation failed\n\n ClosedFdPassingFailed,\n\n}\n\n\n\nimpl ConnError {\n\n fn to_str(&self) -> &str {\n\n match *self {\n\n ConnError::Connection => \"Connection error, possible I/O error\",\n", "file_path": "src/base.rs", "rank": 80, "score": 2.6498368553943834 }, { "content": "\n\n // we need at least xcb-xkb-1.0 to be available on client\n\n // machine\n\n {\n\n let cookie = xcb_xkb_use_extension(c, 1, 0);\n\n let reply = xcb_xkb_use_extension_reply(c, cookie, null_mut());\n\n if reply.is_null() {\n\n panic!(\"could not get xkb extension supported version\");\n\n }\n\n if (*reply).supported == 0 {\n\n libc::free(reply as *mut c_void);\n\n panic!(\"xkb-1.0 is not supported\");\n\n }\n\n libc::free(reply as *mut c_void);\n\n }\n\n\n\n // we now select what events we want to receive\n\n // such as map change, keyboard hotplug ...\n\n // note that key strokes are given directly by\n\n // the XCB_KEY_PRESS event from xproto, not by xkb\n", "file_path": "examples/ffi_xkb_init.rs", "rank": 81, "score": 2.6297717301472647 }, { "content": " }\n\n\n\n /// Returns the next event without reading from the connection.\n\n ///\n\n /// This is a version of `poll_for_event` that only examines the\n\n /// event queue for new events. 
The function doesn't try to read new\n\n /// events from the connection if no queued events are found.\n\n ///\n\n /// This function is useful for callers that know in advance that all\n\n /// interesting events have already been read from the connection. For\n\n /// example, callers might use `wait_for_reply` and be interested\n\n /// only of events that preceded a specific reply.\n\n pub fn poll_for_queued_event(&self) -> Option<GenericEvent> {\n\n unsafe {\n\n let event = xcb_poll_for_queued_event(self.c);\n\n if event.is_null() {\n\n None\n\n } else {\n\n Some(GenericEvent { ptr: event })\n\n }\n", "file_path": "src/base.rs", "rank": 82, "score": 2.6297717301472647 }, { "content": " 0,\n\n 0,\n\n 150,\n\n 150,\n\n 10,\n\n xcb::WINDOW_CLASS_INPUT_OUTPUT as u16,\n\n screen.root_visual(),\n\n &values,\n\n );\n\n\n\n xcb::map_window(&conn, window);\n\n\n\n let title = \"Basic Window\";\n\n // setting title\n\n xcb::change_property(\n\n &conn,\n\n xcb::PROP_MODE_REPLACE as u8,\n\n window,\n\n xcb::ATOM_WM_NAME,\n\n xcb::ATOM_STRING,\n", "file_path": "examples/basic_window.rs", "rank": 83, "score": 2.4712389874481437 }, { "content": " pub fn xcb_parse_display(\n\n name: *const c_char,\n\n host: *mut *mut c_char,\n\n display: *mut c_int,\n\n screen: *mut c_int,\n\n ) -> c_int;\n\n\n\n /// Connects to the X server.\n\n /// displayname: The name of the display.\n\n /// screenp: A pointer to a preferred screen number.\n\n /// Returns A newly allocated xcb_connection_t structure.\n\n ///\n\n /// Connects to the X server specified by displayname. If\n\n /// displayname is NULL, uses the value of the DISPLAY environment\n\n /// variable. 
If a particular screen on that server is preferred, the\n\n /// int pointed to by screenp (if not NULL) will be set to that\n\n /// screen; otherwise the screen will be set to 0.\n\n ///\n\n /// Always returns a non-NULL pointer to a xcb_connection_t, even on failure.\n\n /// Callers need to use xcb_connection_has_error() to check for failure.\n", "file_path": "src/ffi/base.rs", "rank": 84, "score": 2.4515669438663936 }, { "content": " 8,\n\n title.as_bytes(),\n\n );\n\n\n\n conn.flush();\n\n\n\n // retrieving title\n\n let cookie = xcb::get_property(\n\n &conn,\n\n false,\n\n window,\n\n xcb::ATOM_WM_NAME,\n\n xcb::ATOM_STRING,\n\n 0,\n\n 1024,\n\n );\n\n if let Ok(reply) = cookie.get_reply() {\n\n assert_eq!(std::str::from_utf8(reply.value()).unwrap(), title);\n\n } else {\n\n panic!(\"could not retrieve window title!\");\n", "file_path": "examples/basic_window.rs", "rank": 85, "score": 2.431236909870841 }, { "content": " /// wraps a `xlib::Display` and get an XCB connection from an exisiting object\n\n /// `xlib::XCloseDisplay` will be called when the returned object is dropped\n\n #[cfg(feature = \"xlib_xcb\")]\n\n pub unsafe fn new_from_xlib_display(dpy: *mut xlib::Display) -> Connection {\n\n assert!(!dpy.is_null(), \"attempt connect with null display\");\n\n Connection {\n\n c: XGetXCBConnection(dpy),\n\n dpy: dpy,\n\n }\n\n }\n\n\n\n /// Connects to the X server, using an authorization information.\n\n /// display: The name of the display.\n\n /// auth_info: The authorization information.\n\n /// screen: A pointer to a preferred screen number.\n\n /// Returns A newly allocated `Connection` structure.\n\n ///\n\n /// Connects to the X server specified by displayname, using the\n\n /// authorization auth.\n\n /// The second member of the returned tuple is the preferred screen, or 0\n", "file_path": "src/base.rs", "rank": 86, "score": 2.4014944142947994 }, { "content": " ///\n\n /// Returns > 0 if the connection is in an error state; 0 otherwise.\n\n pub fn 
xcb_connection_has_error(c: *mut xcb_connection_t) -> c_int;\n\n\n\n /// Connects to the X server.\n\n ///\n\n /// Connects to an X server, given the open socket @p fd and the\n\n /// xcb_auth_info_t @p auth_info. The file descriptor @p fd is\n\n /// bidirectionally connected to an X server. If the connection\n\n /// should be unauthenticated, @p auth_info must be @c\n\n /// NULL.\n\n ///\n\n /// Always returns a non-NULL pointer to a xcb_connection_t, even on failure.\n\n /// Callers need to use xcb_connection_has_error() to check for failure.\n\n /// When finished, use xcb_disconnect() to close the connection and free\n\n /// the structure.\n\n pub fn xcb_connect_to_fd(fd: c_int, auth_info: *mut xcb_auth_info_t) -> *mut xcb_connection_t;\n\n\n\n /// Closes the connection.\n\n ///\n", "file_path": "src/ffi/base.rs", "rank": 87, "score": 2.3892942572025833 }, { "content": "\n\n/// Generic cookie\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct xcb_void_cookie_t {\n\n /// sequence number\n\n pub sequence: c_uint,\n\n}\n\n\n\n/// XCB_NONE is the universal null resource or null atom parameter value for many core X requests\n\npub const XCB_NONE: u32 = 0;\n\n/// XCB_COPY_FROM_PARENT can be used for many xcb_create_window parameters\n\npub const XCB_COPY_FROM_PARENT: u32 = 0;\n\n/// XCB_CURRENT_TIME can be used in most requests that take an xcb_timestamp_t\n\npub const XCB_CURRENT_TIME: u32 = 0;\n\n/// XCB_NO_SYMBOL fills in unused entries in xcb_keysym_t tables\n\npub const XCB_NO_SYMBOL: u32 = 0;\n\n\n\n/// Container for authorization information.\n\n/// A container for authorization information to be sent to the X server\n", "file_path": "src/ffi/base.rs", "rank": 88, "score": 2.377217432892268 }, { "content": " NullResponse,\n\n GenericError(GenericError),\n\n}\n\n\n\nimpl std::fmt::Display for ReplyError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {\n\n write!(f, \"xcb::ReplyError: \")?;\n\n match self {\n\n 
Self::NullResponse => {\n\n write!(\n\n f,\n\n \"Unexpected null pointer(check pending errors on connection)\"\n\n )\n\n }\n\n Self::GenericError(g) => {\n\n write!(f, \"{}\", g)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/base.rs", "rank": 89, "score": 2.359208003778339 }, { "content": " );\n\n\n\n let win = conn.generate_id();\n\n xcb::create_window(\n\n &conn,\n\n xcb::COPY_FROM_PARENT as u8,\n\n win,\n\n screen.root(),\n\n 0,\n\n 0,\n\n 150,\n\n 150,\n\n 10,\n\n xcb::WINDOW_CLASS_INPUT_OUTPUT as u16,\n\n screen.root_visual(),\n\n &[\n\n (xcb::CW_BACK_PIXEL, screen.white_pixel()),\n\n (\n\n xcb::CW_EVENT_MASK,\n\n xcb::EVENT_MASK_EXPOSURE | xcb::EVENT_MASK_KEY_PRESS,\n", "file_path": "examples/drawing.rs", "rank": 90, "score": 2.3124048310858716 }, { "content": "\n\nimpl<T> Drop for Error<T> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n libc::free(self.ptr as *mut c_void);\n\n }\n\n }\n\n}\n\n\n\nimpl<T> fmt::Display for Error<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"xcb::Error {{ response_type: {}, error_code: {} }}\",\n\n self.response_type(),\n\n self.error_code()\n\n )\n\n }\n\n}\n\nimpl<T: fmt::Debug> error::Error for Error<T> {\n", "file_path": "src/base.rs", "rank": 91, "score": 2.294222184079938 }, { "content": " xcb::WINDOW_CLASS_INPUT_OUTPUT as u16,\n\n (*vi).visualid as u32,\n\n &cw_values,\n\n );\n\n\n\n xlib::XFree(vi as *mut c_void);\n\n\n\n let title = \"XCB OpenGL\";\n\n xcb::change_property(\n\n &conn,\n\n xcb::PROP_MODE_REPLACE as u8,\n\n win,\n\n xcb::ATOM_WM_NAME,\n\n xcb::ATOM_STRING,\n\n 8,\n\n title.as_bytes(),\n\n );\n\n\n\n let protocols = [wm_delete_window];\n\n xcb::change_property(\n", "file_path": "examples/opengl_window.rs", "rank": 92, "score": 2.250415759928542 }, { "content": " display: *const c_char,\n\n auth: *mut xcb_auth_info_t,\n\n screen: *mut c_int,\n\n ) -> *mut xcb_connection_t;\n\n\n\n /// Allocates an XID for a new object.\n\n /// Returns A newly allocated XID.\n\n ///\n\n 
/// Allocates an XID for a new object. Typically used just prior to\n\n /// various object creation functions, such as xcb_create_window.\n\n pub fn xcb_generate_id(c: *mut xcb_connection_t) -> u32;\n\n\n\n}\n", "file_path": "src/ffi/base.rs", "rank": 93, "score": 2.220330748325641 }, { "content": "\n\n#[repr(C)]\n\npub struct xcb_protocol_request_t {\n\n count: usize,\n\n ext: *mut xcb_extension_t,\n\n opcode: u8,\n\n isvoid: u8,\n\n}\n\n\n\n#[repr(C)]\n\npub enum xcb_send_request_flags_t {\n\n XCB_REQUEST_CHECKED = 0x01,\n\n XCB_REQUEST_RAW = 0x02,\n\n XCB_REQUEST_DISCARD_REPLY = 0x04,\n\n XCB_REQUEST_REPLY_FDS = 0x08,\n\n}\n\n\n\n#[link(name = \"xcb\")]\n\nextern \"C\" {\n\n\n", "file_path": "src/ffi/ext.rs", "rank": 94, "score": 2.2150033550555235 }, { "content": " /// Returns the next event or error from the server.\n\n ///\n\n /// Returns the next event or error from the server, if one is\n\n /// available, or returns @c NULL otherwise. If no event is available, that\n\n /// might be because an I/O error like connection close occurred while\n\n /// attempting to read the next event, in which case the connection is\n\n /// shut down when this function returns.\n\n pub fn xcb_poll_for_event(c: *mut xcb_connection_t) -> *mut xcb_generic_event_t;\n\n\n\n /// Returns the next event without reading from the connection.\n\n ///\n\n /// This is a version of xcb_poll_for_event that only examines the\n\n /// event queue for new events. The function doesn't try to read new\n\n /// events from the connection if no queued events are found.\n\n ///\n\n /// This function is useful for callers that know in advance that all\n\n /// interesting events have already been read from the connection. 
For\n\n /// example, callers might use xcb_wait_for_reply and be interested\n\n /// only of events that preceded a specific reply.\n\n pub fn xcb_poll_for_queued_event(c: *mut xcb_connection_t) -> *mut xcb_generic_event_t;\n", "file_path": "src/ffi/base.rs", "rank": 95, "score": 2.1789702958941928 }, { "content": "\n\nimpl std::error::Error for ReplyError {\n\n fn description(&self) -> &str {\n\n \"xcb::ReplyError\"\n\n }\n\n}\n\n\n\n//TODO: Implement wrapper functions for constructing auth_info\n\npub type AuthInfo = xcb_auth_info_t;\n\n\n\n#[cfg(feature = \"xlib_xcb\")]\n\npub enum EventQueueOwner {\n\n Xcb,\n\n Xlib,\n\n}\n\n\n\n/// Error type that is returned by `Connection::has_error`\n\n#[derive(Debug)]\n\npub enum ConnError {\n\n /// xcb connection errors because of socket, pipe and other stream errors.\n", "file_path": "src/base.rs", "rank": 96, "score": 2.1773445499487116 }, { "content": " let prop_notify: &xcb::PropertyNotifyEvent = unsafe { xcb::cast_event(&event) };\n\n if prop_notify.atom() == xcb::ATOM_WM_NAME {\n\n // retrieving title\n\n let cookie = xcb::get_property(\n\n &conn,\n\n false,\n\n window,\n\n xcb::ATOM_WM_NAME,\n\n xcb::ATOM_STRING,\n\n 0,\n\n 1024,\n\n );\n\n if let Ok(reply) = cookie.get_reply() {\n\n println!(\n\n \"title changed to \\\"{}\\\"\",\n\n std::str::from_utf8(reply.value()).unwrap()\n\n );\n\n }\n\n }\n\n } else if r == xcb::KEY_PRESS as u8 {\n", "file_path": "examples/threaded_window.rs", "rank": 97, "score": 2.1400964226679 }, { "content": " conn.flush();\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n // only to make sure that rs_client generate correct names for DRI2\n\n // (used to be \"*_DRI_2_*\")\n\n // should be in a \"compile tests\" section instead of example\n\n let _ = xcb::ffi::dri2::XCB_DRI2_ATTACHMENT_BUFFER_ACCUM;\n\n\n\n glXDestroyContext(conn.get_raw_dpy(), ctx);\n\n\n\n xcb::unmap_window(&conn, win);\n\n xcb::destroy_window(&conn, win);\n\n xcb::free_colormap(&conn, cmap);\n\n conn.flush();\n\n }\n\n}\n", 
"file_path": "examples/opengl_window.rs", "rank": 98, "score": 2.0287747714317383 }, { "content": " unsafe {\n\n if self.dpy.is_null() {\n\n xcb_disconnect(self.c);\n\n } else {\n\n xlib::XCloseDisplay(self.dpy);\n\n }\n\n }\n\n }\n\n}\n\n\n\n// Mimics xproto::QueryExtensionReply, but without the Drop trait.\n\n// Used for Connection::get_extension_data whose returned value\n\n// must not be freed.\n\n// Named QueryExtensionData to avoid name collision\n\npub struct QueryExtensionData<'a> {\n\n ptr: *const xcb_query_extension_reply_t,\n\n _marker: PhantomData<&'a ()>,\n\n}\n\n\n\nimpl<'a> QueryExtensionData<'a> {\n", "file_path": "src/base.rs", "rank": 99, "score": 2.0287747714317383 } ]